Repository: ambari
Updated Branches:
  refs/heads/trunk b0dc16843 -> b849349bc


AMBARI-5968. Decommission of DataNode does not work (Ivan Kozlov via ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b849349b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b849349b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b849349b

Branch: refs/heads/trunk
Commit: b849349bc3db1c28742c65bc188039e6aa5ad1d4
Parents: b0dc168
Author: Nate Cole <[email protected]>
Authored: Tue Jun 3 09:13:57 2014 -0400
Committer: Nate Cole <[email protected]>
Committed: Tue Jun 3 09:13:57 2014 -0400

----------------------------------------------------------------------
 .../HDFS/package/scripts/hdfs_namenode.py       | 10 +++++++---
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 20 +++++++++++++++++++-
 2 files changed, 26 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b849349b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
index 67e16ed..62e5764 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
@@ -145,9 +145,13 @@ def decommission():
           user=hdfs_user
   )
 
-  # due to a bug in hdfs, refreshNodes will not run on both namenodes so we
-  # need to execute each command scoped to a particular namenode
-  ExecuteHadoop(format('dfsadmin -fs hdfs://{namenode_rpc} -refreshNodes'),
+  if params.dfs_ha_enabled:
+    # due to a bug in hdfs, refreshNodes will not run on both namenodes so we
+    # need to execute each command scoped to a particular namenode
+    nn_refresh_cmd = format('dfsadmin -fs hdfs://{namenode_rpc} -refreshNodes')
+  else:
+    nn_refresh_cmd = format('dfsadmin -refreshNodes')
+  ExecuteHadoop(nn_refresh_cmd,
                 user=hdfs_user,
                 conf_dir=conf_dir,
                 kinit_override=True)

http://git-wip-us.apache.org/repos/asf/ambari/blob/b849349b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index fa70469..4aac90f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -370,7 +370,25 @@ class TestNamenode(RMFTestCase):
                              only_if = "su - hdfs -c 'hdfs haadmin -getServiceState nn1 | grep active > /dev/null'",
                              )
     self.assertNoMoreResources()
-    
+
+  def test_decommission_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "decommission",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
+                              owner = 'hdfs',
+                              content = Template('exclude_hosts_list.j2'),
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Execute', '', user = 'hdfs')
+    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -refreshNodes',
+                              user = 'hdfs',
+                              conf_dir = '/etc/hadoop/conf',
+                              kinit_override = True)
+    self.assertNoMoreResources()
+
   def test_decommission_ha(self):
     self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
                        classname = "NameNode",

Reply via email to