This is an automated email from the ASF dual-hosted git repository.

amagyar pushed a commit to branch branch-2.7
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/branch-2.7 by this push:
     new 1933594  [AMBARI-25231] : Replace deprecated hadoop commands (#2962)
1933594 is described below

commit 193359448c30833d12c6344f7d4e3611e91670be
Author: virajjasani <34790606+virajjas...@users.noreply.github.com>
AuthorDate: Thu Jun 6 13:49:36 2019 +0530

    [AMBARI-25231] : Replace deprecated hadoop commands (#2962)
---
 .../libraries/providers/__init__.py                |  1 +
 .../libraries/providers/execute_hdfs.py            | 42 ++++++++++++++++++++++
 .../libraries/resources/__init__.py                |  1 +
 .../resources/{__init__.py => execute_hdfs.py}     | 24 ++++++++-----
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py     | 23 ++++++------
 .../services/HDFS/package/scripts/hdfs_namenode.py | 10 +++---
 .../test/python/stacks/2.0.6/HDFS/test_namenode.py | 36 +++++++++----------
 7 files changed, 95 insertions(+), 42 deletions(-)

diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
index aed6c5f..d88d948 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
@@ -38,6 +38,7 @@ PROVIDERS = dict(
   ),
   default=dict(
     ExecuteHadoop="resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider",
+    ExecuteHDFS="resource_management.libraries.providers.execute_hdfs.ExecuteHDFSProvider",
     TemplateConfig="resource_management.libraries.providers.template_config.TemplateConfigProvider",
     XmlConfig="resource_management.libraries.providers.xml_config.XmlConfigProvider",
     PropertiesFile="resource_management.libraries.providers.properties_file.PropertiesFileProvider",
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hdfs.py b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hdfs.py
new file mode 100644
index 0000000..e207000
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hdfs.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management.core.providers import Provider
+from resource_management.core.resources import Execute
+from resource_management.core.shell import quote_bash_args
+from resource_management.libraries.functions.format import format
+
+
+class ExecuteHDFSProvider(Provider):
+    def action_run(self):
+        conf_dir = self.resource.conf_dir
+        command = self.resource.command
+        if isinstance(command, (list, tuple)):
+            command = ' '.join(quote_bash_args(x) for x in command)
+        Execute(format("hdfs --config {conf_dir} {command}"),
+                user=self.resource.user,
+                tries=self.resource.tries,
+                try_sleep=self.resource.try_sleep,
+                logoutput=self.resource.logoutput,
+                path=self.resource.bin_dir,
+                environment=self.resource.environment,
+                )
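
For context: the provider above shell-quotes and joins list commands before handing the result to Execute. Below is a minimal standalone sketch of that command assembly, using the stdlib's pipes.quote as a stand-in for resource_management's quote_bash_args (the real provider also forwards user, tries, try_sleep, logoutput, path, and environment, which this sketch omits; the hostname is illustrative):

    import pipes  # stdlib shell quoting; stand-in for quote_bash_args

    def build_hdfs_command(conf_dir, command):
        # Lists/tuples are shell-quoted and space-joined, mirroring action_run()
        if isinstance(command, (list, tuple)):
            command = ' '.join(pipes.quote(x) for x in command)
        # Every command is prefixed with the hdfs CLI and its config directory
        return "hdfs --config %s %s" % (conf_dir, command)

    # build_hdfs_command('/etc/hadoop/conf', 'dfsadmin -fs hdfs://nn:8020 -refreshNodes')
    # -> 'hdfs --config /etc/hadoop/conf dfsadmin -fs hdfs://nn:8020 -refreshNodes'
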
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
index 524292f..6f06210 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
@@ -21,6 +21,7 @@ Ambari Agent
 """
 
 from resource_management.libraries.resources.execute_hadoop import *
+from resource_management.libraries.resources.execute_hdfs import *
 from resource_management.libraries.resources.template_config import *
 from resource_management.libraries.resources.xml_config import *
 from resource_management.libraries.resources.properties_file import *
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hdfs.py
similarity index 56%
copy from ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
copy to ambari-common/src/main/python/resource_management/libraries/resources/execute_hdfs.py
index 524292f..e85b448 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hdfs.py
@@ -20,12 +20,18 @@ Ambari Agent
 
 """
 
-from resource_management.libraries.resources.execute_hadoop import *
-from resource_management.libraries.resources.template_config import *
-from resource_management.libraries.resources.xml_config import *
-from resource_management.libraries.resources.properties_file import *
-from resource_management.libraries.resources.repository import *
-from resource_management.libraries.resources.monitor_webserver import *
-from resource_management.libraries.resources.hdfs_resource import *
-from resource_management.libraries.resources.msi import *
-from resource_management.libraries.resources.modify_properties_file import *
\ No newline at end of file
+__all__ = ["ExecuteHDFS"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument
+
+
+class ExecuteHDFS(Resource):
+    action = ForcedListArgument(default="run")
+    command = ResourceArgument(default=lambda obj: obj.name)
+    tries = ResourceArgument(default=1)
+    try_sleep = ResourceArgument(default=0)
+    user = ResourceArgument()
+    logoutput = ResourceArgument()
+    bin_dir = ResourceArgument(default=[])
+    environment = ResourceArgument(default={})
+    conf_dir = ResourceArgument()
+    actions = Resource.actions + ["run"]
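
A usage sketch of the new resource as a caller would write it in an agent-side service script (the command, host, and paths here are illustrative; the hdfs_namenode.py changes below pass the equivalent real parameters):

    from resource_management.libraries.resources.execute_hdfs import ExecuteHDFS

    # With ExecuteHDFSProvider registered above, this resolves to:
    #   hdfs --config /etc/hadoop/conf dfsadmin -fs hdfs://nn:8020 -refreshNodes
    ExecuteHDFS('dfsadmin -fs hdfs://nn:8020 -refreshNodes',
                user='hdfs',
                conf_dir='/etc/hadoop/conf',
                bin_dir='/usr/bin')
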
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index 213c8d9..3315145 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -31,6 +31,7 @@ from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.namenode_ha_utils import get_name_service_by_hostname
 from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
+from resource_management.libraries.resources.execute_hdfs import ExecuteHDFS
 from resource_management.libraries.functions import Direction
 from ambari_commons import OSCheck, OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
@@ -452,10 +453,11 @@ def refreshProxyUsers():
    nn_refresh_cmd = format('dfsadmin -fs hdfs://{namenode_rpc} -refreshSuperUserGroupsConfiguration')
  else:
    nn_refresh_cmd = format('dfsadmin -fs {namenode_address} -refreshSuperUserGroupsConfiguration')
-  ExecuteHadoop(nn_refresh_cmd,
-                user=params.hdfs_user,
-                conf_dir=params.hadoop_conf_dir,
-                bin_dir=params.hadoop_bin_dir)
+  ExecuteHDFS(nn_refresh_cmd,
+              user=params.hdfs_user,
+              conf_dir=params.hadoop_conf_dir,
+              bin_dir=params.hadoop_bin_dir)
+
 
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def decommission():
@@ -491,10 +493,11 @@ def decommission():
      nn_refresh_cmd = format('dfsadmin -fs hdfs://{namenode_rpc} -refreshNodes')
     else:
       nn_refresh_cmd = format('dfsadmin -fs {namenode_address} -refreshNodes')
-    ExecuteHadoop(nn_refresh_cmd,
-                  user=hdfs_user,
-                  conf_dir=conf_dir,
-                  bin_dir=params.hadoop_bin_dir)
+    ExecuteHDFS(nn_refresh_cmd,
+                user=hdfs_user,
+                conf_dir=conf_dir,
+                bin_dir=params.hadoop_bin_dir)
+
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def decommission():
@@ -517,9 +520,9 @@ def decommission():
   if params.dfs_ha_enabled:
     # due to a bug in hdfs, refreshNodes will not run on both namenodes so we
     # need to execute each command scoped to a particular namenode
-    nn_refresh_cmd = format('cmd /c hadoop dfsadmin -fs hdfs://{namenode_rpc} -refreshNodes')
+    nn_refresh_cmd = format('cmd /c hdfs dfsadmin -fs hdfs://{namenode_rpc} -refreshNodes')
  else:
-    nn_refresh_cmd = format('cmd /c hadoop dfsadmin -fs {namenode_address} -refreshNodes')
+    nn_refresh_cmd = format('cmd /c hdfs dfsadmin -fs {namenode_address} -refreshNodes')
   Execute(nn_refresh_cmd, user=hdfs_user)
 
 
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/hdfs_namenode.py
index 19751f6..d0c8661 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/hdfs_namenode.py
@@ -163,8 +163,8 @@ def decommission():
      nn_refresh_cmd = format('dfsadmin -fs hdfs://{namenode_rpc} -refreshNodes')
     else:
       nn_refresh_cmd = format('dfsadmin -refreshNodes')
-    ExecuteHadoop(nn_refresh_cmd,
-                  user=hdfs_user,
-                  conf_dir=conf_dir,
-                  kinit_override=True,
-                  bin_dir=params.hadoop_bin_dir)
+    ExecuteHDFS(nn_refresh_cmd,
+                user=hdfs_user,
+                conf_dir=conf_dir,
+                kinit_override=True,
+                bin_dir=params.hadoop_bin_dir)
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index f46ac4b..d76982e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -1052,10 +1052,10 @@ class TestNamenode(RMFTestCase):
                               group = 'hadoop',
                               )
     self.assertResourceCalled('Execute', '', user = 'hdfs')
-    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
-                              user = 'hdfs',
-                              conf_dir = '/etc/hadoop/conf',
-                              bin_dir = '/usr/bin')
+    self.assertResourceCalled('ExecuteHDFS', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
+                              user='hdfs',
+                              conf_dir='/etc/hadoop/conf',
+                              bin_dir='/usr/bin')
     self.assertNoMoreResources()
 
   def test_decommission_update_files_only(self):
@@ -1088,10 +1088,10 @@ class TestNamenode(RMFTestCase):
                               group = 'hadoop',
                               )
     self.assertResourceCalled('Execute', '', user = 'hdfs')
-    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
-                              user = 'hdfs',
-                              conf_dir = '/etc/hadoop/conf',
-                              bin_dir = '/usr/bin')
+    self.assertResourceCalled('ExecuteHDFS', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
+                              user='hdfs',
+                              conf_dir='/etc/hadoop/conf',
+                              bin_dir='/usr/bin')
     self.assertNoMoreResources()
 
 
@@ -1111,11 +1111,11 @@ class TestNamenode(RMFTestCase):
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache....@example.com;',
         user = 'hdfs',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
-        bin_dir = '/usr/bin',
-        conf_dir = '/etc/hadoop/conf',
-        user = 'hdfs',
-    )
+    self.assertResourceCalled('ExecuteHDFS', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
+                              bin_dir='/usr/bin',
+                              conf_dir='/etc/hadoop/conf',
+                              user='hdfs',
+                              )
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
@@ -1763,11 +1763,11 @@ class TestNamenode(RMFTestCase):
                          stack_version = self.STACK_VERSION,
                          target = RMFTestCase.TARGET_COMMON_SERVICES
                          )
-
-      self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshSuperUserGroupsConfiguration',
-                                user = 'hdfs',
-                                conf_dir = '/etc/hadoop/conf',
-                                bin_dir = '/usr/bin')
+      self.assertResourceCalled('ExecuteHDFS',
+                                'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshSuperUserGroupsConfiguration',
+                                user='hdfs',
+                                conf_dir='/etc/hadoop/conf',
+                                bin_dir='/usr/bin')
       self.assertNoMoreResources()
 
   def test_reload_configs(self):
