Repository: ambari Updated Branches: refs/heads/trunk 6dae33e54 -> 3c2fa88f5
AMBARI-10545. A couple environment variables are missing for NFS Gateway. (jaimin) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3c2fa88f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3c2fa88f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3c2fa88f Branch: refs/heads/trunk Commit: 3c2fa88f54ab794e069d729c61c3dbb39407a1a9 Parents: 6dae33e Author: Jaimin Jetly <[email protected]> Authored: Thu Apr 16 14:06:04 2015 -0700 Committer: Jaimin Jetly <[email protected]> Committed: Thu Apr 16 14:06:04 2015 -0700 ---------------------------------------------------------------------- .../HDFS/2.1.0.2.0/package/scripts/utils.py | 24 ++++++++++++-------- .../python/stacks/2.0.6/HDFS/test_nfsgateway.py | 20 +++++++++------- 2 files changed, 27 insertions(+), 17 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/3c2fa88f/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py index 156a0ed..217f2f0 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py @@ -150,19 +150,19 @@ def service(action=None, name=None, user=None, options="", create_pid_dir=False, hadoop_env_exports = { 'HADOOP_LIBEXEC_DIR': params.hadoop_libexec_dir } + log_dir = format("{hdfs_log_dir_prefix}/{user}") + # NFS GATEWAY is always started by root using jsvc due to rpcbind bugs # on Linux such as CentOS6.2. 
https://bugzilla.redhat.com/show_bug.cgi?id=731542 if name == "nfs3" : - pid_file = format( - "{hadoop_pid_dir_prefix}/root/hadoop_privileged_nfs3.pid") - - print pid_file + pid_file = format("{pid_dir}/hadoop_privileged_nfs3.pid") custom_export = { - 'HADOOP_PRIVILEGED_NFS_USER': params.hdfs_user + 'HADOOP_PRIVILEGED_NFS_USER': params.hdfs_user, + 'HADOOP_PRIVILEGED_NFS_PID_DIR': pid_dir, + 'HADOOP_PRIVILEGED_NFS_LOG_DIR': log_dir } hadoop_env_exports.update(custom_export) - log_dir = format("{hdfs_log_dir_prefix}/{user}") check_process = format( "ls {pid_file} >/dev/null 2>&1 &&" " ps -p `cat {pid_file}` >/dev/null 2>&1") @@ -172,9 +172,15 @@ def service(action=None, name=None, user=None, options="", create_pid_dir=False, owner=user, recursive=True) if create_log_dir: - Directory(log_dir, - owner=user, - recursive=True) + if name == "nfs3": + Directory(log_dir, + mode=0775, + owner=params.root_user, + group=params.user_group) + else: + Directory(log_dir, + owner=user, + recursive=True) if params.security_enabled and name == "datanode": ## The directory where pid files are stored in the secure data environment. 
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c2fa88f/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py index e58f7be..d683198 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py @@ -61,14 +61,15 @@ class TestNFSGateway(RMFTestCase): ) self.assertResourceCalled('Directory', '/var/log/hadoop/root', owner = 'root', - recursive = True, + group = 'hadoop', + mode = 0775 ) self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid', action = ['delete'], not_if='ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` >/dev/null 2>&1', ) self.assertResourceCalled('Execute', "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start nfs3", - environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec', 'HADOOP_PRIVILEGED_NFS_USER': 'hdfs'}, + environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec', 'HADOOP_PRIVILEGED_NFS_USER': 'hdfs', 'HADOOP_PRIVILEGED_NFS_LOG_DIR': '/var/log/hadoop/root', 'HADOOP_PRIVILEGED_NFS_PID_DIR': '/var/run/hadoop/root'}, not_if = 'ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` >/dev/null 2>&1', ) self.assertNoMoreResources() @@ -92,14 +93,15 @@ class TestNFSGateway(RMFTestCase): ) self.assertResourceCalled('Directory', '/var/log/hadoop/root', owner = 'root', - recursive = True, + group = 'hadoop', + mode = 0775 ) self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid', action = ['delete'], not_if='ls 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` >/dev/null 2>&1', ) self.assertResourceCalled('Execute', "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop nfs3", - environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec', 'HADOOP_PRIVILEGED_NFS_USER': 'hdfs'}, + environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec', 'HADOOP_PRIVILEGED_NFS_USER': 'hdfs', 'HADOOP_PRIVILEGED_NFS_LOG_DIR': '/var/log/hadoop/root', 'HADOOP_PRIVILEGED_NFS_PID_DIR': '/var/run/hadoop/root'}, not_if = None, ) self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid', @@ -140,14 +142,15 @@ class TestNFSGateway(RMFTestCase): ) self.assertResourceCalled('Directory', '/var/log/hadoop/root', owner = 'root', - recursive = True, + group = 'hadoop', + mode = 0775 ) self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid', action = ['delete'], not_if='ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` >/dev/null 2>&1', ) self.assertResourceCalled('Execute', "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start nfs3", - environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec', 'HADOOP_PRIVILEGED_NFS_USER': 'hdfs'}, + environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec', 'HADOOP_PRIVILEGED_NFS_USER': 'hdfs', 'HADOOP_PRIVILEGED_NFS_LOG_DIR': '/var/log/hadoop/root', 'HADOOP_PRIVILEGED_NFS_PID_DIR': '/var/run/hadoop/root'}, not_if = 'ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` >/dev/null 2>&1', ) self.assertNoMoreResources() @@ -171,14 +174,15 @@ class TestNFSGateway(RMFTestCase): ) self.assertResourceCalled('Directory', '/var/log/hadoop/root', owner = 'root', - 
recursive = True, + group = 'hadoop', + mode = 0775 ) self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid', action = ['delete'], not_if='ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` >/dev/null 2>&1', ) self.assertResourceCalled('Execute', "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop nfs3", - environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec', 'HADOOP_PRIVILEGED_NFS_USER': 'hdfs'}, + environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec', 'HADOOP_PRIVILEGED_NFS_USER': 'hdfs', 'HADOOP_PRIVILEGED_NFS_LOG_DIR': '/var/log/hadoop/root', 'HADOOP_PRIVILEGED_NFS_PID_DIR': '/var/run/hadoop/root'}, not_if = None, ) self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid',
