ambari git commit: AMBARI-12355. DATANODE START failed on secure cluster.(vbrodetskyi)
Repository: ambari Updated Branches: refs/heads/trunk dbf9ccbbb - 2c5073a43 AMBARI-12355. DATANODE START failed on secure cluster.(vbrodetskyi) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2c5073a4 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2c5073a4 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2c5073a4 Branch: refs/heads/trunk Commit: 2c5073a43bde2708f90b510a05fd2d295fb60d38 Parents: dbf9ccb Author: Vitaly Brodetskyi vbrodets...@hortonworks.com Authored: Fri Jul 10 14:57:07 2015 +0300 Committer: Vitaly Brodetskyi vbrodets...@hortonworks.com Committed: Fri Jul 10 14:57:07 2015 +0300 -- .../HDFS/2.1.0.2.0/package/scripts/utils.py | 4 +-- .../python/stacks/2.0.6/HDFS/test_datanode.py | 24 +++ .../stacks/2.0.6/HDFS/test_journalnode.py | 12 .../python/stacks/2.0.6/HDFS/test_namenode.py | 32 ++-- .../python/stacks/2.0.6/HDFS/test_nfsgateway.py | 12 .../python/stacks/2.0.6/HDFS/test_snamenode.py | 12 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py | 20 ++-- 7 files changed, 57 insertions(+), 59 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2c5073a4/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py -- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py index 745a8d4..a185b5f 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py @@ -185,9 +185,7 @@ def service(action=None, name=None, user=None, options=, create_pid_dir=False, } hadoop_env_exports.update(custom_export) - check_process = as_user(format( -ls {pid_file} /dev/null 21 - ps -p `cat {pid_file}` /dev/null 21), user=params.hdfs_user) + check_process = as_sudo([test, -f, pid_file]) 
++ as_sudo([pgrep, -F, pid_file]) # on STOP directories shouldn't be created # since during stop still old dirs are used (which were created during previous start) http://git-wip-us.apache.org/repos/asf/ambari/blob/2c5073a4/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py index b99f53a..efb17be 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py @@ -63,11 +63,11 @@ class TestDatanode(RMFTestCase): ) self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid', action = ['delete'], -not_if = ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid /dev/null 21 ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` /dev/null 21', +not_if = ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid, ) self.assertResourceCalled('Execute', ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode', environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'}, -not_if = ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid /dev/null 21 ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` /dev/null 21', +not_if = ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid, ) self.assertNoMoreResources() @@ -82,7 +82,7 @@ class TestDatanode(RMFTestCase): ) self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid', action = ['delete'], -not_if = ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid /dev/null 21 ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` /dev/null 21', +not_if = ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid ambari-sudo.sh
ambari git commit: AMBARI-12355. DATANODE START failed on secure cluster.(vbrodetskyi)
Repository: ambari Updated Branches: refs/heads/branch-2.1 b10ace406 - 38493e4d4 AMBARI-12355. DATANODE START failed on secure cluster.(vbrodetskyi) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/38493e4d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/38493e4d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/38493e4d Branch: refs/heads/branch-2.1 Commit: 38493e4d411e92a42e623a77fb02d8928bd6acb7 Parents: b10ace4 Author: Vitaly Brodetskyi vbrodets...@hortonworks.com Authored: Fri Jul 10 14:50:32 2015 +0300 Committer: Vitaly Brodetskyi vbrodets...@hortonworks.com Committed: Fri Jul 10 14:50:32 2015 +0300 -- .../HDFS/2.1.0.2.0/package/scripts/utils.py | 4 +-- .../python/stacks/2.0.6/HDFS/test_datanode.py | 24 +++ .../stacks/2.0.6/HDFS/test_journalnode.py | 12 .../python/stacks/2.0.6/HDFS/test_namenode.py | 32 ++-- .../python/stacks/2.0.6/HDFS/test_nfsgateway.py | 12 .../python/stacks/2.0.6/HDFS/test_snamenode.py | 12 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py | 20 ++-- 7 files changed, 57 insertions(+), 59 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/38493e4d/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py -- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py index 745a8d4..a185b5f 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py @@ -185,9 +185,7 @@ def service(action=None, name=None, user=None, options=, create_pid_dir=False, } hadoop_env_exports.update(custom_export) - check_process = as_user(format( -ls {pid_file} /dev/null 21 - ps -p `cat {pid_file}` /dev/null 21), user=params.hdfs_user) + check_process = as_sudo([test, -f, 
pid_file]) ++ as_sudo([pgrep, -F, pid_file]) # on STOP directories shouldn't be created # since during stop still old dirs are used (which were created during previous start) http://git-wip-us.apache.org/repos/asf/ambari/blob/38493e4d/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py index b99f53a..efb17be 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py @@ -63,11 +63,11 @@ class TestDatanode(RMFTestCase): ) self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid', action = ['delete'], -not_if = ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid /dev/null 21 ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` /dev/null 21', +not_if = ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid, ) self.assertResourceCalled('Execute', ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode', environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'}, -not_if = ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid /dev/null 21 ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` /dev/null 21', +not_if = ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid, ) self.assertNoMoreResources() @@ -82,7 +82,7 @@ class TestDatanode(RMFTestCase): ) 
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid', action = ['delete'], -not_if = ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid /dev/null 21 ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` /dev/null 21', +not_if = ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid ambari-sudo.sh
ambari git commit: AMBARI-12382. RU - Finalize Failed, Ranger KMS does not advertise version since no structured-out-*.json file (srimanth)
Repository: ambari Updated Branches: refs/heads/branch-2.1 c4ba56565 - ee5eb5a00 AMBARI-12382. RU - Finalize Failed, Ranger KMS does not advertise version since no structured-out-*.json file (srimanth) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ee5eb5a0 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ee5eb5a0 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ee5eb5a0 Branch: refs/heads/branch-2.1 Commit: ee5eb5a006a2ac01dd928478fab4bc6857328a59 Parents: c4ba565 Author: Srimanth Gunturi sgunt...@hortonworks.com Authored: Fri Jul 10 14:48:41 2015 -0700 Committer: Srimanth Gunturi sgunt...@hortonworks.com Committed: Fri Jul 10 16:44:07 2015 -0700 -- .../RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py| 3 +++ 1 file changed, 3 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ee5eb5a0/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py -- diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py index 3512561..5c3790e 100755 --- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py +++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py @@ -29,6 +29,9 @@ import upgrade class KmsServer(Script): + def get_stack_to_component(self): +return {HDP: ranger-kms} + def install(self, env): self.install_packages(env) import params
ambari git commit: AMBARI-12382. RU - Finalize Failed, Ranger KMS does not advertise version since no structured-out-*.json file (srimanth)
Repository: ambari Updated Branches: refs/heads/trunk 7d0fd97cc - 2f68125ea AMBARI-12382. RU - Finalize Failed, Ranger KMS does not advertise version since no structured-out-*.json file (srimanth) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2f68125e Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2f68125e Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2f68125e Branch: refs/heads/trunk Commit: 2f68125ea97c790d2b4dd7e65ed9343c0bdb3117 Parents: 7d0fd97 Author: Srimanth Gunturi sgunt...@hortonworks.com Authored: Fri Jul 10 14:48:41 2015 -0700 Committer: Srimanth Gunturi sgunt...@hortonworks.com Committed: Fri Jul 10 14:48:47 2015 -0700 -- .../RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py| 3 +++ 1 file changed, 3 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2f68125e/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py -- diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py index 3512561..5c3790e 100755 --- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py +++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py @@ -29,6 +29,9 @@ import upgrade class KmsServer(Script): + def get_stack_to_component(self): +return {HDP: ranger-kms} + def install(self, env): self.install_packages(env) import params
ambari git commit: AMBARI-12385 - Oozie Service Fails After Downgrade Because Of Missing Falcon JAR In WAR (jonathanhurley)
Repository: ambari Updated Branches: refs/heads/branch-2.1 ee5eb5a00 - 85c688062 AMBARI-12385 - Oozie Service Fails After Downgrade Because Of Missing Falcon JAR In WAR (jonathanhurley) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/85c68806 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/85c68806 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/85c68806 Branch: refs/heads/branch-2.1 Commit: 85c688062526806b8c14b14947e788db7467a9fc Parents: ee5eb5a Author: Jonathan Hurley jhur...@hortonworks.com Authored: Fri Jul 10 23:19:06 2015 -0400 Committer: Jonathan Hurley jhur...@hortonworks.com Committed: Sat Jul 11 01:42:22 2015 -0400 -- .../OOZIE/4.0.0.2.0/package/scripts/oozie.py| 8 +- .../4.0.0.2.0/package/scripts/oozie_server.py | 16 +- .../package/scripts/oozie_server_upgrade.py | 53 +-- .../4.0.0.2.0/package/scripts/params_linux.py | 12 +- .../4.0.0.2.0/package/scripts/status_params.py | 3 +- .../stacks/2.0.6/OOZIE/test_oozie_server.py | 147 +-- .../stacks/2.2/configs/oozie-downgrade.json | 3 - .../stacks/2.2/configs/oozie-upgrade.json | 3 - 8 files changed, 172 insertions(+), 73 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/85c68806/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py -- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py index 7055255..35df33a 100644 --- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py +++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py @@ -226,11 +226,11 @@ def oozie_server_specific(): #falcon el extension if params.has_falcon_host: Execute(format('{sudo} cp {falcon_home}/oozie/ext/falcon-oozie-el-extension-*.jar {oozie_libext_dir}'), - not_if = no_op_test, 
-) + not_if = no_op_test) + Execute(format('{sudo} chown {oozie_user}:{user_group} {oozie_libext_dir}/falcon-oozie-el-extension-*.jar'), - not_if = no_op_test, -) + not_if = no_op_test) + if params.lzo_enabled and len(params.all_lzo_packages) 0: Package(params.all_lzo_packages) Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'), http://git-wip-us.apache.org/repos/asf/ambari/blob/85c68806/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py -- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py index c9bdd78..81d8c0c 100644 --- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py +++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py @@ -53,9 +53,14 @@ class OozieServer(Script): def start(self, env, rolling_restart=False): import params env.set_params(params) -#TODO remove this when config command will be implemented + self.configure(env) +# preparing the WAR file must run after configure since configure writes out +# oozie-env.sh which is needed to have the right environment directories setup! +if rolling_restart is True: + OozieUpgrade.prepare_warfile() + oozie_service(action='start', rolling_restart=rolling_restart) def stop(self, env, rolling_restart=False): @@ -141,9 +146,11 @@ class OozieServerDefault(OozieServer): def pre_rolling_restart(self, env): -Performs the tasks surrounding the Oozie startup when a rolling upgrade -is in progress. This includes backing up the configuration, updating -the database, preparing the WAR, and installing the sharelib in HDFS. +Performs the tasks that should be done before an upgrade of oozie. 
This includes: + - backing up configurations + - running hdp-select and conf-select + - restoring configurations + - preparing the libext directory :param env: :return: @@ -164,7 +171,6 @@ class OozieServerDefault(OozieServer): OozieUpgrade.restore_configuration() OozieUpgrade.prepare_libext_directory() -OozieUpgrade.prepare_warfile() @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY) http://git-wip-us.apache.org/repos/asf/ambari/blob/85c68806/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
ambari git commit: AMBARI-12385 - Oozie Service Fails After Downgrade Because Of Missing Falcon JAR In WAR (jonathanhurley)
Repository: ambari Updated Branches: refs/heads/trunk 2f68125ea - afdb7f9df AMBARI-12385 - Oozie Service Fails After Downgrade Because Of Missing Falcon JAR In WAR (jonathanhurley) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/afdb7f9d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/afdb7f9d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/afdb7f9d Branch: refs/heads/trunk Commit: afdb7f9df508acfcc958dca15408b6356a4c0fc0 Parents: 2f68125 Author: Jonathan Hurley jhur...@hortonworks.com Authored: Fri Jul 10 23:19:06 2015 -0400 Committer: Jonathan Hurley jhur...@hortonworks.com Committed: Sat Jul 11 01:40:37 2015 -0400 -- .../OOZIE/4.0.0.2.0/package/scripts/oozie.py| 8 +- .../4.0.0.2.0/package/scripts/oozie_server.py | 16 +- .../package/scripts/oozie_server_upgrade.py | 53 +-- .../4.0.0.2.0/package/scripts/params_linux.py | 12 +- .../4.0.0.2.0/package/scripts/status_params.py | 3 +- .../stacks/2.0.6/OOZIE/test_oozie_server.py | 147 +-- .../stacks/2.2/configs/oozie-downgrade.json | 3 - .../stacks/2.2/configs/oozie-upgrade.json | 3 - 8 files changed, 172 insertions(+), 73 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/afdb7f9d/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py -- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py index 7055255..35df33a 100644 --- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py +++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py @@ -226,11 +226,11 @@ def oozie_server_specific(): #falcon el extension if params.has_falcon_host: Execute(format('{sudo} cp {falcon_home}/oozie/ext/falcon-oozie-el-extension-*.jar {oozie_libext_dir}'), - not_if = no_op_test, -) + not_if 
= no_op_test) + Execute(format('{sudo} chown {oozie_user}:{user_group} {oozie_libext_dir}/falcon-oozie-el-extension-*.jar'), - not_if = no_op_test, -) + not_if = no_op_test) + if params.lzo_enabled and len(params.all_lzo_packages) 0: Package(params.all_lzo_packages) Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'), http://git-wip-us.apache.org/repos/asf/ambari/blob/afdb7f9d/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py -- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py index c9bdd78..81d8c0c 100644 --- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py +++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py @@ -53,9 +53,14 @@ class OozieServer(Script): def start(self, env, rolling_restart=False): import params env.set_params(params) -#TODO remove this when config command will be implemented + self.configure(env) +# preparing the WAR file must run after configure since configure writes out +# oozie-env.sh which is needed to have the right environment directories setup! +if rolling_restart is True: + OozieUpgrade.prepare_warfile() + oozie_service(action='start', rolling_restart=rolling_restart) def stop(self, env, rolling_restart=False): @@ -141,9 +146,11 @@ class OozieServerDefault(OozieServer): def pre_rolling_restart(self, env): -Performs the tasks surrounding the Oozie startup when a rolling upgrade -is in progress. This includes backing up the configuration, updating -the database, preparing the WAR, and installing the sharelib in HDFS. +Performs the tasks that should be done before an upgrade of oozie. 
This includes: + - backing up configurations + - running hdp-select and conf-select + - restoring configurations + - preparing the libext directory :param env: :return: @@ -164,7 +171,6 @@ class OozieServerDefault(OozieServer): OozieUpgrade.restore_configuration() OozieUpgrade.prepare_libext_directory() -OozieUpgrade.prepare_warfile() @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY) http://git-wip-us.apache.org/repos/asf/ambari/blob/afdb7f9d/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
ambari git commit: AMBARI-12372 Utils config small refactor. (ababiichuk)
Repository: ambari Updated Branches: refs/heads/trunk 4fbab311d - ac4d262f4 AMBARI-12372 Utils config small refactor. (ababiichuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ac4d262f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ac4d262f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ac4d262f Branch: refs/heads/trunk Commit: ac4d262f4a0d15351f1eeaa6abdb8c8c2b8b81db Parents: 4fbab31 Author: aBabiichuk ababiic...@cybervisiontech.com Authored: Fri Jul 10 17:44:16 2015 +0300 Committer: aBabiichuk ababiic...@cybervisiontech.com Committed: Fri Jul 10 18:19:22 2015 +0300 -- .../main/admin/serviceAccounts_controller.js| 4 +- .../configs/stack_config_properties_mapper.js | 8 +- ambari-web/app/utils/config.js | 321 +- ambari-web/test/utils/config_test.js| 325 ++- 4 files changed, 254 insertions(+), 404 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ac4d262f/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js -- diff --git a/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js b/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js index 3fcb1df..1d72d0c 100644 --- a/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js +++ b/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js @@ -114,8 +114,8 @@ App.MainAdminServiceAccountsController = App.MainServiceInfoConfigsController.ex * @param {Object[]} advancedConfigs */ createConfigObject: function(serverConfigs, advancedConfigs) { -var configSet = App.config.mergePreDefinedWithLoaded(serverConfigs, advancedConfigs, this.get('serviceConfigTags'), this.get('selectedService')); -var miscConfigs = configSet.configs.filterProperty('serviceName', this.get('selectedService')).filterProperty('category', 'Users and Groups').filterProperty('isVisible', true).rejectProperty('displayType', 'password').rejectProperty('displayType', 'checkbox'); +var 
configs = App.config.mergePredefinedWithSaved(serverConfigs, advancedConfigs, this.get('selectedService')); +var miscConfigs = configs.filterProperty('serviceName', this.get('selectedService')).filterProperty('category', 'Users and Groups').filterProperty('isVisible', true).rejectProperty('displayType', 'password').rejectProperty('displayType', 'checkbox'); miscConfigs = App.config.miscConfigVisibleProperty(miscConfigs, App.Service.find().mapProperty('serviceName').concat('MISC')); http://git-wip-us.apache.org/repos/asf/ambari/blob/ac4d262f/ambari-web/app/mappers/configs/stack_config_properties_mapper.js -- diff --git a/ambari-web/app/mappers/configs/stack_config_properties_mapper.js b/ambari-web/app/mappers/configs/stack_config_properties_mapper.js index 5913943..8315565 100644 --- a/ambari-web/app/mappers/configs/stack_config_properties_mapper.js +++ b/ambari-web/app/mappers/configs/stack_config_properties_mapper.js @@ -83,7 +83,7 @@ App.stackConfigPropertiesMapper = App.QuickDataMapper.create({ console.timeEnd('stackConfigMapper execution time'); }, - /*** METHODS TO MERGE STACK PROPERTIES WITH STORED ON UI (NOT USED FOR NOW)*/ + /*** METHODS TO MERGE STACK PROPERTIES WITH STORED ON UI */ /** * find UI config with current name and fileName @@ -98,7 +98,11 @@ App.stackConfigPropertiesMapper = App.QuickDataMapper.create({ config.StackConfigurations.property_display_name = uiConfigProperty uiConfigProperty.displayName ? uiConfigProperty.displayName : config.StackConfigurations.property_name; } config.category = uiConfigProperty ? uiConfigProperty.category : 'Advanced ' + App.config.getConfigTagFromFileName(config.StackConfigurations.type); -config.display_type = uiConfigProperty ? uiConfigProperty.displayType || displayType : displayType; +if (App.config.isContentProperty(config.StackConfigurations.property_name, config.StackConfigurations.type)) { + config.display_type = 'content'; +} else { + config.display_type = uiConfigProperty ? 
uiConfigProperty.displayType || displayType : displayType; +} }, /** http://git-wip-us.apache.org/repos/asf/ambari/blob/ac4d262f/ambari-web/app/utils/config.js -- diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js index 7bb9670..b209526 100644 --- a/ambari-web/app/utils/config.js +++ b/ambari-web/app/utils/config.js @@ -185,24 +185,6 @@ App.config =
ambari git commit: AMBARI-12369 - if audit to DB is made true for hdfs then namenode is not able to connect to mysql DB (jonathanhurley)
Repository: ambari Updated Branches: refs/heads/branch-2.1 c4e9c5077 - 0ebf58bfc AMBARI-12369 - if audit to DB is made true for hdfs then namenode is not able to connect to mysql DB (jonathanhurley) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0ebf58bf Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0ebf58bf Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0ebf58bf Branch: refs/heads/branch-2.1 Commit: 0ebf58bfc1e6f9ad0e780f11b3e3b849d331d3d0 Parents: c4e9c50 Author: Jonathan Hurley jhur...@hortonworks.com Authored: Thu Jul 9 23:29:32 2015 -0400 Committer: Jonathan Hurley jhur...@hortonworks.com Committed: Fri Jul 10 11:42:59 2015 -0400 -- .../stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml | 2 ++ 1 file changed, 2 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/0ebf58bf/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml index 73e46a6..3b332ad 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml @@ -142,6 +142,8 @@ do JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile done +export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${JAVA_JDBC_LIBS} + # Setting path to hdfs command line export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
[2/2] ambari git commit: AMBARI-12371. Downloaded client configuration contains strange folder (aonishuk)
AMBARI-12371. Downloaded client configuration contains strange folder (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c4e9c507 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c4e9c507 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c4e9c507 Branch: refs/heads/branch-2.1 Commit: c4e9c50772db7e78ee5e5ba04f33ecb92f238042 Parents: 14ab4bf Author: Andrew Onishuk aonis...@hortonworks.com Authored: Fri Jul 10 18:08:31 2015 +0300 Committer: Andrew Onishuk aonis...@hortonworks.com Committed: Fri Jul 10 18:08:31 2015 +0300 -- .../resource_management/libraries/script/script.py| 14 -- .../test/python/stacks/2.0.6/HDFS/test_hdfs_client.py | 8 +--- 2 files changed, 17 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/c4e9c507/ambari-common/src/main/python/resource_management/libraries/script/script.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py index c422ac8..26fd158 100644 --- a/ambari-common/src/main/python/resource_management/libraries/script/script.py +++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py @@ -25,6 +25,7 @@ import os import sys import logging import platform +import tarfile from ambari_commons import OSCheck, OSConst from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl from resource_management.libraries.resources import XmlConfig @@ -40,6 +41,7 @@ from resource_management.libraries.functions.version import compare_versions from resource_management.libraries.functions.version import format_hdp_stack_version from resource_management.libraries.script.config_dictionary import ConfigDictionary, UnknownConfiguration from resource_management.core.resources.system import Execute +from contextlib import closing import ambari_simplejson as json # simplejson is 
much faster comparing to Python 2.6 json module and has the same functions set. @@ -536,18 +538,26 @@ class Script(object): for filename, dict in file_dict.iteritems(): XmlConfig(filename, conf_dir=conf_tmp_dir, +mode=0644, **self.generate_configs_get_xml_file_content(filename, dict) ) for file_dict in env_configs_list: for filename,dicts in file_dict.iteritems(): File(os.path.join(conf_tmp_dir, filename), + mode=0644, content=InlineTemplate(self.generate_configs_get_template_file_content(filename, dicts))) for file_dict in properties_configs_list: for filename, dict in file_dict.iteritems(): PropertiesFile(os.path.join(conf_tmp_dir, filename), -properties=self.generate_configs_get_xml_file_dict(filename, dict) + mode=0644, + properties=self.generate_configs_get_xml_file_dict(filename, dict) ) - archive_dir(output_filename, conf_tmp_dir) + with closing(tarfile.open(output_filename, w:gz)) as tar: +try: + tar.add(conf_tmp_dir, arcname=os.path.basename(.)) +finally: + tar.close() + finally: Directory(conf_tmp_dir, action=delete) http://git-wip-us.apache.org/repos/asf/ambari/blob/c4e9c507/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py index 06d94e9..4948d01 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py @@ -48,25 +48,27 @@ class Test(RMFTestCase): ) self.assertResourceCalled('XmlConfig', 'hdfs-site.xml', conf_dir = '/tmp/123', + mode=0644, configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site'], configurations = self.getConfig()['configurations']['hdfs-site'], ) self.assertResourceCalled('File', '/tmp/123/hadoop-env.sh', + mode=0644, content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']), ) self.assertResourceCalled('File', 
'/tmp/123/log4j.properties', + mode=0644,
[1/2] ambari git commit: AMBARI-12371. Downloaded client configuration contains strange folder (aonishuk)
Repository: ambari Updated Branches: refs/heads/branch-2.1 14ab4bf7e - c4e9c5077 refs/heads/trunk f438398fd - 4fbab311d AMBARI-12371. Downloaded client configuration contains strange folder (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4fbab311 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4fbab311 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4fbab311 Branch: refs/heads/trunk Commit: 4fbab311df79264e4bbc65e7b5d546409f0a3622 Parents: f438398 Author: Andrew Onishuk aonis...@hortonworks.com Authored: Fri Jul 10 18:08:28 2015 +0300 Committer: Andrew Onishuk aonis...@hortonworks.com Committed: Fri Jul 10 18:08:28 2015 +0300 -- .../resource_management/libraries/script/script.py| 14 -- .../test/python/stacks/2.0.6/HDFS/test_hdfs_client.py | 8 +--- 2 files changed, 17 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/4fbab311/ambari-common/src/main/python/resource_management/libraries/script/script.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py index c422ac8..26fd158 100644 --- a/ambari-common/src/main/python/resource_management/libraries/script/script.py +++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py @@ -25,6 +25,7 @@ import os import sys import logging import platform +import tarfile from ambari_commons import OSCheck, OSConst from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl from resource_management.libraries.resources import XmlConfig @@ -40,6 +41,7 @@ from resource_management.libraries.functions.version import compare_versions from resource_management.libraries.functions.version import format_hdp_stack_version from resource_management.libraries.script.config_dictionary import ConfigDictionary, UnknownConfiguration from 
resource_management.core.resources.system import Execute +from contextlib import closing import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set. @@ -536,18 +538,26 @@ class Script(object): for filename, dict in file_dict.iteritems(): XmlConfig(filename, conf_dir=conf_tmp_dir, +mode=0644, **self.generate_configs_get_xml_file_content(filename, dict) ) for file_dict in env_configs_list: for filename,dicts in file_dict.iteritems(): File(os.path.join(conf_tmp_dir, filename), + mode=0644, content=InlineTemplate(self.generate_configs_get_template_file_content(filename, dicts))) for file_dict in properties_configs_list: for filename, dict in file_dict.iteritems(): PropertiesFile(os.path.join(conf_tmp_dir, filename), -properties=self.generate_configs_get_xml_file_dict(filename, dict) + mode=0644, + properties=self.generate_configs_get_xml_file_dict(filename, dict) ) - archive_dir(output_filename, conf_tmp_dir) + with closing(tarfile.open(output_filename, w:gz)) as tar: +try: + tar.add(conf_tmp_dir, arcname=os.path.basename(.)) +finally: + tar.close() + finally: Directory(conf_tmp_dir, action=delete) http://git-wip-us.apache.org/repos/asf/ambari/blob/4fbab311/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py index 06d94e9..4948d01 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py @@ -48,25 +48,27 @@ class Test(RMFTestCase): ) self.assertResourceCalled('XmlConfig', 'hdfs-site.xml', conf_dir = '/tmp/123', + mode=0644, configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site'], configurations = self.getConfig()['configurations']['hdfs-site'], ) self.assertResourceCalled('File', '/tmp/123/hadoop-env.sh', + 
mode=0644, content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
ambari git commit: AMBARI-12369 - if audit to DB is made true for hdfs then namenode is not able to connect to mysql DB (jonathanhurley)
Repository: ambari Updated Branches: refs/heads/trunk ac4d262f4 - 8e5c54076 AMBARI-12369 - if audit to DB is made true for hdfs then namenode is not able to connect to mysql DB (jonathanhurley) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8e5c5407 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8e5c5407 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8e5c5407 Branch: refs/heads/trunk Commit: 8e5c54076dd859a8f9a0b2f098c3012b95243e58 Parents: ac4d262 Author: Jonathan Hurley jhur...@hortonworks.com Authored: Thu Jul 9 23:29:32 2015 -0400 Committer: Jonathan Hurley jhur...@hortonworks.com Committed: Fri Jul 10 11:41:58 2015 -0400 -- .../stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml | 2 ++ 1 file changed, 2 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/8e5c5407/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml index 73e46a6..3b332ad 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml @@ -142,6 +142,8 @@ do JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile done +export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${JAVA_JDBC_LIBS} + # Setting path to hdfs command line export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
ambari git commit: AMBARI-12276. Oozie running with Pig fails to renew JHS delegation token when RU because JHS recovery is not enabled
Repository: ambari Updated Branches: refs/heads/trunk 2c5073a43 - f438398fd AMBARI-12276. Ozzie running with pig fail to renew JHS delegation token when RU because JHS recovery is not enabled Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f438398f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f438398f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f438398f Branch: refs/heads/trunk Commit: f438398fd76741b01f39cbed3bba58aa1d9a4bc5 Parents: 2c5073a Author: Sumit Mohanty smoha...@hortonworks.com Authored: Fri Jul 10 06:26:31 2015 -0700 Committer: Sumit Mohanty smoha...@hortonworks.com Committed: Fri Jul 10 06:26:31 2015 -0700 -- .../ambari/server/checks/CheckDescription.java | 11 ++ ...apReduce2JobHistoryStatePreservingCheck.java | 151 + .../2.1.0.2.0/package/scripts/params_linux.py | 1 + .../YARN/2.1.0.2.0/package/scripts/yarn.py | 6 + .../YARN/configuration-mapred/mapred-site.xml | 50 ++ ...duce2JobHistoryStatePreservingCheckTest.java | 163 +++ .../stacks/2.0.6/YARN/test_historyserver.py | 12 ++ 7 files changed, 394 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/f438398f/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java index 0cd5352..5cfbb47 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java @@ -152,6 +152,17 @@ public enum CheckDescription { YARN should have state preserving restart enabled for the Timeline server. 
The yarn-site.xml property yarn.timeline-service.recovery.enabled should be set to true.); }}), + SERVICES_MR2_JOBHISTORY_ST(PrereqCheckType.SERVICE, + MapReduce2 JobHistory recovery should be enabled, + new HashMapString, String() {{ + put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_ENABLE_KEY, + MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.enable should be set to true.); + put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_STORE_KEY, + MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.store.class should be set to org.apache.hadoop.mapreduce.v2.hs.HistoryServerLeveldbStateStoreService.); + put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_STORE_LEVELDB_PATH_KEY, + MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.store.leveldb.path should be set. 
Please note that \mapreduce.jobhistory.recovery.store.leveldb.path\ should be on a mount with ~3 GB of free space.); + }}), + SERVICES_HIVE_DYNAMIC_SERVICE_DISCOVERY(PrereqCheckType.SERVICE, Hive Dynamic Service Discovery, new HashMapString, String() {{ http://git-wip-us.apache.org/repos/asf/ambari/blob/f438398f/ambari-server/src/main/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheck.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheck.java new file mode 100644 index 000..5f02c4f --- /dev/null +++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheck.java @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * License); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */
ambari git commit: AMBARI-12276. Oozie running with Pig fails to renew JHS delegation token when RU because JHS recovery is not enabled
Repository: ambari Updated Branches: refs/heads/branch-2.1 38493e4d4 - 14ab4bf7e AMBARI-12276. Ozzie running with pig fail to renew JHS delegation token when RU because JHS recovery is not enabled Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/14ab4bf7 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/14ab4bf7 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/14ab4bf7 Branch: refs/heads/branch-2.1 Commit: 14ab4bf7ea915cd156a0046f8e52f38fe652ad87 Parents: 38493e4 Author: Sumit Mohanty smoha...@hortonworks.com Authored: Fri Jul 10 06:26:31 2015 -0700 Committer: Sumit Mohanty smoha...@hortonworks.com Committed: Fri Jul 10 06:27:09 2015 -0700 -- .../ambari/server/checks/CheckDescription.java | 11 ++ ...apReduce2JobHistoryStatePreservingCheck.java | 151 + .../2.1.0.2.0/package/scripts/params_linux.py | 1 + .../YARN/2.1.0.2.0/package/scripts/yarn.py | 6 + .../YARN/configuration-mapred/mapred-site.xml | 50 ++ ...duce2JobHistoryStatePreservingCheckTest.java | 163 +++ .../stacks/2.0.6/YARN/test_historyserver.py | 12 ++ 7 files changed, 394 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/14ab4bf7/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java index 0cd5352..5cfbb47 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java @@ -152,6 +152,17 @@ public enum CheckDescription { YARN should have state preserving restart enabled for the Timeline server. 
The yarn-site.xml property yarn.timeline-service.recovery.enabled should be set to true.); }}), + SERVICES_MR2_JOBHISTORY_ST(PrereqCheckType.SERVICE, + MapReduce2 JobHistory recovery should be enabled, + new HashMapString, String() {{ + put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_ENABLE_KEY, + MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.enable should be set to true.); + put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_STORE_KEY, + MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.store.class should be set to org.apache.hadoop.mapreduce.v2.hs.HistoryServerLeveldbStateStoreService.); + put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_STORE_LEVELDB_PATH_KEY, + MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.store.leveldb.path should be set. 
Please note that \mapreduce.jobhistory.recovery.store.leveldb.path\ should be on a mount with ~3 GB of free space.); + }}), + SERVICES_HIVE_DYNAMIC_SERVICE_DISCOVERY(PrereqCheckType.SERVICE, Hive Dynamic Service Discovery, new HashMapString, String() {{ http://git-wip-us.apache.org/repos/asf/ambari/blob/14ab4bf7/ambari-server/src/main/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheck.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheck.java new file mode 100644 index 000..5f02c4f --- /dev/null +++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheck.java @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * License); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the
[1/2] ambari git commit: AMBARI-12375. RU fails for ZooKeeper only cluster (dlysnichenko)
Repository: ambari Updated Branches: refs/heads/branch-2.1 0ebf58bfc - 45de5f923 refs/heads/trunk 8e5c54076 - eda2f90bb AMBARI-12375. RU fails for ZooKeeper only cluster (dlysnichenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/45de5f92 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/45de5f92 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/45de5f92 Branch: refs/heads/branch-2.1 Commit: 45de5f923208a3c65951a5c925fc8a33bc79218a Parents: 0ebf58b Author: Lisnichenko Dmitro dlysniche...@hortonworks.com Authored: Fri Jul 10 20:58:45 2015 +0300 Committer: Lisnichenko Dmitro dlysniche...@hortonworks.com Committed: Fri Jul 10 20:58:45 2015 +0300 -- .../state/stack/upgrade/ClusterGrouping.java| 1 - .../2.0.6/hooks/after-INSTALL/scripts/params.py | 4 +++- .../HDP/2.0.6/hooks/before-ANY/scripts/hook.py | 3 ++- .../2.0.6/hooks/before-ANY/scripts/params.py| 4 +++- .../2.0.6/hooks/before-START/scripts/hook.py| 3 ++- .../2.0.6/hooks/before-START/scripts/params.py | 7 +++ .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml | 20 ++-- 7 files changed, 23 insertions(+), 19 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/45de5f92/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java index 5b6bc50..ad84210 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java @@ -128,7 +128,6 @@ public class ClusterGrouping extends Grouping { wrapper = getManualStageWrapper(ctx, execution); break; -case CONFIGURE: case SERVER_ACTION: wrapper = new StageWrapper( StageWrapper.Type.SERVER_SIDE_ACTION, 
http://git-wip-us.apache.org/repos/asf/ambari/blob/45de5f92/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py index 11de040..7891a27 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py @@ -37,7 +37,6 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) # default hadoop params mapreduce_libs_path = /usr/lib/hadoop-mapreduce/* hadoop_libexec_dir = hdp_select.get_hadoop_dir(libexec) -hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True) hadoop_conf_empty_dir = /etc/hadoop/conf.empty # HDP 2.2+ params @@ -88,3 +87,6 @@ user_group = config['configurations']['cluster-env']['user_group'] namenode_host = default(/clusterHostInfo/namenode_host, []) has_namenode = not len(namenode_host) == 0 + +if has_namenode: + hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True) http://git-wip-us.apache.org/repos/asf/ambari/blob/45de5f92/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py index 1fd36d6..bb6eda4 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py @@ -28,7 +28,8 @@ class BeforeAnyHook(Hook): setup_jce() setup_users() -setup_hadoop_env() +if params.has_namenode: + setup_hadoop_env() if __name__ == __main__: BeforeAnyHook().execute() 
http://git-wip-us.apache.org/repos/asf/ambari/blob/45de5f92/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py index e32f750..d3b3580 100644 ---
[2/2] ambari git commit: AMBARI-12375. RU fails for ZooKeeper only cluster (dlysnichenko)
AMBARI-12375. RU fails for ZooKeeper only cluster (dlysnichenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eda2f90b Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eda2f90b Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eda2f90b Branch: refs/heads/trunk Commit: eda2f90bbd745cebacbbb1c414c1652ba282cb3c Parents: 8e5c540 Author: Lisnichenko Dmitro dlysniche...@hortonworks.com Authored: Fri Jul 10 20:58:45 2015 +0300 Committer: Lisnichenko Dmitro dlysniche...@hortonworks.com Committed: Fri Jul 10 20:59:50 2015 +0300 -- .../state/stack/upgrade/ClusterGrouping.java| 1 - .../2.0.6/hooks/after-INSTALL/scripts/params.py | 4 +++- .../HDP/2.0.6/hooks/before-ANY/scripts/hook.py | 3 ++- .../2.0.6/hooks/before-ANY/scripts/params.py| 4 +++- .../2.0.6/hooks/before-START/scripts/hook.py| 3 ++- .../2.0.6/hooks/before-START/scripts/params.py | 7 +++ .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml | 20 ++-- 7 files changed, 23 insertions(+), 19 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java index 5b6bc50..ad84210 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java @@ -128,7 +128,6 @@ public class ClusterGrouping extends Grouping { wrapper = getManualStageWrapper(ctx, execution); break; -case CONFIGURE: case SERVER_ACTION: wrapper = new StageWrapper( StageWrapper.Type.SERVER_SIDE_ACTION, 
http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py index 11de040..7891a27 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py @@ -37,7 +37,6 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) # default hadoop params mapreduce_libs_path = /usr/lib/hadoop-mapreduce/* hadoop_libexec_dir = hdp_select.get_hadoop_dir(libexec) -hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True) hadoop_conf_empty_dir = /etc/hadoop/conf.empty # HDP 2.2+ params @@ -88,3 +87,6 @@ user_group = config['configurations']['cluster-env']['user_group'] namenode_host = default(/clusterHostInfo/namenode_host, []) has_namenode = not len(namenode_host) == 0 + +if has_namenode: + hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True) http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py index a90c3b5..864b222 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py @@ -27,7 +27,8 @@ class BeforeAnyHook(Hook): env.set_params(params) setup_users() -setup_hadoop_env() +if params.has_namenode: + setup_hadoop_env() if __name__ == __main__: BeforeAnyHook().execute() 
http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py index 26a2f28..602f630 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py @@ -84,7
ambari git commit: AMBARI-12377. Installer wizard: webhcat server start fails (Eugene Chekanskiy via ncole)
Repository: ambari Updated Branches: refs/heads/trunk eda2f90bb - 9e4e95f54 AMBARI-12377. Installer wizard: webhcat server start fails (Eugene Chekanskiy via ncole) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9e4e95f5 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9e4e95f5 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9e4e95f5 Branch: refs/heads/trunk Commit: 9e4e95f54aea52c05dc471c2d5529a6b73a7f31c Parents: eda2f90 Author: Nate Cole nc...@hortonworks.com Authored: Fri Jul 10 14:24:37 2015 -0400 Committer: Nate Cole nc...@hortonworks.com Committed: Fri Jul 10 14:24:37 2015 -0400 -- .../HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py | 9 ++--- .../HIVE/0.12.0.2.0/package/scripts/webhcat.py | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/9e4e95f5/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py index a956c6c..222a8c6 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py @@ -22,15 +22,18 @@ from resource_management.libraries.resources.properties_file import PropertiesFi from resource_management.core.resources.packaging import Package from resource_management.libraries.functions.format import format -def setup_atlas_hive(): +def setup_atlas_hive(configuration_directory=None): import params - + if params.has_atlas: +if configuration_directory is None: + configuration_directory = format({hive_config_dir}) + if not params.host_sys_prepped: Package(params.atlas_plugin_package, # 
FIXME HACK: install the package during RESTART/START when install_packages is not triggered. ) -PropertiesFile(format('{hive_config_dir}/client.properties'), +PropertiesFile(format('{configuration_directory}/client.properties'), properties = params.atlas_client_props, owner = params.hive_user, group = params.user_group, http://git-wip-us.apache.org/repos/asf/ambari/blob/9e4e95f5/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py index e86b02c..c7b1e10 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py @@ -115,4 +115,4 @@ def webhcat(): content=StaticFile(format({config_dir}/{log4j_webhcat_filename}.template)) ) - setup_atlas_hive() + setup_atlas_hive(configuration_directory=params.config_dir)
ambari git commit: AMBARI-12377. Installer wizard: webhcat server start fails (Eugene Chekanskiy via ncole)
Repository: ambari Updated Branches: refs/heads/branch-2.1 45de5f923 - 8905ec1dd AMBARI-12377. Installer wizard: webhcat server start fails (Eugene Chekanskiy via ncole) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8905ec1d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8905ec1d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8905ec1d Branch: refs/heads/branch-2.1 Commit: 8905ec1dd06a80e7d5c257c657ad3d97a679f886 Parents: 45de5f9 Author: Nate Cole nc...@hortonworks.com Authored: Fri Jul 10 14:25:46 2015 -0400 Committer: Nate Cole nc...@hortonworks.com Committed: Fri Jul 10 14:25:46 2015 -0400 -- .../HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py | 9 ++--- .../HIVE/0.12.0.2.0/package/scripts/webhcat.py | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/8905ec1d/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py index a956c6c..222a8c6 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py @@ -22,15 +22,18 @@ from resource_management.libraries.resources.properties_file import PropertiesFi from resource_management.core.resources.packaging import Package from resource_management.libraries.functions.format import format -def setup_atlas_hive(): +def setup_atlas_hive(configuration_directory=None): import params - + if params.has_atlas: +if configuration_directory is None: + configuration_directory = format({hive_config_dir}) + if not params.host_sys_prepped: 
Package(params.atlas_plugin_package, # FIXME HACK: install the package during RESTART/START when install_packages is not triggered. ) -PropertiesFile(format('{hive_config_dir}/client.properties'), +PropertiesFile(format('{configuration_directory}/client.properties'), properties = params.atlas_client_props, owner = params.hive_user, group = params.user_group, http://git-wip-us.apache.org/repos/asf/ambari/blob/8905ec1d/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py index e86b02c..c7b1e10 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py @@ -115,4 +115,4 @@ def webhcat(): content=StaticFile(format({config_dir}/{log4j_webhcat_filename}.template)) ) - setup_atlas_hive() + setup_atlas_hive(configuration_directory=params.config_dir)
ambari git commit: AMBARI-12378. Widget display goes on and off at service dashboard for Number and Graph widgets using same metrics.
Repository: ambari Updated Branches: refs/heads/branch-2.1 8905ec1dd - c4ba56565 AMBARI-12378. Widget display goes on and off at service dashboard for Number and Graph widgets using same metrics. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c4ba5656 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c4ba5656 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c4ba5656 Branch: refs/heads/branch-2.1 Commit: c4ba56565d1c994c3f7bb2d25810e8cf4bff1148 Parents: 8905ec1 Author: Jaimin Jetly jai...@hortonworks.com Authored: Fri Jul 10 12:56:30 2015 -0700 Committer: Jaimin Jetly jai...@hortonworks.com Committed: Fri Jul 10 12:56:30 2015 -0700 -- .../app/mixins/common/widgets/widget_mixin.js | 4 ++- .../test/mixins/common/widget_mixin_test.js | 30 2 files changed, 27 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/c4ba5656/ambari-web/app/mixins/common/widgets/widget_mixin.js -- diff --git a/ambari-web/app/mixins/common/widgets/widget_mixin.js b/ambari-web/app/mixins/common/widgets/widget_mixin.js index 961ce7d..ac2c6e3 100644 --- a/ambari-web/app/mixins/common/widgets/widget_mixin.js +++ b/ambari-web/app/mixins/common/widgets/widget_mixin.js @@ -702,7 +702,9 @@ App.WidgetLoadAggregator = Em.Object.create({ var bulks = {}; requests.forEach(function (request) { - var id = request.startCallName + _ + request.data.component_name; + //poll metrics for graph widgets separately + var graphSuffix = request.context.get('content.widgetType') === GRAPH ? 
_graph : ''; + var id = request.startCallName + _ + request.data.component_name + graphSuffix; if (Em.isNone(bulks[id])) { bulks[id] = { http://git-wip-us.apache.org/repos/asf/ambari/blob/c4ba5656/ambari-web/test/mixins/common/widget_mixin_test.js -- diff --git a/ambari-web/test/mixins/common/widget_mixin_test.js b/ambari-web/test/mixins/common/widget_mixin_test.js index 1881d98..f0f663c 100644 --- a/ambari-web/test/mixins/common/widget_mixin_test.js +++ b/ambari-web/test/mixins/common/widget_mixin_test.js @@ -419,7 +419,11 @@ describe('App.WidgetLoadAggregator', function () { component_name: 'C1', metric_paths: ['m1'] }, - context: 'c1' + context: Em.Object.create({ +content: { + widgetType: 'GRAPH' +} + }) }, { startCallName: 'n1', @@ -427,7 +431,11 @@ describe('App.WidgetLoadAggregator', function () { component_name: 'C1', metric_paths: ['m2'] }, - context: 'c2' + context: Em.Object.create({ +content: { + widgetType: 'NUMBER' +} + }) }, { startCallName: 'n2', @@ -435,7 +443,11 @@ describe('App.WidgetLoadAggregator', function () { component_name: 'C1', metric_paths: ['m3'] }, - context: 'c3' + context: Em.Object.create({ +content: { + widgetType: 'TEMPLATE' +} + }) }, { startCallName: 'n1', @@ -443,13 +455,19 @@ describe('App.WidgetLoadAggregator', function () { component_name: 'C2', metric_paths: ['m4'] }, - context: 'c4' + context: Em.Object.create({ +content: { + widgetType: 'GAUGE' +} + }) } ]; var result = aggregator.groupRequests(requests); - expect(result['n1_C1'].subRequests.length).to.equal(2); - expect(result['n1_C1'].data.metric_paths.length).to.equal(2); + expect(result['n1_C1'].subRequests.length).to.equal(1); + expect(result['n1_C1'].data.metric_paths.length).to.equal(1); + expect(result['n1_C1_graph'].subRequests.length).to.equal(1); + expect(result['n1_C1_graph'].data.metric_paths.length).to.equal(1); expect(result['n2_C1'].subRequests.length).to.equal(1); expect(result['n2_C1'].data.metric_paths.length).to.equal(1); 
expect(result['n1_C2'].subRequests.length).to.equal(1);
ambari git commit: AMBARI-12378. Widget display goes on and off at the service dashboard for Number and Graph widgets using the same metrics.
Repository: ambari Updated Branches: refs/heads/trunk 9e4e95f54 - 7d0fd97cc AMBARI-12378. Widget display goes on and off at service dashboard for Number and Graph widgets using same metrics. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7d0fd97c Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7d0fd97c Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7d0fd97c Branch: refs/heads/trunk Commit: 7d0fd97cc697b22357f4c499b80b8baf9a1c1dbc Parents: 9e4e95f Author: Jaimin Jetly jai...@hortonworks.com Authored: Fri Jul 10 12:56:30 2015 -0700 Committer: Jaimin Jetly jai...@hortonworks.com Committed: Fri Jul 10 12:57:38 2015 -0700 -- .../app/mixins/common/widgets/widget_mixin.js | 4 ++- .../test/mixins/common/widget_mixin_test.js | 30 2 files changed, 27 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/7d0fd97c/ambari-web/app/mixins/common/widgets/widget_mixin.js -- diff --git a/ambari-web/app/mixins/common/widgets/widget_mixin.js b/ambari-web/app/mixins/common/widgets/widget_mixin.js index 5946e31..c96238b 100644 --- a/ambari-web/app/mixins/common/widgets/widget_mixin.js +++ b/ambari-web/app/mixins/common/widgets/widget_mixin.js @@ -702,7 +702,9 @@ App.WidgetLoadAggregator = Em.Object.create({ var bulks = {}; requests.forEach(function (request) { - var id = request.startCallName + _ + request.data.component_name; + //poll metrics for graph widgets separately + var graphSuffix = request.context.get('content.widgetType') === GRAPH ? 
_graph : ''; + var id = request.startCallName + _ + request.data.component_name + graphSuffix; if (Em.isNone(bulks[id])) { bulks[id] = { http://git-wip-us.apache.org/repos/asf/ambari/blob/7d0fd97c/ambari-web/test/mixins/common/widget_mixin_test.js -- diff --git a/ambari-web/test/mixins/common/widget_mixin_test.js b/ambari-web/test/mixins/common/widget_mixin_test.js index 1a70dc8..b9885cb 100644 --- a/ambari-web/test/mixins/common/widget_mixin_test.js +++ b/ambari-web/test/mixins/common/widget_mixin_test.js @@ -419,7 +419,11 @@ describe('App.WidgetLoadAggregator', function () { component_name: 'C1', metric_paths: ['m1'] }, - context: 'c1' + context: Em.Object.create({ +content: { + widgetType: 'GRAPH' +} + }) }, { startCallName: 'n1', @@ -427,7 +431,11 @@ describe('App.WidgetLoadAggregator', function () { component_name: 'C1', metric_paths: ['m2'] }, - context: 'c2' + context: Em.Object.create({ +content: { + widgetType: 'NUMBER' +} + }) }, { startCallName: 'n2', @@ -435,7 +443,11 @@ describe('App.WidgetLoadAggregator', function () { component_name: 'C1', metric_paths: ['m3'] }, - context: 'c3' + context: Em.Object.create({ +content: { + widgetType: 'TEMPLATE' +} + }) }, { startCallName: 'n1', @@ -443,13 +455,19 @@ describe('App.WidgetLoadAggregator', function () { component_name: 'C2', metric_paths: ['m4'] }, - context: 'c4' + context: Em.Object.create({ +content: { + widgetType: 'GAUGE' +} + }) } ]; var result = aggregator.groupRequests(requests); - expect(result['n1_C1'].subRequests.length).to.equal(2); - expect(result['n1_C1'].data.metric_paths.length).to.equal(2); + expect(result['n1_C1'].subRequests.length).to.equal(1); + expect(result['n1_C1'].data.metric_paths.length).to.equal(1); + expect(result['n1_C1_graph'].subRequests.length).to.equal(1); + expect(result['n1_C1_graph'].data.metric_paths.length).to.equal(1); expect(result['n2_C1'].subRequests.length).to.equal(1); expect(result['n2_C1'].data.metric_paths.length).to.equal(1); 
expect(result['n1_C2'].subRequests.length).to.equal(1);