Repository: ambari
Updated Branches:
  refs/heads/branch-2.2 5078781b6 -> 0780149ec
AMBARI-16106: Use additional flags for HAWQ Check (mithmatt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0780149e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0780149e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0780149e

Branch: refs/heads/branch-2.2
Commit: 0780149ec762e3762049d5d75c3633e9d6dda26b
Parents: 5078781
Author: Matt <[email protected]>
Authored: Fri Apr 29 17:50:02 2016 -0700
Committer: Matt <[email protected]>
Committed: Fri Apr 29 17:50:02 2016 -0700

----------------------------------------------------------------------
 .../HAWQ/2.0.0/package/scripts/hawqmaster.py  |  36 ++-
 .../HAWQ/2.0.0/package/scripts/params.py      |   4 +-
 .../python/stacks/2.3/HAWQ/test_hawqmaster.py | 225 ++++++++++++++++++-
 3 files changed, 246 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0780149e/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
index 91d4a25..18f17b2 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
@@ -22,7 +22,7 @@ from resource_management.core.resources.system import Execute
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions.default import default
 from resource_management.core.source import InlineTemplate
-from resource_management.libraries.functions import hdp_select as stack_select
+from resource_management.libraries.functions import hdp_select
 
 import master_helper
 import common
@@ -39,12 +39,14 @@ class HawqMaster(Script):
     self.install_packages(env)
     self.configure(env)
 
+
   def configure(self, env):
     import params
     env.set_params(params)
     env.set_params(hawq_constants)
     master_helper.configure_master()
 
+
   def start(self, env):
     import params
     self.configure(env)
@@ -56,18 +58,20 @@ class HawqMaster(Script):
     master_helper.setup_passwordless_ssh()
     common.start_component(hawq_constants.MASTER, params.hawq_master_address_port, params.hawq_master_dir)
 
+
   def stop(self, env):
-    import params
     common.stop_component(hawq_constants.MASTER, hawq_constants.FAST)
 
+
   def status(self, env):
     from hawqstatus import assert_component_running
     assert_component_running(hawq_constants.MASTER)
 
+
   def immediate_stop_hawq_service(self, env):
-    import params
     common.stop_component(hawq_constants.CLUSTER, hawq_constants.IMMEDIATE)
 
+
   def hawq_clear_cache(self, env):
     import params
     from utils import exec_psql_cmd
@@ -75,24 +79,42 @@ class HawqMaster(Script):
     Logger.info("Clearing HAWQ's HDFS Metadata cache ...")
     exec_psql_cmd(cmd, params.hawqmaster_host, params.hawq_master_address_port)
 
+
   def run_hawq_check(self, env):
     import params
     Logger.info("Executing HAWQ Check ...")
     params.File(hawq_constants.hawq_hosts_file, content=InlineTemplate("{% for host in hawq_all_hosts %}{{host}}\n{% endfor %}"))
-    Execute("source {0} && hawq check -f {1} --hadoop {2} --config {3}".format(hawq_constants.hawq_greenplum_path_file,
-                                                                               hawq_constants.hawq_hosts_file,
-                                                                               stack_select.get_hadoop_dir('home'),
-                                                                               hawq_constants.hawq_check_file),
+
+    additional_flags = list()
+
+    if params.dfs_nameservice:
+      additional_flags.append("--hdfs-ha")
+
+    if params.hawq_global_rm_type == "yarn":
+      yarn_option = "--yarn-ha" if params.is_yarn_ha_enabled else "--yarn"
+      additional_flags.append(yarn_option)
+
+    if str(params.security_enabled).lower() == "true":
+      additional_flags.append("--kerberos")
+
+    Execute("source {0} && hawq check -f {1} --hadoop {2} --config {3} {4}".format(hawq_constants.hawq_greenplum_path_file,
+                                                                                   hawq_constants.hawq_hosts_file,
+                                                                                   hdp_select.get_hadoop_dir('home'),
+                                                                                   hawq_constants.hawq_check_file,
+                                                                                   " ".join(additional_flags)),
             user=hawq_constants.hawq_user,
             timeout=hawq_constants.default_exec_timeout)
 
+
   def resync_hawq_standby(self,env):
     Logger.info("HAWQ Standby Master Re-Sync started in fast mode...")
     utils.exec_hawq_operation(hawq_constants.INIT, "{0} -n -a -v -M {1}".format(hawq_constants.STANDBY, hawq_constants.FAST))
 
+
   def remove_hawq_standby(self, env):
     Logger.info("Removing HAWQ Standby Master ...")
     utils.exec_hawq_operation(hawq_constants.INIT, "{0} -a -v -r --ignore-bad-hosts".format(hawq_constants.STANDBY))
 
+
 if __name__ == "__main__":
   HawqMaster().execute()


http://git-wip-us.apache.org/repos/asf/ambari/blob/0780149e/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index 4ccca85..11ddff4 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -66,6 +66,8 @@ kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executab
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)
 
+hawq_global_rm_type = default('/configurations/hawq-site/hawq_global_rm_type', None)
+
 # HDFSResource partial function
 HdfsResource = functools.partial(HdfsResource,
                                  user=hdfs_superuser,
@@ -118,7 +120,7 @@ table_definition = {
 # YARN
 # Note: YARN is not mandatory for HAWQ. It is required only when the users set HAWQ to use YARN as resource manager
 rm_host = __get_component_host('rm_host')
-yarn_ha_enabled = default('/configurations/yarn-site/yarn.resourcemanager.ha.enabled', False)
+is_yarn_ha_enabled = True if str(default('/configurations/yarn-site/yarn.resourcemanager.ha.enabled', False)).lower() == "true" else False
 
 # Config files
 hawq_check_content = config['configurations']['hawq-check-env']['content']


http://git-wip-us.apache.org/repos/asf/ambari/blob/0780149e/ambari-server/src/test/python/stacks/2.3/HAWQ/test_hawqmaster.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/HAWQ/test_hawqmaster.py b/ambari-server/src/test/python/stacks/2.3/HAWQ/test_hawqmaster.py
index e34a4c5..9e6ce75 100644
--- a/ambari-server/src/test/python/stacks/2.3/HAWQ/test_hawqmaster.py
+++ b/ambari-server/src/test/python/stacks/2.3/HAWQ/test_hawqmaster.py
@@ -18,9 +18,11 @@ See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import os, json
+import resource_management.libraries.functions
+
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
-import resource_management.libraries.functions
 
 
 @patch.object(resource_management.libraries.functions, 'check_process_status', new = MagicMock())
 class TestHawqMaster(RMFTestCase):
@@ -29,6 +31,16 @@ class TestHawqMaster(RMFTestCase):
   GPADMIN = 'gpadmin'
   POSTGRES = 'postgres'
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
+  CONFIG_FILE = os.path.join(os.path.dirname(__file__), '../configs/hawq_default.json')
+  HAWQ_CHECK_COMMAND = 'source /usr/local/hawq/greenplum_path.sh && hawq check -f /usr/local/hawq/etc/hawq_hosts --hadoop /usr/lib/hadoop --config /usr/local/hawq/etc/hawq_check.cnf '
+
+  def setUp(self):
+    try:
+      with open(self.CONFIG_FILE, "r") as f:
+        self.config_dict = json.load(f)
+    except IOError:
+      raise RuntimeError("Can not read config file: " + self.CONFIG_FILE)
+
 
 
   def __asserts_for_configure(self):
@@ -122,7 +134,7 @@ class TestHawqMaster(RMFTestCase):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + '/scripts/hawqmaster.py',
         classname = 'HawqMaster',
         command = 'configure',
-        config_file ='hawq_default.json',
+        config_dict = self.config_dict,
         hdp_stack_version = self.STACK_VERSION,
         target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -137,7 +149,7 @@ class TestHawqMaster(RMFTestCase):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + '/scripts/hawqmaster.py',
         classname = 'HawqMaster',
         command = 'install',
-        config_file ='hawq_default.json',
+        config_dict = self.config_dict,
         hdp_stack_version = self.STACK_VERSION,
         target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -152,7 +164,7 @@ class TestHawqMaster(RMFTestCase):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + '/scripts/hawqmaster.py',
         classname = 'HawqMaster',
         command = 'start',
-        config_file ='hawq_default.json',
+        config_dict = self.config_dict,
         hdp_stack_version = self.STACK_VERSION,
         target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -199,26 +211,217 @@ class TestHawqMaster(RMFTestCase):
     self.assertNoMoreResources()
 
+  def __asserts_for_stop(self, componentCommand, expectedCommand):
+
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + '/scripts/hawqmaster.py',
+        classname = 'HawqMaster',
+        command = componentCommand,
+        config_dict = self.config_dict,
+        hdp_stack_version = self.STACK_VERSION,
+        target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    self.assertResourceCalled('Execute', expectedCommand,
+        logoutput = True,
+        not_if = None,
+        only_if = "netstat -tupln | egrep ':5432\\s' | egrep postgres",
+        user = self.GPADMIN,
+        timeout = 900
+    )
+
+    self.assertNoMoreResources()
+
+
   @patch ('hawqmaster.common.__set_osparams')
   @patch ('common.get_local_hawq_site_property_value')
   def test_stop_default(self, get_local_hawq_site_property_value_mock, set_osparams_mock):
+    """ Run Stop HAWQMASTER """
+
+    get_local_hawq_site_property_value_mock.return_value = 5432
+    self.__asserts_for_stop('stop', 'source /usr/local/hawq/greenplum_path.sh && hawq stop master -M fast -a -v')
+
+
+  @patch ('hawqmaster.common.__set_osparams')
+  @patch ('common.get_local_hawq_site_property_value')
+  def test_stop_cluster_immediate(self, get_local_hawq_site_property_value_mock, set_osparams_mock):
+    """ Run Stop HAWQ Cluster Immediate Mode """
+    get_local_hawq_site_property_value_mock.return_value = 5432
+    self.__asserts_for_stop('immediate_stop_hawq_service','source /usr/local/hawq/greenplum_path.sh && hawq stop cluster -M immediate -a -v')
+
+
+  def __asserts_for_hawq_check(self, expectedCommand):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + '/scripts/hawqmaster.py',
         classname = 'HawqMaster',
-        command = 'stop',
-        config_file ='hawq_default.json',
+        command = 'run_hawq_check',
+        config_dict = self.config_dict,
         hdp_stack_version = self.STACK_VERSION,
         target = RMFTestCase.TARGET_COMMON_SERVICES
     )
 
-    self.assertResourceCalled('Execute', 'source /usr/local/hawq/greenplum_path.sh && hawq stop master -M fast -a -v',
-        logoutput = True,
-        not_if = None,
-        only_if = "netstat -tupln | egrep ':5432\\s' | egrep postgres",
+    self.assertResourceCalled('File', "/usr/local/hawq/etc/hawq_hosts",
+        content = InlineTemplate("{% for host in hawq_all_hosts %}{{host}}\n{% endfor %}"),
+        group = self.GPADMIN,
+        owner = self.GPADMIN,
+        mode = 0644
+    )
+
+    self.assertResourceCalled('Execute', expectedCommand,
+        user=self.GPADMIN,
+        timeout=600
+    )
+
+    self.assertNoMoreResources()
+
+
+  def test_run_hawq_check_case1(self):
+    """ Running HAWQ Check Case 1: Non HDFS-HA, Standalone Resource Management, Not Kerberized """
+
+    expectedCommand = self.HAWQ_CHECK_COMMAND
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case2(self):
+    """ Running HAWQ Check Case 2: Non HDFS-HA, Standalone Resource Management, Kerberized """
+
+    self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
+    expectedCommand = "{0}--kerberos".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case3(self):
+    """ Running HAWQ Check Case 3: Non HDFS-HA, YARN Resource Management Non YARN_HA, Not Kerberized """
+
+    self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
+    expectedCommand = "{0}--yarn".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case4(self):
+    """ Running HAWQ Check Case 4: Non HDFS-HA, YARN Resource Management Non YARN_HA, Kerberized """
+
+    self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
+    self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
+    expectedCommand = "{0}--yarn --kerberos".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case5(self):
+    """ Running HAWQ Check Case 5: Non HDFS-HA, YARN Resource Management YARN_HA, Not Kerberized """
+
+    self.config_dict['configurations']['yarn-site']['yarn.resourcemanager.ha.enabled'] = "true"
+    self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
+    expectedCommand = "{0}--yarn-ha".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case6(self):
+    """ Running HAWQ Check Case 6: Non HDFS-HA, YARN Resource Management YARN_HA, Kerberized """
+
+    self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
+    self.config_dict['configurations']['yarn-site']['yarn.resourcemanager.ha.enabled'] = "true"
+    self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
+    expectedCommand = "{0}--yarn-ha --kerberos".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case7(self):
+    """ Running HAWQ Check Case 7: HDFS-HA, Standalone Resource Management, Not Kerberized """
+
+    self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
+    expectedCommand = "{0}--hdfs-ha".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case8(self):
+    """ Running HAWQ Check Case 8: HDFS-HA, Standalone Resource Management, Kerberized """
+
+    self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
+    self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
+    expectedCommand = "{0}--hdfs-ha --kerberos".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case9(self):
+    """ Running HAWQ Check Case 9: HDFS-HA, YARN Resource Management Non YARN_HA, Not Kerberized """
+
+    self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
+    self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
+    expectedCommand = "{0}--hdfs-ha --yarn".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case10(self):
+    """ Running HAWQ Check Case 10: HDFS-HA, YARN Resource Management Non YARN_HA, Kerberized """
+
+    self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
+    self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
+    self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
+    expectedCommand = "{0}--hdfs-ha --yarn --kerberos".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case11(self):
+    """ Running HAWQ Check Case 11: HDFS-HA, YARN Resource Management YARN_HA, Not Kerberized """
+
+    self.config_dict['configurations']['yarn-site']['yarn.resourcemanager.ha.enabled'] = "true"
+    self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
+    self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
+    expectedCommand = "{0}--hdfs-ha --yarn-ha".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_run_hawq_check_case12(self):
+    """ Running HAWQ Check Case 12: HDFS-HA, YARN Resource Management YARN_HA, Kerberized """
+
+    self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
+    self.config_dict['configurations']['yarn-site']['yarn.resourcemanager.ha.enabled'] = "true"
+    self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
+    self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
+    expectedCommand = "{0}--hdfs-ha --yarn-ha --kerberos".format(self.HAWQ_CHECK_COMMAND)
+    self.__asserts_for_hawq_check(expectedCommand)
+
+
+  def test_resync_hawq_standby(self):
+    """ Run custom command Resync HAWQ Standby """
+
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + '/scripts/hawqmaster.py',
+        classname = 'HawqMaster',
+        command = 'resync_hawq_standby',
+        config_dict = self.config_dict,
+        hdp_stack_version = self.STACK_VERSION,
+        target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    self.assertResourceCalled('Execute', 'source /usr/local/hawq/greenplum_path.sh && hawq init standby -n -a -v -M fast',
         user = self.GPADMIN,
-        timeout = 900
+        timeout = 900,
+        not_if = None,
+        only_if = None,
+        logoutput = True
     )
 
     self.assertNoMoreResources()
+
+  def test_remove_hawq_standby(self):
+    """ Run custom command Remove HAWQ Standby """
+
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + '/scripts/hawqmaster.py',
+        classname = 'HawqMaster',
+        command = 'remove_hawq_standby',
+        config_dict = self.config_dict,
+        hdp_stack_version = self.STACK_VERSION,
+        target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    self.assertResourceCalled('Execute', 'source /usr/local/hawq/greenplum_path.sh && hawq init standby -a -v -r --ignore-bad-hosts',
+        user = self.GPADMIN,
+        timeout = 900,
+        not_if = None,
+        only_if = None,
+        logoutput = True
    )
 
+    self.assertNoMoreResources()
\ No newline at end of file
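
For reference, the flag selection introduced in run_hawq_check() above can be exercised in isolation. The snippet below is a minimal standalone sketch (not part of the commit) of how the extra "hawq check" arguments are composed; the parameter values in the example are hypothetical, not read from a live cluster.

# Standalone sketch of the additional-flags logic added to run_hawq_check().
# Example argument values below are hypothetical.
def build_hawq_check_flags(dfs_nameservice, hawq_global_rm_type, is_yarn_ha_enabled, security_enabled):
  flags = []
  if dfs_nameservice:                              # HDFS HA configured -> --hdfs-ha
    flags.append("--hdfs-ha")
  if hawq_global_rm_type == "yarn":                # YARN resource management -> --yarn or --yarn-ha
    flags.append("--yarn-ha" if is_yarn_ha_enabled else "--yarn")
  if str(security_enabled).lower() == "true":      # Kerberized cluster -> --kerberos
    flags.append("--kerberos")
  return flags

# Example: HDFS HA + non-HA YARN + Kerberos yields "--hdfs-ha --yarn --kerberos",
# matching test_run_hawq_check_case10 above.
print("hawq check -f /usr/local/hawq/etc/hawq_hosts --hadoop /usr/lib/hadoop "
      "--config /usr/local/hawq/etc/hawq_check.cnf "
      + " ".join(build_hawq_check_flags("haservice", "yarn", False, "true")))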
