Script 'mail_helper' called by obssrc

Hello community,

here is the log from the commit of package crmsh for openSUSE:Factory checked in at 2024-01-26 22:47:46
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/crmsh (Old)
 and      /work/SRC/openSUSE:Factory/.crmsh.new.1815 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "crmsh"

Fri Jan 26 22:47:46 2024 rev:323 rq:1141679 version:4.6.0+20240126.e227ccf6

Changes:
--------
--- /work/SRC/openSUSE:Factory/crmsh/crmsh.changes      2024-01-25 18:40:35.421340767 +0100
+++ /work/SRC/openSUSE:Factory/.crmsh.new.1815/crmsh.changes    2024-01-26 22:47:58.689734118 +0100
@@ -1,0 +2,10 @@
+Fri Jan 26 07:47:11 UTC 2024 - xli...@suse.com
+
+- Update to version 4.6.0+20240126.e227ccf6:
+  * Dev: unittest: Adjust unit test for previous change
+  * Dev: behave: Add functional test for previous change
+  * Dev: report: Enable crm report even cib.xml does not exist
+  * Dev: utils: Query pacemaker_remote node correctly
+  * Fix: ui_cluster: Can't start cluster with --all option if no cib (bsc#1219052)
+
+-------------------------------------------------------------------

Old:
----
  crmsh-4.6.0+20240124.feea55a9.tar.bz2

New:
----
  crmsh-4.6.0+20240126.e227ccf6.tar.bz2

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ crmsh.spec ++++++
--- /var/tmp/diff_new_pack.XD70jX/_old  2024-01-26 22:47:59.413760191 +0100
+++ /var/tmp/diff_new_pack.XD70jX/_new  2024-01-26 22:47:59.413760191 +0100
@@ -36,7 +36,7 @@
 Summary:        High Availability cluster command-line interface
 License:        GPL-2.0-or-later
 Group:          %{pkg_group}
-Version:        4.6.0+20240124.feea55a9
+Version:        4.6.0+20240126.e227ccf6
 Release:        0
 URL:            http://crmsh.github.io
 Source0:        %{name}-%{version}.tar.bz2

++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.XD70jX/_old  2024-01-26 22:47:59.473762352 +0100
+++ /var/tmp/diff_new_pack.XD70jX/_new  2024-01-26 22:47:59.477762497 +0100
@@ -9,7 +9,7 @@
 </service>
 <service name="tar_scm">
   <param name="url">https://github.com/ClusterLabs/crmsh.git</param>
-  <param name="changesrevision">feea55a985585b794983c629dfb12a4c0d99270b</param>
+  <param name="changesrevision">e227ccf6be0b408bf63d7e949ea207cad08c76d8</param>
 </service>
</servicedata>
(No newline at EOF)

++++++ crmsh-4.6.0+20240124.feea55a9.tar.bz2 -> crmsh-4.6.0+20240126.e227ccf6.tar.bz2 ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.6.0+20240124.feea55a9/crmsh/report/collect.py new/crmsh-4.6.0+20240126.e227ccf6/crmsh/report/collect.py
--- old/crmsh-4.6.0+20240124.feea55a9/crmsh/report/collect.py  2024-01-24 02:10:16.000000000 +0100
+++ new/crmsh-4.6.0+20240126.e227ccf6/crmsh/report/collect.py  2024-01-26 08:23:47.000000000 +0100
@@ -386,6 +386,9 @@
         logger.debug(f"Touch file 'RUNNING' in {utils.real_path(workdir)}")
     else:
         # TODO should determine offline node was ha node
+        if not os.path.isfile(os.path.join(context.cib_dir, constants.CIB_F)):
+            logger.warning(f"Cannot find cib.xml in {context.cib_dir}")
+            return
         shutil.copy2(os.path.join(context.cib_dir, constants.CIB_F), workdir)
         crmutils.str2file("", os.path.join(workdir, "STOPPED"))
         logger.debug(f"Touch file 'STOPPED' in {utils.real_path(workdir)}")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.6.0+20240124.feea55a9/crmsh/report/utils.py new/crmsh-4.6.0+20240126.e227ccf6/crmsh/report/utils.py
--- old/crmsh-4.6.0+20240124.feea55a9/crmsh/report/utils.py    2024-01-24 02:10:16.000000000 +0100
+++ new/crmsh-4.6.0+20240126.e227ccf6/crmsh/report/utils.py    2024-01-26 08:23:47.000000000 +0100
@@ -500,7 +500,9 @@
         """
         Prepare the data and files for the sanitization process
         """
-        self._load_cib_from_work_dir()
+        self.cib_data = self._load_cib_from_work_dir()
+        if not self.cib_data:
+            return False
         self._parse_sensitive_set()
         self._extract_sensitive_value_list()
@@ -508,11 +510,13 @@
             if not self.context.sanitize:
                 logger.warning("Some PE/CIB/log files contain possibly sensitive data")
                 logger.warning("Using \"-s\" option can replace sensitive data")
-                return
+                return False
             self._get_file_list_in_work_dir()
         else:
             self.context.sanitize = False
 
+        return True
+
     def _include_sensitive_data(self) -> List[str]:
         """
         Check whether contain sensitive data
         """
@@ -533,11 +537,8 @@
         """
         cib_file_list = glob.glob(f"{self.context.work_dir}/*/{constants.CIB_F}")
         if not cib_file_list:
-            raise ReportGenericError(f"CIB file {constants.CIB_F} was not collected")
-        data = crmutils.read_from_file(cib_file_list[0])
-        if not data:
-            raise ReportGenericError(f"File {cib_file_list[0]} is empty")
-        self.cib_data = data
+            return None
+        return crmutils.read_from_file(cib_file_list[0])
 
     def _parse_sensitive_set(self) -> None:
         """
@@ -619,8 +620,8 @@
     Perform sanitization by replacing sensitive information in CIB/PE/other logs data with '*'
     """
     inst = Sanitizer(context)
-    inst.prepare()
-    inst.sanitize()
+    if inst.prepare():
+        inst.sanitize()
 
 
 class Package:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.6.0+20240124.feea55a9/crmsh/ui_node.py new/crmsh-4.6.0+20240126.e227ccf6/crmsh/ui_node.py
--- old/crmsh-4.6.0+20240124.feea55a9/crmsh/ui_node.py  2024-01-24 02:10:16.000000000 +0100
+++ new/crmsh-4.6.0+20240126.e227ccf6/crmsh/ui_node.py  2024-01-26 08:23:47.000000000 +0100
@@ -244,14 +244,11 @@
         context.fatal_error("Should either use --all or specific node(s)")
     # return local node
-    if not options.all and not args:
+    if (not options.all and not args) or (len(args) == 1 and args[0] == utils.this_node()):
         return [utils.this_node()]
 
     member_list = utils.list_cluster_nodes()
     if not member_list:
         context.fatal_error("Cannot get the node list from cluster")
-    for node in args:
-        if node not in member_list:
-            context.fatal_error("Node \"{}\" is not a cluster node".format(node))
 
     node_list = member_list if options.all else args
     for node in node_list:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.6.0+20240124.feea55a9/crmsh/utils.py new/crmsh-4.6.0+20240126.e227ccf6/crmsh/utils.py
--- old/crmsh-4.6.0+20240124.feea55a9/crmsh/utils.py    2024-01-24 02:10:16.000000000 +0100
+++ new/crmsh-4.6.0+20240126.e227ccf6/crmsh/utils.py    2024-01-26 08:23:47.000000000 +0100
@@ -1766,6 +1766,14 @@
     print("{}\n".format(out))
 
 
+def get_address_list_from_corosync_conf():
+    """
+    Return a list of addresses configured in corosync.conf
+    """
+    from . import corosync
+    return corosync.get_values("nodelist.node.ring0_addr")
+
+
 def list_cluster_nodes(no_reg=False):
     '''
     Returns a list of nodes in the cluster.
@@ -1780,17 +1788,18 @@
     else:
         cib_path = os.getenv('CIB_file', constants.CIB_RAW_FILE)
         if not os.path.isfile(cib_path):
-            return None
+            return get_address_list_from_corosync_conf()
         cib = xmlutil.file2cib_elem(cib_path)
     if cib is None:
-        return None
+        return get_address_list_from_corosync_conf()
     node_list = []
     for node in cib.xpath(constants.XML_NODE_PATH):
         name = node.get('uname') or node.get('id')
+        # exclude remote node
         if node.get('type') == 'remote':
-            srv = cib.xpath("//primitive[@id='%s']/instance_attributes/nvpair[@name='server']" % (name))
-            if srv:
+            xpath = f"//primitive[@provider='pacemaker' and @type='remote']/instance_attributes/nvpair[@name='server' and @value='{name}']"
+            if cib.xpath(xpath):
                 continue
         node_list.append(name)
     return node_list
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.6.0+20240124.feea55a9/test/features/bootstrap_bugs.feature new/crmsh-4.6.0+20240126.e227ccf6/test/features/bootstrap_bugs.feature
--- old/crmsh-4.6.0+20240124.feea55a9/test/features/bootstrap_bugs.feature     2024-01-24 02:10:16.000000000 +0100
+++ new/crmsh-4.6.0+20240126.e227ccf6/test/features/bootstrap_bugs.feature     2024-01-26 08:23:47.000000000 +0100
@@ -133,6 +133,25 @@
     Then    Service "corosync" is "stopped" on "hanode1"
 
   @clean
+  Scenario: Can't start cluster with --all option if no cib(bsc#1219052)
+    Given   Cluster service is "stopped" on "hanode1"
+    And     Cluster service is "stopped" on "hanode2"
+    When    Run "crm cluster init -y" on "hanode1"
+    Then    Cluster service is "started" on "hanode1"
+    When    Run "crm cluster join -c hanode1 -y" on "hanode2"
+    Then    Cluster service is "started" on "hanode2"
+    And     Online nodes are "hanode1 hanode2"
+
+    When    Run "crm cluster stop --all" on "hanode1"
+    Then    Cluster service is "stopped" on "hanode1"
+    And     Cluster service is "stopped" on "hanode2"
+    When    Run "rm -f /var/lib/pacemaker/cib/*" on "hanode1"
+    When    Run "rm -f /var/lib/pacemaker/cib/*" on "hanode2"
+    And     Run "crm cluster start --all" on "hanode1"
+    Then    Cluster service is "started" on "hanode1"
+    Then    Cluster service is "started" on "hanode2"
+
+  @clean
   Scenario: Can't stop all nodes' cluster service when local node's service is down(bsc#1213889)
     Given   Cluster service is "stopped" on "hanode1"
     And     Cluster service is "stopped" on "hanode2"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.6.0+20240124.feea55a9/test/features/crm_report_normal.feature new/crmsh-4.6.0+20240126.e227ccf6/test/features/crm_report_normal.feature
--- old/crmsh-4.6.0+20240124.feea55a9/test/features/crm_report_normal.feature  2024-01-24 02:10:16.000000000 +0100
+++ new/crmsh-4.6.0+20240126.e227ccf6/test/features/crm_report_normal.feature  2024-01-26 08:23:47.000000000 +0100
@@ -105,5 +105,4 @@
     When    Run "crm cluster stop --all" on "hanode1"
     When    Run "rm -f /var/lib/pacemaker/cib/cib*" on "hanode1"
     When    Run "rm -f /var/lib/pacemaker/cib/cib*" on "hanode2"
-    When    Try "crm report" on "hanode1"
-    Then    Expected "Could not figure out a list of nodes; is this a cluster node" in stderr
+    When    Run "crm report" OK
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.6.0+20240124.feea55a9/test/unittests/test_report_utils.py new/crmsh-4.6.0+20240126.e227ccf6/test/unittests/test_report_utils.py
--- old/crmsh-4.6.0+20240124.feea55a9/test/unittests/test_report_utils.py      2024-01-24 02:10:16.000000000 +0100
+++ new/crmsh-4.6.0+20240126.e227ccf6/test/unittests/test_report_utils.py      2024-01-26 08:23:47.000000000 +0100
@@ -138,27 +138,16 @@
     @mock.patch('glob.glob')
     def test_load_cib_from_work_dir_no_cib(self, mock_glob):
         mock_glob.return_value = []
-        with self.assertRaises(utils.ReportGenericError) as err:
-            self.s_inst._load_cib_from_work_dir()
-        self.assertEqual(f"CIB file {constants.CIB_F} was not collected", str(err.exception))
-
-    @mock.patch('glob.glob')
-    @mock.patch('crmsh.utils.read_from_file')
-    def test_load_cib_from_work_dir_empty(self, mock_read, mock_glob):
-        mock_glob.return_value = [f"/opt/node1/{constants.CIB_F}"]
-        mock_read.return_value = None
-        with self.assertRaises(utils.ReportGenericError) as err:
-            self.s_inst._load_cib_from_work_dir()
-        self.assertEqual(f"File /opt/node1/{constants.CIB_F} is empty", str(err.exception))
-        mock_read.assert_called_once_with(f"/opt/node1/{constants.CIB_F}")
+        res = self.s_inst._load_cib_from_work_dir()
+        self.assertIsNone(res)
 
     @mock.patch('glob.glob')
     @mock.patch('crmsh.utils.read_from_file')
     def test_load_cib_from_work_dir(self, mock_read, mock_glob):
         mock_glob.return_value = [f"/opt/node1/{constants.CIB_F}"]
         mock_read.return_value = "data"
-        self.s_inst._load_cib_from_work_dir()
-        self.assertEqual(self.s_inst.cib_data, "data")
+        res = self.s_inst._load_cib_from_work_dir()
+        self.assertEqual(res, "data")
         mock_read.assert_called_once_with(f"/opt/node1/{constants.CIB_F}")
 
     @mock.patch('crmsh.report.utils.logger', spec=crmsh.log.DEBUG2Logger)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.6.0+20240124.feea55a9/test/unittests/test_utils.py new/crmsh-4.6.0+20240126.e227ccf6/test/unittests/test_utils.py
--- old/crmsh-4.6.0+20240124.feea55a9/test/unittests/test_utils.py     2024-01-24 02:10:16.000000000 +0100
+++ new/crmsh-4.6.0+20240126.e227ccf6/test/unittests/test_utils.py     2024-01-26 08:23:47.000000000 +0100
@@ -1232,37 +1232,43 @@
     mock_run_inst.get_stdout_or_raise_error.assert_called_once_with("corosync-quorumtool -s", None, success_exit_status={0, 2})
 
 
+@mock.patch('crmsh.utils.get_address_list_from_corosync_conf')
 @mock.patch('crmsh.utils.etree.fromstring')
 @mock.patch('crmsh.sh.ShellUtils.get_stdout_stderr')
-def test_list_cluster_nodes_none(mock_run, mock_etree):
+def test_list_cluster_nodes_none(mock_run, mock_etree, mock_corosync):
     mock_run.return_value = (0, "data", None)
     mock_etree.return_value = None
+    mock_corosync.return_value = ["node1", "node2"]
     res = utils.list_cluster_nodes()
-    assert res is None
+    assert res == ["node1", "node2"]
     mock_run.assert_called_once_with(constants.CIB_QUERY, no_reg=False)
     mock_etree.assert_called_once_with("data")
 
 
+@mock.patch('crmsh.utils.get_address_list_from_corosync_conf')
 @mock.patch('crmsh.utils.etree.fromstring')
 @mock.patch('crmsh.sh.ShellUtils.get_stdout_stderr')
-def test_list_cluster_nodes_none_no_reg(mock_run, mock_etree):
+def test_list_cluster_nodes_none_no_reg(mock_run, mock_etree, mock_corosync):
     mock_run.return_value = (0, "data", None)
     mock_etree.return_value = None
+    mock_corosync.return_value = ["node1", "node2"]
     res = utils.list_cluster_nodes(no_reg=True)
-    assert res is None
+    assert res == ["node1", "node2"]
     mock_run.assert_called_once_with(constants.CIB_QUERY, no_reg=True)
     mock_etree.assert_called_once_with("data")
 
 
+@mock.patch('crmsh.utils.get_address_list_from_corosync_conf')
 @mock.patch('os.path.isfile')
 @mock.patch('os.getenv')
 @mock.patch('crmsh.sh.ShellUtils.get_stdout_stderr')
-def test_list_cluster_nodes_cib_not_exist(mock_run, mock_env, mock_isfile):
+def test_list_cluster_nodes_cib_not_exist(mock_run, mock_env, mock_isfile, mock_corosync):
     mock_run.return_value = (1, None, None)
     mock_env.return_value = constants.CIB_RAW_FILE
     mock_isfile.return_value = False
+    mock_corosync.return_value = ["node1", "node2"]
     res = utils.list_cluster_nodes()
-    assert res is None
+    assert res == ["node1", "node2"]
     mock_run.assert_called_once_with(constants.CIB_QUERY, no_reg=False)
     mock_env.assert_called_once_with("CIB_file", constants.CIB_RAW_FILE)
     mock_isfile.assert_called_once_with(constants.CIB_RAW_FILE)
@@ -1293,7 +1299,7 @@
     mock_file2elem.assert_called_once_with(constants.CIB_RAW_FILE)
     mock_cib_inst.xpath.assert_has_calls([
         mock.call(constants.XML_NODE_PATH),
-        mock.call("//primitive[@id='node1']/instance_attributes/nvpair[@name='server']")
+        mock.call("//primitive[@provider='pacemaker' and @type='remote']/instance_attributes/nvpair[@name='server' and @value='node1']")
     ])
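
For readers who only skim the diff: the functional core of this update is that crmsh no longer gives up when cib.xml is missing, but falls back to the node addresses configured in corosync.conf. The snippet below is a minimal, self-contained sketch of that fallback pattern, not the actual crmsh implementation (the real code in crmsh/utils.py uses xmlutil.file2cib_elem, constants.XML_NODE_PATH and corosync.get_values("nodelist.node.ring0_addr"), and additionally filters out pacemaker_remote nodes); the function and parameter names here are illustrative only.

    # Illustrative sketch only -- not crmsh code. Shows the "CIB first,
    # corosync.conf as fallback" behaviour introduced for bsc#1219052.
    import os
    from typing import Callable, List, Optional
    from xml.etree import ElementTree


    def list_cluster_nodes_sketch(
            cib_path: str,
            corosync_addresses: Callable[[], List[str]]) -> Optional[List[str]]:
        """Return node names from the CIB, or corosync.conf addresses if no CIB."""
        if not os.path.isfile(cib_path):
            # cib.xml is gone (e.g. /var/lib/pacemaker/cib was wiped): instead of
            # returning None, use the addresses configured in corosync.conf so that
            # commands like `crm cluster start --all` and `crm report` still work.
            return corosync_addresses()
        try:
            cib = ElementTree.parse(cib_path).getroot()
        except ElementTree.ParseError:
            return corosync_addresses()
        # The real implementation also skips pacemaker_remote nodes at this point.
        return [n.get('uname') or n.get('id') for n in cib.iter('node')]


    if __name__ == '__main__':
        # With no CIB on disk, the corosync.conf addresses are returned.
        print(list_cluster_nodes_sketch('/nonexistent/cib.xml',
                                        lambda: ['hanode1', 'hanode2']))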