Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package crmsh for openSUSE:Factory checked 
in at 2022-12-05 18:01:30
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/crmsh (Old)
 and      /work/SRC/openSUSE:Factory/.crmsh.new.1835 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "crmsh"

Mon Dec  5 18:01:30 2022 rev:269 rq:1040141 version:4.4.1+20221203.9bb5442e

Changes:
--------
--- /work/SRC/openSUSE:Factory/crmsh/crmsh.changes      2022-12-01 
17:22:05.914520486 +0100
+++ /work/SRC/openSUSE:Factory/.crmsh.new.1835/crmsh.changes    2022-12-05 
18:01:38.652726341 +0100
@@ -1,0 +2,17 @@
+Sat Dec 03 15:05:11 UTC 2022 - xli...@suse.com
+
+- Update to version 4.4.1+20221203.9bb5442e:
+  * Dev: doc: ui_cluster: adapt usage text to asciidoc format
+  * Dev: behave: add assert_eq to print expected and actual values when 
assertions fail
+  * Fix: behave: specified destination dir explicitly when source code is 
copied into a container
+  * Dev: ui_context: redirect `foo -h`/`foo --help` to `help foo` (bsc#1205735)
+
+-------------------------------------------------------------------
+Fri Dec 02 09:56:49 UTC 2022 - xli...@suse.com
+
+- Update to version 4.4.1+20221202.ec9fec0e:
+  * Dev: unittest: Adjust unit test based on previous changes
+  * Dev: behave: Add functional test based on previous changes
+  * Dev: bootstrap: remove more configuration files while removing node from 
cluster
+
+-------------------------------------------------------------------

Old:
----
  crmsh-4.4.1+20221201.bdfb0f2c.tar.bz2

New:
----
  crmsh-4.4.1+20221203.9bb5442e.tar.bz2

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ crmsh.spec ++++++
--- /var/tmp/diff_new_pack.xRhw2s/_old  2022-12-05 18:01:39.284729782 +0100
+++ /var/tmp/diff_new_pack.xRhw2s/_new  2022-12-05 18:01:39.288729804 +0100
@@ -36,7 +36,7 @@
 Summary:        High Availability cluster command-line interface
 License:        GPL-2.0-or-later
 Group:          %{pkg_group}
-Version:        4.4.1+20221201.bdfb0f2c
+Version:        4.4.1+20221203.9bb5442e
 Release:        0
 URL:            http://crmsh.github.io
 Source0:        %{name}-%{version}.tar.bz2

++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.xRhw2s/_old  2022-12-05 18:01:39.336730065 +0100
+++ /var/tmp/diff_new_pack.xRhw2s/_new  2022-12-05 18:01:39.340730087 +0100
@@ -9,7 +9,7 @@
 </service>
 <service name="tar_scm">
   <param name="url">https://github.com/ClusterLabs/crmsh.git</param>
-  <param 
name="changesrevision">bdfb0f2ca98a8d652455b883049070054e677869</param>
+  <param 
name="changesrevision">d450e2c49093db2e6ba718b6e92800bd3f843128</param>
 </service>
 </servicedata>
 (No newline at EOF)

++++++ crmsh-4.4.1+20221201.bdfb0f2c.tar.bz2 -> 
crmsh-4.4.1+20221203.9bb5442e.tar.bz2 ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/crmsh-4.4.1+20221201.bdfb0f2c/crmsh/bootstrap.py 
new/crmsh-4.4.1+20221203.9bb5442e/crmsh/bootstrap.py
--- old/crmsh-4.4.1+20221201.bdfb0f2c/crmsh/bootstrap.py        2022-12-01 
07:11:27.000000000 +0100
+++ new/crmsh-4.4.1+20221203.9bb5442e/crmsh/bootstrap.py        2022-12-03 
15:47:59.000000000 +0100
@@ -132,7 +132,9 @@
         self.local_ip_list = []
         self.local_network_list = []
         self.rm_list = [SYSCONFIG_SBD, CSYNC2_CFG, corosync.conf(), CSYNC2_KEY,
-                COROSYNC_AUTH, "/var/lib/heartbeat/crm/*", 
"/var/lib/pacemaker/cib/*"]
+                COROSYNC_AUTH, "/var/lib/heartbeat/crm/*", 
"/var/lib/pacemaker/cib/*",
+                "/var/lib/corosync/*", "/var/lib/pacemaker/pengine/*", 
PCMK_REMOTE_AUTH,
+                "/var/lib/csync2/*"]
 
     @classmethod
     def set_context(cls, options):
@@ -1959,6 +1961,18 @@
             utils.stop_service(service, disable=True, remote_addr=remote_addr)
 
 
+def rm_configuration_files(remote=None):
+    """
+    Delete configuration files from the node to be removed
+    """
+    utils.get_stdout_or_raise_error("rm -f {}".format(' 
'.join(_context.rm_list)), remote=remote)
+    # restore original sbd configuration file from 
/usr/share/fillup-templates/sysconfig.sbd
+    if utils.package_is_installed("sbd", remote_addr=remote):
+        from .sbd import SBDManager
+        cmd = "cp {} {}".format(SBDManager.SYSCONFIG_SBD_TEMPLATE, 
SYSCONFIG_SBD)
+        utils.get_stdout_or_raise_error(cmd, remote=remote)
+
+
 def remove_node_from_cluster():
     """
     Remove node from running cluster and the corosync / pacemaker 
configuration.
@@ -1968,11 +1982,7 @@
 
     stop_services(SERVICES_STOP_LIST, remote_addr=node)
     qdevice.QDevice.remove_qdevice_db([node])
-
-    # delete configuration files from the node to be removed
-    rc, _, err = invoke('ssh {} root@{} "bash -c \\\"rm -f 
{}\\\""'.format(SSH_OPTION, node, " ".join(_context.rm_list)))
-    if not rc:
-        utils.fatal("Deleting the configuration files failed: {}".format(err))
+    rm_configuration_files(node)
 
     # execute the command : crm node delete $HOSTNAME
     logger.info("Removing the node {}".format(node))
@@ -2305,10 +2315,7 @@
         stop_services(SERVICES_STOP_LIST)
         qdevice.QDevice.remove_certification_files_on_qnetd()
         qdevice.QDevice.remove_qdevice_db([utils.this_node()])
-        # remove all trace of cluster from this node
-        # delete configuration files from the node to be removed
-        if not invokerc('bash -c "rm -f {}"'.format(" 
".join(_context.rm_list))):
-            utils.fatal("Deleting the configuration files failed")
+        rm_configuration_files()
 
 
 def init_common_geo():
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/crmsh-4.4.1+20221201.bdfb0f2c/crmsh/help.py 
new/crmsh-4.4.1+20221203.9bb5442e/crmsh/help.py
--- old/crmsh-4.4.1+20221201.bdfb0f2c/crmsh/help.py     2022-12-01 
07:11:27.000000000 +0100
+++ new/crmsh-4.4.1+20221203.9bb5442e/crmsh/help.py     2022-12-03 
15:47:59.000000000 +0100
@@ -348,7 +348,7 @@
             if lvl not in _COMMANDS:
                 _COMMANDS[lvl] = odict()
             if entry['from_cli']:
-                _, help_output, _ = get_stdout_stderr("crm {} {} 
--help".format(lvl, name))
+                _, help_output, _ = get_stdout_stderr("crm {} {} 
--help-without-redirect".format(lvl, name))
                 if help_output:
                     helpobj.set_long_help(help_output.rstrip())
             _COMMANDS[lvl][name] = helpobj
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/crmsh-4.4.1+20221201.bdfb0f2c/crmsh/ui_cluster.py 
new/crmsh-4.4.1+20221203.9bb5442e/crmsh/ui_cluster.py
--- old/crmsh-4.4.1+20221201.bdfb0f2c/crmsh/ui_cluster.py       2022-12-01 
07:11:27.000000000 +0100
+++ new/crmsh-4.4.1+20221203.9bb5442e/crmsh/ui_cluster.py       2022-12-03 
15:47:59.000000000 +0100
@@ -321,7 +321,7 @@
   # Setup the cluster on the current node, with SBD+OCFS2
   crm cluster init -s <share disk1> -o <share disk2> -y
 
-  # Setup the cluster on the current node, with SBD+OCFS2+Cluster LVM
+  # Setup the cluster on the current node, with SBD++OCFS2++Cluster LVM
   crm cluster init -s <share disk1> -o <share disk2> -o <share disk3> -C -y
 
   # Add SBD on a running cluster
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/crmsh-4.4.1+20221201.bdfb0f2c/crmsh/ui_context.py 
new/crmsh-4.4.1+20221203.9bb5442e/crmsh/ui_context.py
--- old/crmsh-4.4.1+20221201.bdfb0f2c/crmsh/ui_context.py       2022-12-01 
07:11:27.000000000 +0100
+++ new/crmsh-4.4.1+20221203.9bb5442e/crmsh/ui_context.py       2022-12-03 
15:47:59.000000000 +0100
@@ -47,13 +47,6 @@
         '''
         Execute the given command line.
         '''
-        def trans_to_help(line):
-            line_list = line.split()
-            if line_list[-1] in ["-h", "--help"] and \
-               line_list[-2] == "property":
-                return " ".join(line_list[:-2] + ["help", "property"])
-            else:
-                return line
         promptstr = "crm({}/{}){}# ".format(main.cib_prompt(), 
utils.this_node(), self.prompt())
         logger_utils.log_only_to_file("{}{}".format(promptstr, line))
 
@@ -61,7 +54,7 @@
         if not line or line.startswith('#'):
             return True
 
-        line = trans_to_help(line)
+        line = _try_redirect_help_argument_to_subcommand(line)
 
         self._mark = len(self.stack)
         self._in_transit = False
@@ -403,4 +396,21 @@
         logger.info("%s: %s", self.get_qualified_name(), msg)
 
 
+def _try_redirect_help_argument_to_subcommand(line):
+    tokens = shlex.split(line)
+    if "--help-without-redirect" in tokens:
        # Help text of some subcommands is read from `--help` generated by 
argparse.
+        # They should not be redirected, or a circular redirect will be formed.
+        # See crmsh.help._load_help.
+        return ' '.join(("--help" if token == "--help-without-redirect" else 
token for token in tokens))
+    elif tokens[-1] in ["-h", "--help"]:
+        if len(tokens) == 2 and tokens[0] == 'report':
            # subcommand report has --help implementation, but _load_help 
does not load from it.
+            return line
+        else:
+            return 'help ' + ' '.join(tokens[:-1])
+    else:
+        return line
+
+
 # vim:ts=4:sw=4:et:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/crmsh-4.4.1+20221201.bdfb0f2c/test/features/bootstrap_init_join_remove.feature
 
new/crmsh-4.4.1+20221203.9bb5442e/test/features/bootstrap_init_join_remove.feature
--- 
old/crmsh-4.4.1+20221201.bdfb0f2c/test/features/bootstrap_init_join_remove.feature
  2022-12-01 07:11:27.000000000 +0100
+++ 
new/crmsh-4.4.1+20221203.9bb5442e/test/features/bootstrap_init_join_remove.feature
  2022-12-03 15:47:59.000000000 +0100
@@ -49,15 +49,55 @@
 
   @clean
   Scenario: Remove peer node "hanode2"
+    When    Run "crm configure primitive d1 Dummy" on "hanode1"
+    When    Run "crm configure primitive d2 Dummy" on "hanode2"
+    Then    File "/etc/csync2/csync2.cfg" exists on "hanode2"
+    Then    File "/etc/csync2/key_hagroup" exists on "hanode2"
+    Then    File "/etc/corosync/authkey" exists on "hanode2"
+    Then    File "/etc/corosync/corosync.conf" exists on "hanode2"
+    Then    File "/etc/pacemaker/authkey" exists on "hanode2"
+    Then    Directory "/var/lib/csync2/" not empty on "hanode2"
+    Then    Directory "/var/lib/pacemaker/cib/" not empty on "hanode2"
+    Then    Directory "/var/lib/pacemaker/pengine/" not empty on "hanode2"
+    Then    Directory "/var/lib/corosync/" not empty on "hanode2"
     When    Run "crm cluster remove hanode2 -y" on "hanode1"
     Then    Cluster service is "started" on "hanode1"
     And     Cluster service is "stopped" on "hanode2"
     And     Online nodes are "hanode1"
     And     Show cluster status on "hanode1"
+    Then    File "/etc/csync2/csync2.cfg" not exist on "hanode2"
+    Then    File "/etc/csync2/key_hagroup" not exist on "hanode2"
+    Then    File "/etc/corosync/authkey" not exist on "hanode2"
+    Then    File "/etc/corosync/corosync.conf" not exist on "hanode2"
+    Then    File "/etc/pacemaker/authkey" not exist on "hanode2"
+    Then    Directory "/var/lib/csync2/" is empty on "hanode2"
+    Then    Directory "/var/lib/pacemaker/cib/" is empty on "hanode2"
+    Then    Directory "/var/lib/pacemaker/pengine/" is empty on "hanode2"
+    Then    Directory "/var/lib/corosync/" is empty on "hanode2"
 
   @clean
   Scenario: Remove local node "hanode1"
+    When    Run "crm configure primitive d1 Dummy" on "hanode1"
+    When    Run "crm configure primitive d2 Dummy" on "hanode1"
+    Then    File "/etc/csync2/csync2.cfg" exists on "hanode1"
+    Then    File "/etc/csync2/key_hagroup" exists on "hanode1"
+    Then    File "/etc/corosync/authkey" exists on "hanode1"
+    Then    File "/etc/corosync/corosync.conf" exists on "hanode1"
+    Then    File "/etc/pacemaker/authkey" exists on "hanode1"
+    Then    Directory "/var/lib/csync2/" not empty on "hanode1"
+    Then    Directory "/var/lib/pacemaker/cib/" not empty on "hanode1"
+    Then    Directory "/var/lib/pacemaker/pengine/" not empty on "hanode1"
+    Then    Directory "/var/lib/corosync/" not empty on "hanode1"
     When    Run "crm cluster remove hanode1 -y --force" on "hanode1"
     Then    Cluster service is "stopped" on "hanode1"
     And     Cluster service is "started" on "hanode2"
     And     Show cluster status on "hanode2"
+    Then    File "/etc/csync2/csync2.cfg" not exist on "hanode1"
+    Then    File "/etc/csync2/key_hagroup" not exist on "hanode1"
+    Then    File "/etc/corosync/authkey" not exist on "hanode1"
+    Then    File "/etc/corosync/corosync.conf" not exist on "hanode1"
+    Then    File "/etc/pacemaker/authkey" not exist on "hanode1"
+    Then    Directory "/var/lib/csync2/" is empty on "hanode1"
+    Then    Directory "/var/lib/pacemaker/cib/" is empty on "hanode1"
+    Then    Directory "/var/lib/pacemaker/pengine/" is empty on "hanode1"
+    Then    Directory "/var/lib/corosync/" is empty on "hanode1"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/crmsh-4.4.1+20221201.bdfb0f2c/test/features/steps/const.py 
new/crmsh-4.4.1+20221203.9bb5442e/test/features/steps/const.py
--- old/crmsh-4.4.1+20221201.bdfb0f2c/test/features/steps/const.py      
2022-12-01 07:11:27.000000000 +0100
+++ new/crmsh-4.4.1+20221203.9bb5442e/test/features/steps/const.py      
2022-12-03 15:47:59.000000000 +0100
@@ -53,7 +53,9 @@
                         +-o+ multiple times.'''
 
 
-CRM_CLUSTER_INIT_H_OUTPUT = '''usage: init [options] [STAGE]
+CRM_CLUSTER_INIT_H_OUTPUT = '''Initializes a new HA cluster
+
+usage: init [options] [STAGE]
 
 Initialize a cluster from scratch. This command configures
 a complete cluster, and can also add additional cluster
@@ -194,7 +196,9 @@
   crm cluster init ocfs2 -o <share disk1> -o <share disk2> -C -y'''
 
 
-CRM_CLUSTER_JOIN_H_OUTPUT = '''usage: join [options] [STAGE]
+CRM_CLUSTER_JOIN_H_OUTPUT = '''Join existing cluster
+
+usage: join [options] [STAGE]
 
 Join the current node to an existing cluster. The
 current node cannot be a member of a cluster already.
@@ -234,7 +238,9 @@
   crm cluster join -c <node> -i eth1 -i eth2 -y'''
 
 
-CRM_CLUSTER_REMOVE_H_OUTPUT = '''usage: remove [options] [<node> ...]
+CRM_CLUSTER_REMOVE_H_OUTPUT = '''Remove node(s) from the cluster
+
+usage: remove [options] [<node> ...]
 
 Remove one or more nodes from the cluster.
 
@@ -254,7 +260,9 @@
   --qdevice             Remove QDevice configuration and service from 
cluster'''
 
 
-CRM_CLUSTER_GEO_INIT_H_OUTPUT = '''usage: geo-init [options]
+CRM_CLUSTER_GEO_INIT_H_OUTPUT = '''Configure cluster as geo cluster
+
+usage: geo-init [options]
 
 Create a new geo cluster with the current cluster as the
 first member. Pass the complete geo cluster topology as
@@ -288,7 +296,9 @@
   crm bootstrap init.'''
 
 
-CRM_CLUSTER_GEO_JOIN_H_OUTPUT = '''usage: geo-join [options]
+CRM_CLUSTER_GEO_JOIN_H_OUTPUT = '''Join cluster to existing geo cluster
+
+usage: geo-join [options]
 
 This command should be run from one of the nodes in a cluster
 which is currently not a member of a geo cluster. The geo
@@ -311,7 +321,9 @@
                         Geo cluster description (see geo-init for details)'''
 
 
-CRM_CLUSTER_GEO_INIT_ARBIT_H_OUTPUT = '''usage: geo-init-arbitrator [options]
+CRM_CLUSTER_GEO_INIT_ARBIT_H_OUTPUT = '''Initialize node as geo cluster 
arbitrator
+
+usage: geo-init-arbitrator [options]
 
 Configure the current node as a geo arbitrator. The command
 requires an existing geo cluster or geo arbitrator from which
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/crmsh-4.4.1+20221201.bdfb0f2c/test/features/steps/step_implementation.py 
new/crmsh-4.4.1+20221203.9bb5442e/test/features/steps/step_implementation.py
--- 
old/crmsh-4.4.1+20221201.bdfb0f2c/test/features/steps/step_implementation.py    
    2022-12-01 07:11:27.000000000 +0100
+++ 
new/crmsh-4.4.1+20221203.9bb5442e/test/features/steps/step_implementation.py    
    2022-12-03 15:47:59.000000000 +0100
@@ -7,7 +7,8 @@
 from crmsh import corosync, parallax, sbd
 from crmsh import utils as crmutils
 from utils import check_cluster_state, check_service_state, online, 
run_command, me, \
-                  run_command_local_or_remote, file_in_archive
+                  run_command_local_or_remote, file_in_archive, \
+                  assert_eq
 import const
 
 @when('Write multi lines to file "{f}"')
@@ -281,7 +282,7 @@
     cmd_help["crm_cluster_geo-join"] = const.CRM_CLUSTER_GEO_JOIN_H_OUTPUT
     cmd_help["crm_cluster_geo-init-arbitrator"] = 
const.CRM_CLUSTER_GEO_INIT_ARBIT_H_OUTPUT
     key = '_'.join(cmd.split())
-    assert context.stdout == cmd_help[key]
+    assert_eq(cmd_help[key], context.stdout)
 
 
 @then('Corosync working on "{transport_type}" mode')
@@ -289,12 +290,12 @@
     if transport_type == "multicast":
         assert corosync.get_value("totem.transport") is None
     if transport_type == "unicast":
-        assert corosync.get_value("totem.transport") == "udpu"
+        assert_eq("udpu", corosync.get_value("totem.transport"))
 
 
 @then('Expected votes will be "{votes}"')
 def step_impl(context, votes):
-    assert int(corosync.get_value("quorum.expected_votes")) == int(votes)
+    assert_eq(int(votes), int(corosync.get_value("quorum.expected_votes")))
 
 
 @then('Default crm_report tar file created')
@@ -328,7 +329,7 @@
 def step_impl(context, f):
     cmd = "crm cluster diff {}".format(f)
     rc, out = run_command(context, cmd)
-    assert out == ""
+    assert_eq("", out)
 
 
 @given('Resource "{res_id}" is started on "{node}"')
@@ -346,25 +347,27 @@
 @then('SBD option "{key}" value is "{value}"')
 def step_impl(context, key, value):
     res = sbd.SBDManager.get_sbd_value_from_config(key)
-    assert res == value
+    assert_eq(value, res)
 
 
 @then('SBD option "{key}" value for "{dev}" is "{value}"')
 def step_impl(context, key, dev, value):
     res = sbd.SBDTimeout.get_sbd_msgwait(dev)
-    assert res == int(value)
+    assert_eq(int(value), res)
 
 
 @then('Cluster property "{key}" is "{value}"')
 def step_impl(context, key, value):
     res = crmutils.get_property(key)
-    assert res is not None and str(res) == value
+    assert res is not None
+    assert_eq(value, str(res))
 
 
 @then('Property "{key}" in "{type}" is "{value}"')
 def step_impl(context, key, type, value):
     res = crmutils.get_property(key, type)
-    assert res is not None and str(res) == value
+    assert res is not None
+    assert_eq(value, str(res))
 
 
 @then('Parameter "{param_name}" not configured in "{res_id}"')
@@ -387,7 +390,7 @@
     with open(yaml_file) as f:
         data = yaml.load(f, Loader=yaml.SafeLoader)
     sec_name, key = path.split(':')
-    assert str(data[sec_name][key]) == str(value)
+    assert_eq(str(value), str(data[sec_name][key]))
 
 
 @when('Wait for DC')
@@ -409,3 +412,18 @@
 @then('File "{path}" exists on "{node}"')
 def step_impl(context, path, node):
     parallax.parallax_call([node], '[ -f {} ]'.format(path))
+
+
+@then('File "{path}" not exist on "{node}"')
+def step_impl(context, path, node):
+    parallax.parallax_call([node], '[ ! -f {} ]'.format(path))
+
+
+@then('Directory "{path}" is empty on "{node}"')
+def step_impl(context, path, node):
+    parallax.parallax_call([node], '[ ! "$(ls -A {})" ]'.format(path))
+
+
+@then('Directory "{path}" not empty on "{node}"')
+def step_impl(context, path, node):
+    parallax.parallax_call([node], '[ "$(ls -A {})" ]'.format(path))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/crmsh-4.4.1+20221201.bdfb0f2c/test/features/steps/utils.py 
new/crmsh-4.4.1+20221203.9bb5442e/test/features/steps/utils.py
--- old/crmsh-4.4.1+20221201.bdfb0f2c/test/features/steps/utils.py      
2022-12-01 07:11:27.000000000 +0100
+++ new/crmsh-4.4.1+20221203.9bb5442e/test/features/steps/utils.py      
2022-12-03 15:47:59.000000000 +0100
@@ -1,3 +1,4 @@
+import difflib
 import tarfile
 import glob
 import re
@@ -96,3 +97,22 @@
             rc = False
             context.logger.error("\nNode \"{}\" not online\n".format(node))
     return rc
+
+def assert_eq(expected, actual):
+    if expected != actual:
+        msg = "\033[32m" "Expected" "\033[31m" " != Actual" "\033[0m" "\n" \
+              "\033[32m" "Expected:" "\033[0m" " {}\n" \
+              "\033[31m" "Actual:" "\033[0m" " {}".format(expected, actual)
+        if isinstance(expected, str) and '\n' in expected:
+            try:
+                diff = '\n'.join(difflib.unified_diff(
+                    expected.splitlines(),
+                    actual.splitlines(),
+                    fromfile="expected",
+                    tofile="actual",
+                    lineterm="",
+                ))
+                msg = "{}\n" "\033[31m" "Diff:" "\033[0m" "\n{}".format(msg, 
diff)
+            except Exception:
+                pass
+        raise AssertionError(msg)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/crmsh-4.4.1+20221201.bdfb0f2c/test/run-functional-tests 
new/crmsh-4.4.1+20221203.9bb5442e/test/run-functional-tests
--- old/crmsh-4.4.1+20221201.bdfb0f2c/test/run-functional-tests 2022-12-01 
07:11:27.000000000 +0100
+++ new/crmsh-4.4.1+20221203.9bb5442e/test/run-functional-tests 2022-12-03 
15:47:59.000000000 +0100
@@ -197,7 +197,7 @@
        fi
        docker_exec $node_name "rm -rf /run/nologin"
        docker_exec $node_name "echo 'StrictHostKeyChecking no' >> 
/etc/ssh/ssh_config"
-       docker cp $PROJECT_PATH $node_name:/opt
+       docker cp $PROJECT_PATH $node_name:/opt/crmsh
        info "Building crmsh on \"$node_name\"..."
        docker_exec $node_name "$make_cmd" 1> /dev/null || \
                fatal "Building failed on $node_name!"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/crmsh-4.4.1+20221201.bdfb0f2c/test/unittests/test_bootstrap.py 
new/crmsh-4.4.1+20221203.9bb5442e/test/unittests/test_bootstrap.py
--- old/crmsh-4.4.1+20221201.bdfb0f2c/test/unittests/test_bootstrap.py  
2022-12-01 07:11:27.000000000 +0100
+++ new/crmsh-4.4.1+20221203.9bb5442e/test/unittests/test_bootstrap.py  
2022-12-03 15:47:59.000000000 +0100
@@ -1442,25 +1442,16 @@
         mock_ext.assert_called_once_with("ssh {} node2 'crm cluster remove -y 
-c node1'".format(constants.SSH_OPTION))
         mock_error.assert_called_once_with("Failed to remove this node from 
node2")
 
-    @mock.patch('crmsh.qdevice.QDevice.remove_qdevice_db')
-    @mock.patch('crmsh.qdevice.QDevice.remove_certification_files_on_qnetd')
-    @mock.patch('crmsh.utils.fatal')
-    @mock.patch('crmsh.bootstrap.invokerc')
-    @mock.patch('crmsh.bootstrap.stop_services')
-    @mock.patch('crmsh.xmlutil.listnodes')
-    def test_remove_self_rm_failed(self, mock_list, mock_stop_service, 
mock_invoke, mock_error, mock_rm_files, mock_rm_db):
-        mock_list.return_value = ["node1"]
-        mock_invoke.return_value = False
-        mock_error.side_effect = SystemExit
-
-        with self.assertRaises(SystemExit):
-            bootstrap._context = mock.Mock(cluster_node="node1", 
yes_to_all=True, rm_list=["file1", "file2"])
-            bootstrap.remove_self()
-
-        mock_list.assert_called_once_with(include_remote_nodes=False)
-        mock_stop_service.assert_called_once_with(bootstrap.SERVICES_STOP_LIST)
-        mock_invoke.assert_called_once_with('bash -c "rm -f file1 file2"')
-        mock_error.assert_called_once_with("Deleting the configuration files 
failed")
+    @mock.patch('crmsh.utils.package_is_installed')
+    @mock.patch('crmsh.utils.get_stdout_or_raise_error')
+    def test_rm_configuration_files(self, mock_run, mock_installed):
+        bootstrap._context = mock.Mock(rm_list=["file1", "file2"])
+        mock_installed.return_value = True
+        bootstrap.rm_configuration_files()
+        mock_run.assert_has_calls([
+            mock.call('rm -f file1 file2', remote=None),
+            mock.call('cp /usr/share/fillup-templates/sysconfig.sbd 
/etc/sysconfig/sbd', remote=None)
+            ])
 
     @mock.patch('crmsh.utils.get_iplist_from_name')
     @mock.patch('crmsh.corosync.get_values')
@@ -1506,30 +1497,14 @@
             mock.call("csync2.socket", disable=True, remote_addr=None)
             ])
 
-    @mock.patch('crmsh.utils.fatal')
-    @mock.patch('crmsh.bootstrap.invoke')
-    @mock.patch('crmsh.bootstrap.stop_services')
-    @mock.patch('crmsh.bootstrap.set_cluster_node_ip')
-    def test_remove_node_from_cluster_rm_failed(self, mock_get_ip, mock_stop, 
mock_invoke, mock_error):
-        mock_invoke.return_value = (False, None, "error")
-        mock_error.side_effect = SystemExit
-
-        with self.assertRaises(SystemExit):
-            bootstrap._context = mock.Mock(cluster_node="node1", 
rm_list=["file1", "file2"])
-            bootstrap.remove_node_from_cluster()
-
-        mock_get_ip.assert_called_once_with()
-        mock_stop.assert_called_once_with(bootstrap.SERVICES_STOP_LIST, 
remote_addr="node1")
-        mock_invoke.assert_called_once_with('ssh {} root@node1 "bash -c \\"rm 
-f file1 file2\\""'.format(constants.SSH_OPTION))
-        mock_error.assert_called_once_with("Deleting the configuration files 
failed: error")
-
+    @mock.patch('crmsh.bootstrap.rm_configuration_files')
     @mock.patch('crmsh.utils.fatal')
     @mock.patch('crmsh.bootstrap.invoke')
     @mock.patch('logging.Logger.info')
     @mock.patch('crmsh.bootstrap.stop_services')
     @mock.patch('crmsh.bootstrap.set_cluster_node_ip')
-    def test_remove_node_from_cluster_rm_node_failed(self, mock_get_ip, 
mock_stop, mock_status, mock_invoke, mock_error):
-        mock_invoke.side_effect = [(True, None, None), (False, None, "error 
data")]
+    def test_remove_node_from_cluster_rm_node_failed(self, mock_get_ip, 
mock_stop, mock_status, mock_invoke, mock_error, mock_rm_conf_files):
+        mock_invoke.side_effect = [(False, None, "error data")]
         mock_error.side_effect = SystemExit
 
         with self.assertRaises(SystemExit):
@@ -1540,19 +1515,19 @@
         mock_status.assert_called_once_with("Removing the node node1")
         mock_stop.assert_called_once_with(bootstrap.SERVICES_STOP_LIST, 
remote_addr="node1")
         mock_invoke.assert_has_calls([
-            mock.call('ssh {} root@node1 "bash -c \\"rm -f file1 
file2\\""'.format(constants.SSH_OPTION)),
             mock.call('crm node delete node1')
             ])
         mock_error.assert_called_once_with("Failed to remove node1: error 
data")
 
+    @mock.patch('crmsh.bootstrap.rm_configuration_files')
     @mock.patch('crmsh.utils.fatal')
     @mock.patch('crmsh.bootstrap.invokerc')
     @mock.patch('crmsh.bootstrap.invoke')
     @mock.patch('logging.Logger.info')
     @mock.patch('crmsh.bootstrap.stop_services')
     @mock.patch('crmsh.bootstrap.set_cluster_node_ip')
-    def test_remove_node_from_cluster_rm_csync_failed(self, mock_get_ip, 
mock_stop, mock_status, mock_invoke, mock_invokerc, mock_error):
-        mock_invoke.side_effect = [(True, None, None), (True, None, None)]
+    def test_remove_node_from_cluster_rm_csync_failed(self, mock_get_ip, 
mock_stop, mock_status, mock_invoke, mock_invokerc, mock_error, 
mock_rm_conf_files):
+        mock_invoke.side_effect = [(True, None, None)]
         mock_invokerc.return_value = False
         mock_error.side_effect = SystemExit
 
@@ -1564,7 +1539,6 @@
         mock_status.assert_called_once_with("Removing the node node1")
         mock_stop.assert_called_once_with(bootstrap.SERVICES_STOP_LIST, 
remote_addr="node1")
         mock_invoke.assert_has_calls([
-            mock.call('ssh {} root@node1 "bash -c \\"rm -f file1 
file2\\""'.format(constants.SSH_OPTION)),
             mock.call('crm node delete node1')
             ])
         mock_invokerc.assert_has_calls([
@@ -1572,6 +1546,7 @@
             ])
         mock_error.assert_called_once_with("Removing the node node1 from {} 
failed".format(bootstrap.CSYNC2_CFG))
 
+    @mock.patch('crmsh.bootstrap.rm_configuration_files')
     @mock.patch('crmsh.bootstrap.adjust_priority_fencing_delay')
     @mock.patch('crmsh.bootstrap.adjust_priority_in_rsc_defaults')
     @mock.patch('crmsh.bootstrap.csync2_update')
@@ -1585,8 +1560,8 @@
     @mock.patch('crmsh.bootstrap.stop_services')
     @mock.patch('crmsh.bootstrap.set_cluster_node_ip')
     def test_remove_node_from_cluster_hostname(self, mock_get_ip, mock_stop, 
mock_status,
-            mock_invoke, mock_invokerc, mock_error, mock_get_values, mock_del, 
mock_decrease, mock_csync2, mock_adjust_priority, mock_adjust_fence_delay):
-        mock_invoke.side_effect = [(True, None, None), (True, None, None), 
(True, None, None)]
+            mock_invoke, mock_invokerc, mock_error, mock_get_values, mock_del, 
mock_decrease, mock_csync2, mock_adjust_priority, mock_adjust_fence_delay, 
mock_rm_conf_files):
+        mock_invoke.side_effect = [(True, None, None), (True, None, None)]
         mock_invokerc.return_value = True
         mock_get_values.return_value = ["10.10.10.1"]
 
@@ -1600,7 +1575,6 @@
             ])
         mock_stop.assert_called_once_with(bootstrap.SERVICES_STOP_LIST, 
remote_addr="node1")
         mock_invoke.assert_has_calls([
-            mock.call('ssh {} root@node1 "bash -c \\"rm -f file1 
file2\\""'.format(constants.SSH_OPTION)),
             mock.call('crm node delete node1'),
             mock.call("corosync-cfgtool -R")
             ])

Reply via email to