Hello community,

here is the log from the commit of package crmsh for openSUSE:Factory checked in at 2020-09-12 00:10:21
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/crmsh (Old)
 and      /work/SRC/openSUSE:Factory/.crmsh.new.4249 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "crmsh"

Sat Sep 12 00:10:21 2020 rev:192 rq:833704 version:4.2.0+git.1599810948.3db12a7a

Changes:
--------
--- /work/SRC/openSUSE:Factory/crmsh/crmsh.changes      2020-09-10 22:55:18.128417366 +0200
+++ /work/SRC/openSUSE:Factory/.crmsh.new.4249/crmsh.changes    2020-09-12 00:10:42.221110285 +0200
@@ -1,0 +2,8 @@
+Fri Sep 11 08:10:15 UTC 2020 - [email protected]
+
+- Update to version 4.2.0+git.1599810948.3db12a7a:
+  * Dev: unittest: unit test for collect archived logs
+  * Dev: behave: functional test for collect archived logs
+  * Fix: hb_report: collect archived logs(bsc#1148873)
+
+-------------------------------------------------------------------

Old:
----
  crmsh-4.2.0+git.1599702667.157fc6b5.tar.bz2

New:
----
  crmsh-4.2.0+git.1599810948.3db12a7a.tar.bz2

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ crmsh.spec ++++++
--- /var/tmp/diff_new_pack.cU0yfp/_old  2020-09-12 00:10:43.537111545 +0200
+++ /var/tmp/diff_new_pack.cU0yfp/_new  2020-09-12 00:10:43.537111545 +0200
@@ -36,7 +36,7 @@
 Summary:        High Availability cluster command-line interface
 License:        GPL-2.0-or-later
 Group:          %{pkg_group}
-Version:        4.2.0+git.1599702667.157fc6b5
+Version:        4.2.0+git.1599810948.3db12a7a
 Release:        0
 Url:            http://crmsh.github.io
 Source0:        %{name}-%{version}.tar.bz2

++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.cU0yfp/_old  2020-09-12 00:10:43.577111583 +0200
+++ /var/tmp/diff_new_pack.cU0yfp/_new  2020-09-12 00:10:43.577111583 +0200
@@ -5,4 +5,4 @@
                 <param name="url">https://github.com/liangxin1300/crmsh.git</param>
               <param name="changesrevision">d8dc51b4cb34964aa72e918999ebc7f03b48f3c9</param></service><service name="tar_scm">
                 <param name="url">https://github.com/ClusterLabs/crmsh.git</param>
-              <param name="changesrevision">14cf53e1d0e49747fbf0b93a60f75589d28d04ef</param></service></servicedata>
\ No newline at end of file
+              <param name="changesrevision">78b300261b5edb0b0b85703acc2fe3b67df95b33</param></service></servicedata>
\ No newline at end of file
\ No newline at end of file

++++++ crmsh-4.2.0+git.1599702667.157fc6b5.tar.bz2 -> crmsh-4.2.0+git.1599810948.3db12a7a.tar.bz2 ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.2.0+git.1599702667.157fc6b5/.travis.yml new/crmsh-4.2.0+git.1599810948.3db12a7a/.travis.yml
--- old/crmsh-4.2.0+git.1599702667.157fc6b5/.travis.yml 2020-09-10 03:51:07.000000000 +0200
+++ new/crmsh-4.2.0+git.1599810948.3db12a7a/.travis.yml 2020-09-11 09:55:48.000000000 +0200
@@ -24,6 +24,12 @@
       script:
        - docker run -t -v "$(pwd):/app" $IMAGE /bin/sh -c "cd /app; TOXENV=py38-codeclimate; tox"
 
+    - name: "functional test for hb_report"
+      before_install:
+        - $FUNCTIONAL_TEST hb_report before_install
+      script:
+        - $FUNCTIONAL_TEST hb_report run bugs 
+
     - name: "regression test for bootstrap bugs"
       before_install:
         - $FUNCTIONAL_TEST bootstrap before_install
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.2.0+git.1599702667.157fc6b5/data-manifest new/crmsh-4.2.0+git.1599810948.3db12a7a/data-manifest
--- old/crmsh-4.2.0+git.1599702667.157fc6b5/data-manifest       2020-09-10 03:51:07.000000000 +0200
+++ new/crmsh-4.2.0+git.1599810948.3db12a7a/data-manifest       2020-09-11 09:55:48.000000000 +0200
@@ -71,6 +71,7 @@
 test/features/bootstrap_sbd.feature
 test/features/environment.py
 test/features/geo_setup.feature
+test/features/hb_report_bugs.feature
 test/features/qdevice_options.feature
 test/features/qdevice_setup_remove.feature
 test/features/qdevice_usercase.feature
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.2.0+git.1599702667.157fc6b5/hb_report/utillib.py new/crmsh-4.2.0+git.1599810948.3db12a7a/hb_report/utillib.py
--- old/crmsh-4.2.0+git.1599702667.157fc6b5/hb_report/utillib.py        2020-09-10 03:51:07.000000000 +0200
+++ new/crmsh-4.2.0+git.1599810948.3db12a7a/hb_report/utillib.py        2020-09-11 09:55:48.000000000 +0200
@@ -2,6 +2,7 @@
 # See COPYING for license information.
 
 import bz2
+import lzma
 import datetime
 import glob
 import gzip
@@ -76,8 +77,9 @@
     """
     ret = []
     files = [logf]
-    #files += glob.glob(logf+"*[0-9z]")
-    for f in sorted(files, key=os.path.getctime):
+    files += glob.glob(logf+"*[0-9z]")
+    # like ls -t, newest first
+    for f in sorted(files, key=os.path.getmtime, reverse=True):
         # reset this var to check every file's format
         constants.GET_STAMP_FUNC = None
         res = is_our_log(f, from_time, to_time)
@@ -501,7 +503,7 @@
     else:
         out_string += print_logseg(oldest, from_time, 0)
         for f in mid_logfiles:
-            out_string += print_log(f)
+            out_string += print_logseg(f, 0, 0)
             log_debug("including complete %s logfile" % f)
         out_string += print_logseg(newest, 0, to_time)
 
@@ -591,19 +593,15 @@
     return ts
 
 
-def filter_lines(logf, from_line, to_line=None):
+def filter_lines(data, from_line, to_line):
     out_string = ""
-    if not to_line:
-        to_line = sum(1 for l in open(logf, 'r', encoding='utf-8', errors='replace'))
-
     count = 1
-    with open(logf, 'r', encoding='utf-8', errors='replace') as f:
-        for line in f.readlines():
-            if count >= from_line and count <= to_line:
-                out_string += line
-            if count > to_line:
-                break
-            count += 1
+    for line in data.split('\n'):
+        if count >= from_line and count <= to_line:
+            out_string += line + '\n'
+        if count > to_line:
+            break
+        count += 1
     return out_string
 
 
@@ -717,43 +715,47 @@
         constants.SSH_USER = ssh_user
 
 
-def findln_by_time(logf, tm):
-    tmid = None
-    first = 1
-    last = sum(1 for l in open(logf, 'r', encoding='utf-8', errors='replace'))
+def findln_by_time(data, ts):
+    '''
+    Get line number of the specific time stamp
+    '''
+    data_list = data.split('\n')
+
+    first= 1
+    last= len(data_list)
+    time_middle = None
+
     while first <= last:
-        mid = (last+first)//2
+        middle = (last + first) // 2
         trycnt = 10
         while trycnt > 0:
-            res = line_time(logf, mid)
+            res = line_time(data_list, middle)
             if res:
-                tmid = int(res)
+                time_middle = res
                 break
-            log_debug("cannot extract time: %s:%d; will try the next one" % 
(logf, mid))
             trycnt -= 1
             # shift the whole first-last segment
-            prevmid = mid
-            while prevmid == mid:
+            prevmid = middle
+            while prevmid == middle:
                 first -= 1
                 if first < 1:
                     first = 1
                 last -= 1
                 if last < first:
                     last = first
-                prevmid = mid
-                mid = (last+first)//2
+                prevmid = middle
+                middle = (last + first) // 2
                 if first == last:
                     break
-        if not tmid:
-            log_warning("giving up on log...")
-            return
-        if int(tmid) > tm:
-            last = mid - 1
-        elif int(tmid) < tm:
-            first = mid + 1
+        if not time_middle:
+            return None
+        if time_middle > ts:
+            last = middle - 1
+        elif time_middle < ts:
+            first = middle + 1
         else:
             break
-    return mid
+    return middle
 
 
 def get_backtraces():
@@ -1178,10 +1180,12 @@
     """
     check if the log contains a piece of our segment
     """
-    with open(logf, 'r', encoding='utf-8', errors="replace") as fd:
-        data = fd.read()
-        first_time = find_first_ts(head(10, data))
-        last_time = find_first_ts(tail(10, data))
+    data = read_from_file(logf)
+    if not data:
+        log_debug("Found empty file \"{}\"; exclude".format(logf))
+        return 0
+    first_time = find_first_ts(head(10, data))
+    last_time = find_first_ts(tail(10, data))
 
     if (not first_time) or (not last_time):
         if os.stat(logf).st_size > 0:
@@ -1198,13 +1202,12 @@
         return 0  # don't include this log
 
 
-def line_time(logf, line_num):
-    ts = None
-    with open(logf, 'r', encoding='utf-8', errors='replace') as fd:
-        line_res = head(line_num, fd.read())
-        if line_res:
-            ts = get_ts(line_res[-1])
-    return ts
+
+def line_time(data_list, line_num):
+    '''
+    Get time stamp of the specific line
+    '''
+    return get_ts(data_list[line_num-1])
 
 
 def load_ocf_dirs():
@@ -1363,38 +1366,24 @@
 
 
 def print_logseg(logf, from_time, to_time):
-    cat = find_decompressor(logf)
-    if cat != "cat":
-        tmp = create_tempfile()
-        add_tempfiles(tmp)
-
-        cmd = "%s %s > %s" % (cat, logf, tmp)
-        code, out, err = crmutils.get_stdout_stderr(cmd)
-        if code != 0:
-            log_fatal("maybe disk full: %s" % err)
-        sourcef = tmp
-    else:
-        sourcef = logf
-        tmp = ""
+    data = read_from_file(logf)
 
     if from_time == 0:
-        FROM_LINE = 1
+        from_line = 1
     else:
-        FROM_LINE = findln_by_time(sourcef, from_time)
-
-    if not FROM_LINE:
-        log_warning("couldn't find line for time %d; corrupt log file?" % 
from_time)
-        return ""
+        from_line = findln_by_time(data, from_time)
+        if from_line is None:
+            return ""
 
-    TO_LINE = ""
-    if to_time != 0:
-        TO_LINE = findln_by_time(sourcef, to_time)
-        if not TO_LINE:
-            log_warning("couldn't find line for time %d; corrupt log file?" % 
to_time)
+    if to_time == 0:
+        to_line = len(data.split('\n'))
+    else:
+        to_line = findln_by_time(data, to_time)
+        if to_line is None:
             return ""
 
-    log_debug("including segment [%s-%s] from %s" % (FROM_LINE, TO_LINE, 
sourcef))
-    return dump_log(sourcef, FROM_LINE, TO_LINE)
+    log_debug("Including segment [{}-{}] from {}".format(from_line, to_line, 
logf))
+    return filter_lines(data, from_line, to_line)
 
 
 def ra_build_info():
@@ -1768,4 +1757,33 @@
     else:
         return False
 
+
+def get_open_method(infile):
+    file_type_open_dict = {
+            "gz": gzip.open,
+            "bz2": bz2.open,
+            "xz": lzma.open
+            }
+    try:
+        _open = file_type_open_dict[infile.split('.')[-1]]
+    except KeyError:
+        _open = open
+    return _open
+
+
+def read_from_file(infile):
+    data = None
+    _open = get_open_method(infile)
+    with _open(infile, 'rt', encoding='utf-8', errors='replace') as f:
+        data = f.read()
+    return crmutils.to_ascii(data)
+
+
+def write_to_file(tofile, data):
+    _open = get_open_method(tofile)
+    with _open(tofile, 'w') as f:
+        if _open == open:
+            f.write(data)
+        else:
+            f.write(data.encode('utf-8'))
 # vim:ts=4:sw=4:et:
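
The utillib.py hunks above restore archived-log collection: rotated siblings matching logf*[0-9z] are globbed in again, sorted by mtime (newest first), and every file is now read through an extension-aware opener so gz/bz2/xz archives are decompressed transparently, with findln_by_time()/filter_lines() working on the in-memory text. Below is a minimal standalone sketch of that approach; the function names and the example path are illustrative only and not part of hb_report itself.

import bz2
import glob
import gzip
import lzma
import os


def open_method_for(path):
    # Pick an opener from the file extension; fall back to plain open().
    openers = {"gz": gzip.open, "bz2": bz2.open, "xz": lzma.open}
    return openers.get(path.split('.')[-1], open)


def read_log(path):
    # Return the decompressed text of a possibly compressed log file.
    with open_method_for(path)(path, 'rt', encoding='utf-8', errors='replace') as f:
        return f.read()


def collect_log_series(logf):
    # Active log plus rotated/compressed siblings (log.1, log.2.xz, ...), newest first.
    candidates = [p for p in [logf] + glob.glob(logf + "*[0-9z]") if os.path.isfile(p)]
    for path in sorted(candidates, key=os.path.getmtime, reverse=True):
        yield path, read_log(path)


# Example usage (assumes /var/log/log1 and a rotated /var/log/log1.xz exist,
# as in the feature test further down):
#   for path, text in collect_log_series("/var/log/log1"):
#       print(path, len(text.splitlines()))

Reading each file fully into memory is what lets the rewritten findln_by_time() and filter_lines() binary-search and slice by line number without reopening compressed files.
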
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.2.0+git.1599702667.157fc6b5/test/features/hb_report_bugs.feature new/crmsh-4.2.0+git.1599810948.3db12a7a/test/features/hb_report_bugs.feature
--- old/crmsh-4.2.0+git.1599702667.157fc6b5/test/features/hb_report_bugs.feature        1970-01-01 01:00:00.000000000 +0100
+++ new/crmsh-4.2.0+git.1599810948.3db12a7a/test/features/hb_report_bugs.feature        2020-09-11 09:55:48.000000000 +0200
@@ -0,0 +1,46 @@
+@hb_report
+Feature: hb_report functional test
+
+  Tag @clean means need to stop cluster service if the service is available
+
+  @clean
+  Scenario: Verify hb_report options
+    Given   Cluster service is "stopped" on "hanode1"
+    And     Cluster service is "stopped" on "hanode2"
+    When    Run "crm cluster init -y" on "hanode1"
+    Then    Cluster service is "started" on "hanode1"
+    When    Run "crm cluster join -c hanode1 -y" on "hanode2"
+    Then    Cluster service is "started" on "hanode2"
+    And     Online nodes are "hanode1 hanode2"
+
+    When    Run "hb_report" on "hanode1"
+    Then    Default hb_report tar file created
+    When    Remove default hb_report tar file
+
+    @clean
+    Scenario: Include archived logs(bsc#1148873)
+    When    Write multi lines to file "/var/log/log1"
+      """
+      Sep 08 08:36:34 node1 log message line1
+      Sep 08 08:37:01 node1 log message line2
+      Sep 08 08:37:02 node1 log message line3
+      """
+    And     Run "xz /var/log/log1" on "hanode1"
+    When    Write multi lines to file "/var/log/log1"
+      """
+      Sep 08 09:37:02 node1 log message line4
+      Sep 08 09:37:12 node1 log message line5
+      """
+    And     Run "hb_report -f 20200901 -E /var/log/log1 report1" on "hanode1"
+    Then    File "log1" in "report1.tar.bz2"
+    When    Run "tar jxf report1.tar.bz2" on "hanode1"
+    And     Run "cat report1/hanode1/log1" on "hanode1"
+    Then    Expected multiple lines in output
+      """
+      Sep 08 08:36:34 node1 log message line1
+      Sep 08 08:37:01 node1 log message line2
+      Sep 08 08:37:02 node1 log message line3
+      Sep 08 09:37:02 node1 log message line4
+      Sep 08 09:37:12 node1 log message line5
+      """
+    When    Run "rm -rf report1.tar.gz report1" on "hanode1"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.2.0+git.1599702667.157fc6b5/test/features/steps/step_implenment.py new/crmsh-4.2.0+git.1599810948.3db12a7a/test/features/steps/step_implenment.py
--- old/crmsh-4.2.0+git.1599702667.157fc6b5/test/features/steps/step_implenment.py      2020-09-10 03:51:07.000000000 +0200
+++ new/crmsh-4.2.0+git.1599810948.3db12a7a/test/features/steps/step_implenment.py      2020-09-11 09:55:48.000000000 +0200
@@ -1,9 +1,11 @@
 import re
 import time
+import os
+import datetime
 from behave import given, when, then
 from crmsh import corosync, parallax
 from utils import check_cluster_state, check_service_state, online, run_command, me, \
-                  run_command_local_or_remote
+                  run_command_local_or_remote, file_in_archive
 import const
 
 @when('Write multi lines to file "{f}"')
@@ -35,7 +37,7 @@
 @given('IP "{addr}" is belong to "{iface}"')
 def step_impl(context, addr, iface):
     cmd = 'ip address show dev {}'.format(iface)
-    res = re.search(r' {}/'.format(addr), run_command(context, cmd))
+    res = re.search(r' {}/'.format(addr), run_command(context, cmd)[1])
     assert bool(res) is True
 
 
@@ -52,7 +54,8 @@
 
 @when('Try "{cmd}"')
 def step_impl(context, cmd):
-    run_command(context, cmd, err_record=True)
+    rc, out = run_command(context, cmd, err_record=True)
+    context.return_code = rc
 
 
 @when('Wait "{second}" seconds')
@@ -85,6 +88,11 @@
     context.stdout = None
 
 
+@then('Expected return code is "{num}"')
+def step_impl(context, num):
+    assert context.return_code == int(num)
+
+
 @then('Except "{msg}"')
 def step_impl(context, msg):
     assert context.command_error_output == msg
@@ -97,6 +105,12 @@
     context.command_error_output = None
 
 
+@then('Expected multiple lines in output')
+def step_impl(context):
+    assert context.text in context.stdout
+    context.stdout = None
+
+
 @then('Except "{msg}" in stderr')
 def step_impl(context, msg):
     assert msg in context.command_error_output
@@ -127,13 +141,13 @@
 
 @then('Cluster name is "{name}"')
 def step_impl(context, name):
-    out = run_command(context, 'corosync-cmapctl -b totem.cluster_name')
+    _, out = run_command(context, 'corosync-cmapctl -b totem.cluster_name')
     assert out.split()[-1] == name
 
 
 @then('Cluster virtual IP is "{addr}"')
 def step_impl(context, addr):
-    out = run_command(context, 'crm configure show|grep -A1 IPaddr2')
+    _, out = run_command(context, 'crm configure show|grep -A1 IPaddr2')
     res = re.search(r' ip={}'.format(addr), out)
     assert bool(res) is True
 
@@ -152,21 +166,21 @@
 
 @then('Show corosync ring status')
 def step_impl(context):
-    out = run_command(context, 'crm corosync status ring')
+    _, out = run_command(context, 'crm corosync status ring')
     if out:
         context.logger.info("\n{}".format(out))
 
 
 @then('Show status from qnetd')
 def step_impl(context):
-    out = run_command(context, 'crm corosync status qnetd')
+    _, out = run_command(context, 'crm corosync status qnetd')
     if out:
         context.logger.info("\n{}".format(out))
 
 
 @then('Show corosync qdevice configuration')
 def step_impl(context):
-    out = run_command(context, "sed -n -e '/quorum/,/^}/ p' 
/etc/corosync/corosync.conf")
+    _, out = run_command(context, "sed -n -e '/quorum/,/^}/ p' 
/etc/corosync/corosync.conf")
     if out:
         context.logger.info("\n{}".format(out))
 
@@ -177,7 +191,7 @@
     result = None
     while try_count < 5:
         time.sleep(1)
-        out = run_command(context, "crm_mon -1")
+        _, out = run_command(context, "crm_mon -1")
         if out:
             result = re.search(r'\s{}\s+.*:{}\):\s+{} '.format(res, res_type, state), out)
             if not result:
@@ -190,7 +204,7 @@
 @then('Resource "{res}" failcount on "{node}" is "{number}"')
 def step_impl(context, res, node, number):
     cmd = "crm resource failcount {} show {}".format(res, node)
-    out = run_command(context, cmd)
+    _, out = run_command(context, cmd)
     if out:
         result = re.search(r'name=fail-count-{} value={}'.format(res, number), out)
         assert result is not None
@@ -198,7 +212,7 @@
 
 @then('Resource "{res_type}" not configured')
 def step_impl(context, res_type):
-    out = run_command(context, "crm configure show")
+    _, out = run_command(context, "crm configure show")
     result = re.search(r' {} '.format(res_type), out)
     assert result is None
 
@@ -229,3 +243,25 @@
 @then('Expected votes will be "{votes}"')
 def step_impl(context, votes):
     assert int(corosync.get_value("quorum.expected_votes")) == int(votes)
+
+
+@then('Default hb_report tar file created')
+def step_impl(context):
+    default_file_name = 'hb_report-{}.tar.bz2'.format(datetime.datetime.now().strftime("%w-%d-%m-%Y"))
+    assert os.path.exists(default_file_name) is True
+
+
+@when('Remove default hb_report tar file')
+def step_impl(context):
+    default_file_name = 'hb_report-{}.tar.bz2'.format(datetime.datetime.now().strftime("%w-%d-%m-%Y"))
+    os.remove(default_file_name)
+
+
+@then('File "{f}" in "{archive}"')
+def step_impl(context, f, archive):
+    assert file_in_archive(f, archive) is True
+
+
+@then('File "{f}" not in "{archive}"')
+def step_impl(context, f, archive):
+    assert file_in_archive(f, archive) is False
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.2.0+git.1599702667.157fc6b5/test/features/steps/utils.py new/crmsh-4.2.0+git.1599810948.3db12a7a/test/features/steps/utils.py
--- old/crmsh-4.2.0+git.1599702667.157fc6b5/test/features/steps/utils.py        2020-09-10 03:51:07.000000000 +0200
+++ new/crmsh-4.2.0+git.1599810948.3db12a7a/test/features/steps/utils.py        2020-09-11 09:55:48.000000000 +0200
@@ -1,7 +1,35 @@
+import tarfile
+import glob
+import re
 import socket
 from crmsh import utils, bootstrap, parallax
 
 
+def get_file_type(file_path):
+    rc, out, _ = utils.get_stdout_stderr("file {}".format(file_path))
+    if re.search(r'{}: bzip2'.format(file_path), out):
+        return "bzip2"
+    if re.search(r'{}: directory'.format(file_path), out):
+        return "directory"
+
+
+def get_all_files(archive_path):
+    archive_type = get_file_type(archive_path)
+    if archive_type == "bzip2":
+        with tarfile.open(archive_path) as tar:
+            return tar.getnames()
+    if archive_type == "directory":
+        all_files = glob.glob("{}/*".format(archive_path)) + glob.glob("{}/*/*".format(archive_path))
+        return all_files
+
+
+def file_in_archive(f, archive_path):
+    for item in get_all_files(archive_path):
+        if re.search(r'/{}$'.format(f), item):
+            return True
+    return False
+
+
 def me():
     return socket.gethostname()
 
@@ -11,17 +39,17 @@
     if rc != 0 and err:
         if err_record:
             context.command_error_output = err
-            return
+            return rc, out
         if out:
             context.logger.info("\n{}\n".format(out))
         context.logger.error("\n{}\n".format(err))
         context.failed = True
-    return out
+    return rc, out
 
 
 def run_command_local_or_remote(context, cmd, addr, err_record=False):
     if addr == me():
-        out = run_command(context, cmd, err_record)
+        _, out = run_command(context, cmd, err_record)
         return out
     else:
         try:
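
The new file_in_archive() step helper above checks whether a collected file ended up in the generated report, whether the report is still an unpacked directory or the final bzip2 tarball. A rough self-contained sketch of the same idea follows; unlike the step helper it branches on os.path.isdir() and tarfile's compression auto-detection instead of shelling out to file(1), and the names are illustrative.

import glob
import os
import re
import tarfile


def names_in_archive(archive_path):
    # Files one and two levels deep in a directory, or member names of a tarball.
    if os.path.isdir(archive_path):
        return glob.glob("{}/*".format(archive_path)) + glob.glob("{}/*/*".format(archive_path))
    with tarfile.open(archive_path) as tar:  # default mode 'r:*' auto-detects bz2/gz/xz
        return tar.getnames()


def file_in_archive(name, archive_path):
    # True if some member path ends with /<name>.
    return any(re.search(r'/{}$'.format(re.escape(name)), item)
               for item in names_in_archive(archive_path))


# Example, matching the feature step 'File "log1" in "report1.tar.bz2"':
#   file_in_archive("log1", "report1.tar.bz2")
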
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.2.0+git.1599702667.157fc6b5/test/unittests/test_report.py new/crmsh-4.2.0+git.1599810948.3db12a7a/test/unittests/test_report.py
--- old/crmsh-4.2.0+git.1599702667.157fc6b5/test/unittests/test_report.py       2020-09-10 03:51:07.000000000 +0200
+++ new/crmsh-4.2.0+git.1599810948.3db12a7a/test/unittests/test_report.py       2020-09-11 09:55:48.000000000 +0200
@@ -96,25 +96,11 @@
 
 
 def test_filter_lines():
-    begin_line = findln_by_time(pacemaker_log, time_before)
-    end_line = findln_by_time(pacemaker_log, time_between)
-
-    out1 = filter_lines(pacemaker_log, begin_line)
-    out2 = filter_lines(pacemaker_log, begin_line, end_line)
-
-    assert len(out1.split('\n')) == 924
-    assert len(out2.split('\n')) == 804
-
-
-def test_filter_lines_unicode():
-    with open(evil_unicode_log, 'wb') as f:
-        f.write(invalid_utf8)
-    out1 = filter_lines(evil_unicode_log, 1, 3)
-    assert len(out1.split('\n')) == 2
-    os.remove(evil_unicode_log)
-
-    out2 = filter_lines(pacemaker_unicode_log, 1, 30)
-    assert len(out2.split('\n')) == 31
+    with open('pacemaker.log') as f:
+        data = f.read()
+    res = filter_lines(data, 140, 143)
+    _, expected = crmsh.utils.get_stdout("sed -n '140, 143p' pacemaker.log")
+    assert res == expected + '\n'
 
 
 def test_find_decompressor():
@@ -202,21 +188,21 @@
 
 
 def test_findln_by_time():
-    # time before log happen
-    assert findln_by_time(pacemaker_log, time_before) == 1
-    # time after log happen
-    assert findln_by_time(pacemaker_log, time_after) == 923
-    # time between log happen
-    assert findln_by_time(pacemaker_log, time_between) == 803
-
-
-def test_findln_by_time():
-    assert findln_by_time(pacemaker_unicode_log, time_before) == 1
-
-    with open(evil_unicode_log, 'wb') as f:
-        f.write(invalid_utf8)
-    assert findln_by_time(evil_unicode_log, time_before) == 1
-    os.remove(evil_unicode_log)
+    target_time = "Apr 03 13:10"
+    target_time_stamp = crmsh.utils.parse_to_timestamp(target_time)
+    with open('pacemaker.log') as f:
+        data = f.read()
+    result_line = findln_by_time(data, target_time_stamp)
+    result_line_stamp = line_time(data.split('\n'), result_line)
+    assert result_line_stamp > target_time_stamp
+    result_pre_line_stamp = line_time(data.split('\n'), result_line-1)
+    assert result_pre_line_stamp < target_time_stamp
+
+    target_time = "Apr 03 11:01:19"
+    target_time_stamp = crmsh.utils.parse_to_timestamp(target_time)
+    result_line = findln_by_time(data, target_time_stamp)
+    result_time = ' '.join(data.split('\n')[result_line-1].split()[:3])
+    assert result_time == target_time
 
 
 def test_get_stamp_rfc5424():
@@ -301,17 +287,15 @@
     os.remove(evil_unicode_log)
 
 
-def test_line_time():
-    assert ts_to_dt(line_time(pacemaker_log, 2)).strftime("%Y/%m/%d %H:%M:%S") == "%d/04/03 11:01:18" % year
-    assert ts_to_dt(line_time(pacemaker_log, 195)).strftime("%Y/%m/%d %H:%M:%S") == "%d/04/03 11:01:40" % year
-
+@mock.patch('hb_report.utillib.get_ts')
+def test_line_time(mock_get_ts):
+    mock_get_ts.return_value = 12345
+
+    data_list = ["Feb 13 13:28:57 15sp1-1 pacemaker-based", "Feb 13 13:28:57 
15sp1-1 pacemaker-based"]
+    res = line_time(data_list, 2)
+    assert res == mock_get_ts.return_value
 
-def test_line_time_unicode():
-    assert ts_to_dt(line_time(pacemaker_unicode_log, 3)).strftime("%Y/%m/%d %H:%M:%S") == "%d/04/03 11:01:18" % year
-    with open(evil_unicode_log, 'wb') as f:
-        f.write(invalid_utf8)
-    assert ts_to_dt(line_time(evil_unicode_log, 1)).strftime("%Y/%m/%d %H:%M:%S") == "%d/04/03 11:01:18" % year
-    os.remove(evil_unicode_log)
+    mock_get_ts.assert_called_once_with("Feb 13 13:28:57 15sp1-1 pacemaker-based")
 
 
 def test_random_string():

