commit:     b5356f7674336d08ecbbe6a27602d12bb2beea5e
Author:     Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Sat Jun 10 10:04:53 2023 +0000
Commit:     Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Sat Jun 10 10:04:53 2023 +0000
URL:        https://gitweb.gentoo.org/proj/tinderbox-cluster.git/commit/?id=b5356f76

Compress logs with xz

Signed-off-by: Magnus Granberg <zorry <AT> gentoo.org>

 buildbot_gentoo_ci/steps/builders.py | 33 ++++++++++++++++++++++-----------
 py/log_parser.py                     | 11 +++++++----
 2 files changed, 29 insertions(+), 15 deletions(-)
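
In short: the bzip2-compressed work-dir bundle (tar -cjpf, .logs.tar.bz2) becomes an xz bundle (tar -cJvf, .logs.tar.xz), each build log is compressed with xz before upload, and the parser reads plain-text logs since the emerge output is now matched on '.log' rather than '.log.gz'. A minimal standalone sketch of the new tar call, using subprocess outside of Buildbot (archive name, file list, and build dir are hypothetical):

    # Sketch of the new tar invocation; -J selects xz the way -j selects bzip2.
    # The archive name and file list below are invented for illustration.
    import subprocess

    log_files = ['build.log', 'config.log']  # hypothetical find output
    subprocess.run(
        ['tar', '-cJvf', 'dev-foo_bar.42.logs.tar.xz'] + log_files,
        cwd='/var/tmp/portage/dev-foo/bar-1.0',  # hypothetical cpv_build_dir
        check=True,
    )

Note that besides switching the compressor, the new flags drop -p (preserve permissions) and add -v (verbose output).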

diff --git a/buildbot_gentoo_ci/steps/builders.py b/buildbot_gentoo_ci/steps/builders.py
index a5638b5..117fb70 100644
--- a/buildbot_gentoo_ci/steps/builders.py
+++ b/buildbot_gentoo_ci/steps/builders.py
@@ -74,7 +74,7 @@ def PersOutputOfEmerge(rc, stdout, stderr):
         if line.startswith('>>>'):
             if line.startswith('>>> Failed to'):
                 emerge_output['failed'] = line.split(' ')[4][:-1]
-            if line.endswith('.log.gz') and emerge_output['failed']:
+            if line.endswith('.log') and emerge_output['failed']:
                 log_path_list.append(line.split(' ')[2])
             #FIXME: Handling of >>> output
             pass
@@ -83,7 +83,7 @@ def PersOutputOfEmerge(rc, stdout, stderr):
             if line.startswith('!!! existing preserved libs'):
                 pass
         if line.startswith(' * '):
-            if line.endswith('.log.gz'):
+            if line.endswith('.log'):
                 log_path_list.append(line.split(' ')[4])
        #FIXME: Handling of depclean output dict of packages that get removed or saved
     emerge_output['packages'] = package_dict
@@ -122,7 +122,7 @@ def PersOutputOfEmerge(rc, stdout, stderr):
                 change_use[cpv_split[0]] = change_use_list
                 emerge_output['change_use'] = change_use
         if line.startswith(' * '):
-            if line.endswith('.log.gz'):
+            if line.endswith('.log'):
                 log_path = line.split(' ')[3]
                 if log_path not in inlog_path_list:
                     log_path_list.append(log_path)
@@ -681,13 +681,13 @@ class CheckBuildWorkDirs(BuildStep):
     def run(self):
         cpv = self.getProperty('faild_cpv')
         cpv_build_dir = yield os.path.join('/', 'var', 'tmp', 'portage', self.getProperty('cpv_build_dir'))
-        compressed_log_file = cpv.replace('/', '_') + '.' + str(self.getProperty("buildnumber")) + '.logs.tar.bz2'
+        compressed_log_file = cpv.replace('/', '_') + '.' + str(self.getProperty("buildnumber")) + '.logs.tar.xz'
         masterdest_file = yield os.path.join(self.getProperty('masterdest'), compressed_log_file)
         # cpv_build_work_dir = yield os.path.join(cpv_build_dir, 'work')
         if self.getProperty('build_workdir_find_output')['build_workdir_find'] != []:
             shell_commad_list = []
             shell_commad_list.append('tar')
-            shell_commad_list.append('-cjpf')
+            shell_commad_list.append('-cJvf')
             shell_commad_list.append(compressed_log_file)
             for find_line in sorted(self.getProperty('build_workdir_find_output')['build_workdir_find']):
                 print(find_line)
@@ -695,7 +695,7 @@ class CheckBuildWorkDirs(BuildStep):
                 shell_commad_list.append(filename)
             self.aftersteps_list.append(
                 steps.ShellCommand(
-                        name = 'Tar logs',
+                        name = 'Tar/xz logs',
                         command = shell_commad_list,
                         workdir = cpv_build_dir
             ))
@@ -761,12 +761,23 @@ class CheckEmergeLogs(BuildStep):
 
     @defer.inlineCallbacks
     def getLogFile(self, cpv, log_dict):
-        file = log_dict[cpv]['full_logname']
-        destfile = yield os.path.join(self.getProperty('masterdest'), file)
-        sourcefile = log_dict[cpv]['log_path']
+        compressed_log_file = log_dict[cpv]['full_logname'] + '.xz'
+        destfile = yield os.path.join(self.getProperty('masterdest'), compressed_log_file)
+        sourcefile = log_dict[cpv]['log_path'] + '.xz'
         name = 'Upload build log'
-        url = '/'.join([hosturl, self.getProperty('workername'), str(self.getProperty("buildnumber")), file])
-        urlText = file
+        url = '/'.join([hosturl, self.getProperty('workername'), str(self.getProperty("buildnumber")), compressed_log_file])
+        urlText = compressed_log_file
+        shell_commad_list = []
+        shell_commad_list.append('xz')
+        shell_commad_list.append('-zv')
+        shell_commad_list.append(log_dict[cpv]['full_logname'])
+        self.aftersteps_list.append(
+                steps.ShellCommand(
+                        name = 'Compress build log with xz',
+                        descriptionDone = log_dict[cpv]['full_logname'],
+                        command = shell_commad_list,
+                        workdir = os.path.dirname(log_dict[cpv]['log_path']) + '/'
+            ))
         self.addFileUploade(sourcefile, destfile, name, url, urlText)
 
     @defer.inlineCallbacks

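The getLogFile() change above queues an extra ShellCommand that runs xz -zv on the log in its own directory before the upload step runs, so the upload now references log_path + '.xz'. A hedged standalone sketch of the same step using subprocess instead of Buildbot (the path below is invented for illustration):

    # Sketch of the 'Compress build log with xz' step outside Buildbot.
    # log_path mirrors log_dict[cpv]['log_path'] from the diff; the value
    # here is hypothetical.
    import os
    import subprocess

    log_path = '/var/log/portage/dev-foo:bar-1.0:20230610.log'  # hypothetical
    full_logname = os.path.basename(log_path)

    # xz -z replaces foo.log with foo.log.xz on success; -v prints progress,
    # matching the '-zv' flags used in the diff.
    subprocess.run(['xz', '-zv', full_logname],
                   cwd=os.path.dirname(log_path) + '/',
                   check=True)

    sourcefile = log_path + '.xz'  # the file the upload step then fetches

Since xz removes the original file after compressing, the compression step has to be queued ahead of the upload that points at the .xz name.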
diff --git a/py/log_parser.py b/py/log_parser.py
index f5c4eb5..eb081a9 100644
--- a/py/log_parser.py
+++ b/py/log_parser.py
@@ -5,7 +5,6 @@ import sys
 from multiprocessing import Pool, cpu_count
 import re
 import io
-import gzip
 import json
 import os
 from sqlalchemy.ext.declarative import declarative_base
@@ -138,9 +137,13 @@ def runLogParser(args):
     log_search_pattern = get_log_search_pattern(Session, args.uuid, config['default_uuid'])
     Session.close()
     # read the log file to dict
-    for text_line in io.TextIOWrapper(io.BufferedReader(gzip.open(args.file)), encoding='utf8', errors='ignore'):
-        logfile_text_dict[index] = text_line.strip('\n')
-        index = index + 1
+    with open(args.file, encoding='utf8', errors='ignore') as f:
+        for text_line in f:
+            logfile_text_dict[index] = text_line.strip('\n')
+            index = index + 1
+    #for text_line in io.TextIOWrapper(io.BufferedReader(open(args.file)), encoding='utf8', errors='ignore'):
+    #    logfile_text_dict[index] = text_line.strip('\n')
+    #    index = index + 1
     # run the search parse pattern on the text lines
     #params = [(log_search_pattern, text, line_index,) for line_index, text in logfile_text_dict.items()]
     with getMultiprocessingPool(config) as pool:

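With compression now done on the builder side, log_parser.py receives a plain-text log and the gzip import is dropped. If the parser were ever handed the .xz file directly, the stdlib lzma module could take the old gzip wrapper's place; a sketch under that assumption (the hunk does not show where index starts, so 1 is assumed, and the path is hypothetical):

    # Hypothetical variant, not part of this commit: stream an xz-compressed
    # log line by line with the stdlib lzma module.
    import lzma

    logfile_text_dict = {}
    index = 1  # assumed starting value; not shown in the hunk
    with lzma.open('build.log.xz', 'rt', encoding='utf8', errors='ignore') as f:
        for text_line in f:
            logfile_text_dict[index] = text_line.strip('\n')
            index = index + 1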