Hello community,

here is the log from the commit of package python-Logbook for openSUSE:Factory checked in at 2019-03-12 09:52:11

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-Logbook (Old)
 and      /work/SRC/openSUSE:Factory/.python-Logbook.new.28833 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-Logbook" Tue Mar 12 09:52:11 2019 rev:3 rq:681853 version:1.4.3 Changes: -------- --- /work/SRC/openSUSE:Factory/python-Logbook/python-Logbook.changes 2018-08-12 20:55:43.281509722 +0200 +++ /work/SRC/openSUSE:Factory/.python-Logbook.new.28833/python-Logbook.changes 2019-03-12 09:52:14.811552029 +0100 @@ -1,0 +2,10 @@ +Tue Mar 5 16:39:48 UTC 2019 - Tomáš Chvátal <[email protected]> + +- Update to version 1.4.3: + * Use correct record delimiters (null for UNIX, newline for network) in SyslogHandler (thanks Jonathan Kamens) + * Try to reconnect to SyslogHandler TCP sockets when they are disconnected (thanks Jonathan Kamens) + * Use RFC 5424 format for networking logging in SyslogHandler (thanks Jonathan Kamens) + * Fixed deprecated regular expression pattern (thanks Tsuyoshi Hombashi) + * Fixed TimedRotatingFileHandler rotation (thanks Tucker Beck) + +------------------------------------------------------------------- Old: ---- Logbook-1.4.0.tar.gz New: ---- Logbook-1.4.3.tar.gz ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Other differences: ------------------ ++++++ python-Logbook.spec ++++++ --- /var/tmp/diff_new_pack.q9seBm/_old 2019-03-12 09:52:15.591551874 +0100 +++ /var/tmp/diff_new_pack.q9seBm/_new 2019-03-12 09:52:15.591551874 +0100 @@ -1,7 +1,7 @@ # # spec file for package python-Logbook # -# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany. +# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany. # # All modifications and additions to the file contributed by third parties # remain the property of their copyright owners, unless otherwise agreed @@ -12,13 +12,13 @@ # license that conforms to the Open Source Definition (Version 1.9) # published by the Open Source Initiative. -# Please submit bugfixes or comments via http://bugs.opensuse.org/ +# Please submit bugfixes or comments via https://bugs.opensuse.org/ # %{?!python_module:%define python_module() python-%{**} python3-%{**}} Name: python-Logbook -Version: 1.4.0 +Version: 1.4.3 Release: 0 Summary: A logging replacement for Python License: BSD-3-Clause @@ -59,7 +59,7 @@ sed -i 's/\r$//' LICENSE %build -export CFLAGS="%{optflags}" +export CFLAGS="%{optflags} -fno-strict-aliasing" cython-%{python2_version} logbook/_speedups.pyx %python2_build rm logbook/_speedups.c ++++++ Logbook-1.4.0.tar.gz -> Logbook-1.4.3.tar.gz ++++++ diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/CHANGES new/Logbook-1.4.3/CHANGES --- old/Logbook-1.4.0/CHANGES 2018-05-15 15:48:39.000000000 +0200 +++ new/Logbook-1.4.3/CHANGES 2019-01-16 21:34:24.000000000 +0100 @@ -1,8 +1,32 @@ Logbook Changelog ================= +Version 1.4.3 +------------- + +Released on January 16th, 2019 + +- Fixed Pypi release for 1.4.2 + +Version 1.4.2 +------------- + +Released on December 11th, 2018 + +- Use correct record delimiters (null for UNIX, newline for network) in SyslogHandler (thanks Jonathan Kamens) +- Try to reconnect to SyslogHandler TCP sockets when they are disconnected (thanks Jonathan Kamens) +- Use RFC 5424 format for networking logging in SyslogHandler (thanks Jonathan Kamens) + Here you can see the full list of changes between each Logbook release. 
+Version 1.4.1
+-------------
+
+Released on October 14th, 2018
+
+- Fixed deprecated regular expression pattern (thanks Tsuyoshi Hombashi)
+- Fixed TimedRotatingFileHandler rotation (thanks Tucker Beck)
+
 Version 1.4.0
 -------------
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/Logbook.egg-info/PKG-INFO new/Logbook-1.4.3/Logbook.egg-info/PKG-INFO
--- old/Logbook-1.4.0/Logbook.egg-info/PKG-INFO 2018-05-15 15:49:42.000000000 +0200
+++ new/Logbook-1.4.3/Logbook.egg-info/PKG-INFO 2019-01-16 21:35:24.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: Logbook
-Version: 1.4.0
+Version: 1.4.3
 Summary: A logging replacement for Python
 Home-page: http://logbook.pocoo.org/
 Author: Armin Ronacher, Georg Brandl
@@ -64,12 +64,12 @@
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
-Provides-Extra: all
-Provides-Extra: execnet
-Provides-Extra: jinja
-Provides-Extra: redis
-Provides-Extra: dev
-Provides-Extra: sqlalchemy
 Provides-Extra: test
+Provides-Extra: jinja
 Provides-Extra: zmq
+Provides-Extra: redis
+Provides-Extra: execnet
+Provides-Extra: all
 Provides-Extra: compression
+Provides-Extra: sqlalchemy
+Provides-Extra: dev
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/Logbook.egg-info/requires.txt new/Logbook-1.4.3/Logbook.egg-info/requires.txt
--- old/Logbook-1.4.0/Logbook.egg-info/requires.txt     2018-05-15 15:49:42.000000000 +0200
+++ new/Logbook-1.4.3/Logbook.egg-info/requires.txt     2019-01-16 21:35:24.000000000 +0100
@@ -1,24 +1,22 @@
 [all]
-cython
-sqlalchemy
+pytest>4.0
 execnet>=1.0.9
-pytest-cov
-redis
-brotli
-pytest
 Jinja2
+brotli
+redis
+cython
+pytest-cov>=2.6
 pyzmq
-mock
+sqlalchemy
 
 [compression]
 brotli
 
 [dev]
+pytest-cov>=2.6
 cython
-pytest
-pytest-cov
-mock
+pytest>4.0
 
 [execnet]
 execnet>=1.0.9
 
@@ -33,9 +31,8 @@
 sqlalchemy
 
 [test]
-pytest
-pytest-cov
-mock
+pytest-cov>=2.6
+pytest>4.0
 
 [zmq]
 pyzmq
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/PKG-INFO new/Logbook-1.4.3/PKG-INFO
--- old/Logbook-1.4.0/PKG-INFO  2018-05-15 15:49:42.000000000 +0200
+++ new/Logbook-1.4.3/PKG-INFO  2019-01-16 21:35:25.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: Logbook
-Version: 1.4.0
+Version: 1.4.3
 Summary: A logging replacement for Python
 Home-page: http://logbook.pocoo.org/
 Author: Armin Ronacher, Georg Brandl
@@ -64,12 +64,12 @@
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
-Provides-Extra: all
-Provides-Extra: execnet
-Provides-Extra: jinja
-Provides-Extra: redis
-Provides-Extra: dev
-Provides-Extra: sqlalchemy
 Provides-Extra: test
+Provides-Extra: jinja
 Provides-Extra: zmq
+Provides-Extra: redis
+Provides-Extra: execnet
+Provides-Extra: all
 Provides-Extra: compression
+Provides-Extra: sqlalchemy
+Provides-Extra: dev
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/logbook/__version__.py new/Logbook-1.4.3/logbook/__version__.py
--- old/Logbook-1.4.0/logbook/__version__.py    2018-05-15 15:48:39.000000000 +0200
+++ new/Logbook-1.4.3/logbook/__version__.py    2019-01-16 21:34:24.000000000 +0100
@@ -1 +1 @@
-__version__ = "1.4.0"
+__version__ = "1.4.3"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/logbook/handlers.py new/Logbook-1.4.3/logbook/handlers.py
--- old/Logbook-1.4.0/logbook/handlers.py       2018-05-15 15:48:39.000000000 +0200
+++ new/Logbook-1.4.3/logbook/handlers.py       2019-01-16 21:34:24.000000000 +0100
@@ -498,8 +498,7 @@
 
         if (not suppression_count and
                 len(self._record_limits) >= self.max_record_cache):
-            cache_items = self._record_limits.items()
-            cache_items.sort()
+            cache_items = sorted(self._record_limits.items())
             del cache_items[:int(self._record_limits)
                             * self.record_cache_prune]
             self._record_limits = dict(cache_items)
@@ -901,10 +900,14 @@
         self.timed_filename_for_current = timed_filename_for_current
 
         self._timestamp = self._get_timestamp(_datetime_factory())
-        timed_filename = self.generate_timed_filename(self._timestamp)
-
         if self.timed_filename_for_current:
-            filename = timed_filename
+            filename = self.generate_timed_filename(self._timestamp)
+        elif os.path.exists(filename):
+            self._timestamp = self._get_timestamp(
+                datetime.fromtimestamp(
+                    os.stat(filename).st_mtime
+                )
+            )
 
         FileHandler.__init__(self, filename, mode, encoding, level,
                              format_string, True, filter, bubble)
@@ -932,14 +935,14 @@
         """
         directory = os.path.dirname(self._filename)
         files = []
+        rollover_regex = re.compile(self.rollover_format.format(
+            basename=re.escape(self.basename),
+            timestamp='.+',
+            ext=re.escape(self.ext),
+        ))
         for filename in os.listdir(directory):
             filename = os.path.join(directory, filename)
-            regex = self.rollover_format.format(
-                basename=re.escape(self.basename),
-                timestamp='.+',
-                ext=re.escape(self.ext),
-            )
-            if re.match(regex, filename):
+            if rollover_regex.match(filename):
                 files.append((os.path.getmtime(filename), filename))
         files.sort()
         if self.backup_count > 1:
@@ -951,15 +954,19 @@
         if self.stream is not None:
             self.stream.close()
 
+        if (
+            not self.timed_filename_for_current
+            and os.path.exists(self._filename)
+        ):
+            filename = self.generate_timed_filename(self._timestamp)
+            os.rename(self._filename, filename)
+
         if self.backup_count > 0:
             for time, filename in self.files_to_delete():
                 os.remove(filename)
 
         if self.timed_filename_for_current:
             self._filename = self.generate_timed_filename(new_timestamp)
-        else:
-            filename = self.generate_timed_filename(self._timestamp)
-            os.rename(self._filename, filename)
         self._timestamp = new_timestamp
 
         self._open('w')
@@ -1054,7 +1061,7 @@
     def has_debugs(self):
         """`True` if any :data:`DEBUG` records were found."""
         return any(r.level == DEBUG for r in self.records)
-    
+
     @property
     def has_traces(self):
         """`True` if any :data:`TRACE` records were found."""
@@ -1107,7 +1114,7 @@
         """
         kwargs['level'] = DEBUG
         return self._test_for(*args, **kwargs)
-    
+
     def has_trace(self, *args, **kwargs):
         """`True` if a specific :data:`TRACE` log record exists.
 
@@ -1527,7 +1534,7 @@
     def __init__(self, application_name=None, address=None,
                  facility='user', socktype=socket.SOCK_DGRAM,
                  level=NOTSET, format_string=None, filter=None,
-                 bubble=False):
+                 bubble=False, record_delimiter=None):
         Handler.__init__(self, level, filter, bubble)
         StringFormatterHandlerMixin.__init__(self, format_string)
         self.application_name = application_name
@@ -1538,14 +1545,24 @@
             else:
                 address = '/dev/log'
 
-        self.address = address
+        self.remote_address = self.address = address
         self.facility = facility
         self.socktype = socktype
 
         if isinstance(address, string_types):
             self._connect_unixsocket()
+            self.enveloper = self.unix_envelope
+            default_delimiter = u'\x00'
         else:
             self._connect_netsocket()
+            self.enveloper = self.net_envelope
+            default_delimiter = u'\n'
+
+        self.record_delimiter = default_delimiter \
+            if record_delimiter is None else record_delimiter
+
+        self.connection_exception = getattr(
+            __builtins__, 'BrokenPipeError', socket.error)
 
     def _connect_unixsocket(self):
         self.unixsocket = True
@@ -1561,7 +1578,7 @@
         self.unixsocket = False
         self.socket = socket.socket(socket.AF_INET, self.socktype)
         if self.socktype == socket.SOCK_STREAM:
-            self.socket.connect(self.address)
+            self.socket.connect(self.remote_address)
             self.address = self.socket.getsockname()
 
     def encode_priority(self, record):
@@ -1570,15 +1587,44 @@
                                                self.LOG_WARNING)
         return (facility << 3) | priority
 
-    def emit(self, record):
-        prefix = u('')
-        if self.application_name is not None:
-            prefix = self.application_name + u(':')
-        self.send_to_socket((u('<%d>%s%s\x00') % (
+    def wrap_segments(self, record, before):
+        msg = self.format(record)
+        segments = [segment for segment in msg.split(self.record_delimiter)]
+        return (before + segment + self.record_delimiter
+                for segment in segments)
+
+    def unix_envelope(self, record):
+        before = u'<{}>{}'.format(
             self.encode_priority(record),
-            prefix,
-            self.format(record)
-        )).encode('utf-8'))
+            self.application_name + ':' if self.application_name else '')
+        return self.wrap_segments(record, before)
+
+    def net_envelope(self, record):
+        # Gross but effective
+        try:
+            format_string = self.format_string
+            application_name = self.application_name
+            if not application_name and record.channel and \
+               '{record.channel}: ' in format_string:
+                self.format_string = format_string.replace(
+                    '{record.channel}: ', '')
+                self.application_name = record.channel
+            # RFC 5424: <PRIVAL>version timestamp hostname app-name procid
+            # msgid structured-data message
+            before = u'<{}>1 {}Z {} {} {} - - '.format(
+                self.encode_priority(record),
+                record.time.isoformat(),
+                socket.gethostname(),
+                self.application_name if self.application_name else '-',
+                record.process)
+            return self.wrap_segments(record, before)
+        finally:
+            self.format_string = format_string
+            self.application_name = application_name
+
+    def emit(self, record):
+        for segment in self.enveloper(record):
+            self.send_to_socket(segment.encode('utf-8'))
 
     def send_to_socket(self, data):
         if self.unixsocket:
@@ -1591,7 +1637,11 @@
             # the flags are no longer optional on Python 3
             self.socket.sendto(data, 0, self.address)
         else:
-            self.socket.sendall(data)
+            try:
+                self.socket.sendall(data)
+            except self.connection_exception:
+                self._connect_netsocket()
+                self.socket.send(data)
 
     def close(self):
         self.socket.close()
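The SyslogHandler hunks above add a record_delimiter keyword with per-transport defaults (a null byte for UNIX sockets, a newline for network targets), RFC 5424 envelopes for network logging, and reconnection of broken TCP sockets. A minimal usage sketch, not taken from the package sources; the host name, port and application name are invented:

    import socket

    import logbook

    # Network target: records get the RFC 5424 envelope and the default
    # newline delimiter; record_delimiter overrides that default.
    handler = logbook.SyslogHandler(
        'myapp',                      # application_name placed in the envelope
        ('syslog.example.org', 514),  # hypothetical remote collector
        socktype=socket.SOCK_STREAM,  # TCP; the handler reconnects and resends on a broken pipe
        record_delimiter=u'\n',
    )
    with handler.applicationbound():
        logbook.Logger('testlogger').warn('Syslog is weird')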
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/logbook/more.py new/Logbook-1.4.3/logbook/more.py
--- old/Logbook-1.4.0/logbook/more.py   2018-05-15 15:48:39.000000000 +0200
+++ new/Logbook-1.4.3/logbook/more.py   2019-01-16 21:34:24.000000000 +0100
@@ -38,7 +38,7 @@
 else:
     from urllib.parse import parse_qsl, urlencode
 
-_ws_re = re.compile(r'(\s+)(?u)')
+_ws_re = re.compile(r'(\s+)', re.UNICODE)
 TWITTER_FORMAT_STRING = u(
     '[{record.channel}] {record.level_name}: {record.message}')
 TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/setup.py new/Logbook-1.4.3/setup.py
--- old/Logbook-1.4.0/setup.py  2018-05-15 15:48:39.000000000 +0200
+++ new/Logbook-1.4.3/setup.py  2019-01-16 21:34:24.000000000 +0100
@@ -157,7 +157,10 @@
     exec(version_file.read())  # pylint: disable=W0122
 
 extras_require = dict()
-extras_require['test'] = set(['pytest', 'pytest-cov'])
+if sys.version_info[:2] < (3, 0):
+    extras_require['test'] = set(['pytest', 'pytest-cov<2.6'])
+else:
+    extras_require['test'] = set(['pytest>4.0', 'pytest-cov>=2.6'])
 
 if sys.version_info[:2] < (3, 3):
     extras_require['test'] |= set(['mock'])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/tests/test_file_handler.py new/Logbook-1.4.3/tests/test_file_handler.py
--- old/Logbook-1.4.0/tests/test_file_handler.py        2018-05-15 15:48:39.000000000 +0200
+++ new/Logbook-1.4.3/tests/test_file_handler.py        2019-01-16 21:34:24.000000000 +0100
@@ -1,5 +1,6 @@
 import os
 import pytest
+import time
 from datetime import datetime
 
 import logbook
@@ -167,16 +168,27 @@
     assert f.readline().rstrip() == '[01:00] Third One'
     assert f.readline().rstrip() == '[02:00] Third One'
 
+
 @pytest.mark.parametrize("backup_count", [1, 3])
-def test_timed_rotating_file_handler__not_timed_filename_for_current(tmpdir, activation_strategy, backup_count):
[email protected]("preexisting_file", [True, False])
+def test_timed_rotating_file_handler__not_timed_filename_for_current(
+    tmpdir, activation_strategy, backup_count, preexisting_file
+):
     basename = str(tmpdir.join('trot.log'))
+
+    if preexisting_file:
+        with open(basename, 'w') as file:
+            file.write('contents')
+        jan_first = time.mktime(datetime(2010, 1, 1).timetuple())
+        os.utime(basename, (jan_first, jan_first))
+
     handler = logbook.TimedRotatingFileHandler(
-        basename, backup_count=backup_count,
+        basename,
+        format_string='[{record.time:%H:%M}] {record.message}',
+        backup_count=backup_count,
         rollover_format='{basename}{ext}.{timestamp}',
         timed_filename_for_current=False,
     )
-    handler._timestamp = handler._get_timestamp(datetime(2010, 1, 5))
-    handler.format_string = '[{record.time:%H:%M}] {record.message}'
 
     def fake_record(message, year, month, day, hour=0,
                     minute=0, second=0):
@@ -195,10 +207,15 @@
     for x in xrange(20):
         handler.handle(fake_record('Last One', 2010, 1, 8, x + 1))
 
-    files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot'))
+    computed_files = [x for x in os.listdir(str(tmpdir)) if x.startswith('trot')]
+
+    expected_files = ['trot.log.2010-01-01'] if preexisting_file else []
+    expected_files += ['trot.log.2010-01-0{0}'.format(i) for i in xrange(5, 8)]
+    expected_files += ['trot.log']
+    expected_files = expected_files[-backup_count:]
+
+    assert sorted(computed_files) == sorted(expected_files)
 
-    assert files == ['trot.log'] + ['trot.log.2010-01-0{0}'.format(i)
-                                    for i in xrange(5, 8)][-backup_count:]
     with open(str(tmpdir.join('trot.log'))) as f:
         assert f.readline().rstrip() == '[01:00] Last One'
         assert f.readline().rstrip() == '[02:00] Last One'
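The test changes above exercise the TimedRotatingFileHandler fix: with timed_filename_for_current=False the handler now renames a pre-existing log file on rollover, taking its timestamp from the file's mtime, instead of discarding it. A small configuration sketch, not from the sources; the path is invented:

    import logbook

    handler = logbook.TimedRotatingFileHandler(
        '/var/log/myapp/trot.log',    # hypothetical location; the current file keeps this name
        format_string='[{record.time:%H:%M}] {record.message}',
        backup_count=3,               # keep only the three newest rotated files
        rollover_format='{basename}{ext}.{timestamp}',
        timed_filename_for_current=False,
    )
    with handler.applicationbound():
        logbook.Logger('myapp').info('rotated to trot.log.<timestamp> on date change')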
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/tests/test_logging_compat.py new/Logbook-1.4.3/tests/test_logging_compat.py
--- old/Logbook-1.4.0/tests/test_logging_compat.py      2018-05-15 15:48:39.000000000 +0200
+++ new/Logbook-1.4.3/tests/test_logging_compat.py      2019-01-16 21:34:24.000000000 +0100
@@ -33,7 +33,7 @@
     with redirected_logging(set_root_logger_level):
         logger.debug('This is from the old system')
         logger.info('This is from the old system')
-        logger.warn('This is from the old %s', 'system')
+        logger.warning('This is from the old %s', 'system')
         logger.error('This is from the old system')
         logger.critical('This is from the old system')
         logger.error('This is a %(what)s %(where)s', {'what': 'mapping', 'where': 'test'})
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/tests/test_mail_handler.py new/Logbook-1.4.3/tests/test_mail_handler.py
--- old/Logbook-1.4.0/tests/test_mail_handler.py        2018-05-15 15:48:39.000000000 +0200
+++ new/Logbook-1.4.3/tests/test_mail_handler.py        2019-01-16 21:34:24.000000000 +0100
@@ -40,11 +40,11 @@
     header, data = mail.split('\n\n', 1)
     if 'Content-Transfer-Encoding: base64' in header:
         data = base64.b64decode(data).decode('utf-8')
-    assert re.search('Message type:\s+ERROR', data)
-    assert re.search('Location:.*%s' %
+    assert re.search(r'Message type:\s+ERROR', data)
+    assert re.search(r'Location:.*%s' %
                      re.escape(__file_without_pyc__), data)
-    assert re.search('Module:\s+%s' % __name__, data)
-    assert re.search('Function:\s+test_mail_handler', data)
+    assert re.search(r'Module:\s+%s' % __name__, data)
+    assert re.search(r'Function:\s+test_mail_handler', data)
     body = u('Viva la Espa\xf1a')
     if sys.version_info < (3, 0):
         body = body.encode('utf-8')
@@ -72,14 +72,14 @@
 
     body, rest = pieces
     rest = rest.replace('\r', '')
 
-    assert re.search('Message type:\s+ERROR', body)
-    assert re.search('Module:\s+%s' % __name__, body)
-    assert re.search('Function:\s+test_mail_handler_batching', body)
+    assert re.search(r'Message type:\s+ERROR', body)
+    assert re.search(r'Module:\s+%s' % __name__, body)
+    assert re.search(r'Function:\s+test_mail_handler_batching', body)
 
     related = rest.strip().split('\n\n')
     assert len(related) == 2
-    assert re.search('Message type:\s+WARNING', related[0])
-    assert re.search('Message type:\s+DEBUG', related[1])
+    assert re.search(r'Message type:\s+WARNING', related[0])
+    assert re.search(r'Message type:\s+DEBUG', related[1])
 
     assert 'And this triggers it again' in mail_handler.mails[1][2]
@@ -101,14 +101,14 @@
 
     body, rest = pieces
     rest = rest.replace('\r', '')
 
-    assert re.search('Message type:\\s+ERROR', body)
-    assert re.search('Module:\s+' + __name__, body)
-    assert re.search('Function:\s+test_group_handler_mail_combo', body)
+    assert re.search(r'Message type:\s+ERROR', body)
+    assert re.search(r'Module:\s+' + __name__, body)
+    assert re.search(r'Function:\s+test_group_handler_mail_combo', body)
 
     related = rest.strip().split('\n\n')
     assert len(related) == 2
-    assert re.search('Message type:\s+WARNING', related[0])
-    assert re.search('Message type:\s+DEBUG', related[1])
+    assert re.search(r'Message type:\s+WARNING', related[0])
+    assert re.search(r'Message type:\s+DEBUG', related[1])
 
 def test_mail_handler_arguments():
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Logbook-1.4.0/tests/test_syslog_handler.py new/Logbook-1.4.3/tests/test_syslog_handler.py
--- old/Logbook-1.4.0/tests/test_syslog_handler.py      2018-05-15 15:48:39.000000000 +0200
+++ new/Logbook-1.4.3/tests/test_syslog_handler.py      2019-01-16 21:34:24.000000000 +0100
@@ -1,4 +1,5 @@
 import os
+import re
 import socket
 from contextlib import closing
 
@@ -7,33 +8,59 @@
 
 import pytest
 
+unix_socket = "/tmp/__unixsock_logbook.test"
 
-def test_syslog_handler(logger, activation_strategy, unix_sock_path):
-    to_test = [
-        (socket.AF_INET, ('127.0.0.1', 0)),
-    ]
-    if hasattr(socket, 'AF_UNIX'):
-        to_test.append((socket.AF_UNIX, unix_sock_path))
-    for sock_family, address in to_test:
-        with closing(socket.socket(sock_family, socket.SOCK_DGRAM)) as inc:
-            inc.bind(address)
-            inc.settimeout(1)
-            for app_name in [None, 'Testing']:
-                handler = logbook.SyslogHandler(app_name, inc.getsockname())
-                with activation_strategy(handler):
-                    logger.warn('Syslog is weird')
-                try:
+to_test = [
+    (socket.AF_INET, socket.SOCK_DGRAM, ('127.0.0.1', 0)),
+    (socket.AF_INET, socket.SOCK_STREAM, ('127.0.0.1', 0)),
+]
+if hasattr(socket, 'AF_UNIX'):
+    to_test.append((socket.AF_UNIX, socket.SOCK_DGRAM, unix_socket))
+
+
[email protected]("unix_sock_path")
[email protected]("sock_family,socktype,address", to_test)
+def test_syslog_handler(logger, activation_strategy,
+                        sock_family, socktype, address):
+    delimiter = {socket.AF_UNIX: '\x00',
+                 socket.AF_INET: '\n'}[sock_family]
+    with closing(socket.socket(sock_family, socktype)) as inc:
+        inc.bind(address)
+        if socktype == socket.SOCK_STREAM:
+            inc.listen(0)
+        inc.settimeout(1)
+        for app_name in [None, 'Testing']:
+            if sock_family == socket.AF_UNIX:
+                expected = (r'^<12>%stestlogger: Syslog is weird%s$' %
+                            (app_name + ':' if app_name else '',
+                             delimiter))
+            else:
+                expected = (r'^<12>1 \d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?Z %s %s %d '
+                            '- - %sSyslog is weird%s$' %
+                            (socket.gethostname(),
+                             app_name if app_name else 'testlogger',
+                             os.getpid(), 'testlogger: ' if app_name else '',
+                             delimiter))
+
+            handler = logbook.SyslogHandler(app_name, inc.getsockname(),
+                                            socktype=socktype)
+            with activation_strategy(handler):
+                logger.warn('Syslog is weird')
+            try:
+                if socktype == socket.SOCK_STREAM:
+                    with closing(inc.accept()[0]) as inc2:
+                        rv = inc2.recv(1024)
+                else:
                     rv = inc.recvfrom(1024)[0]
-                except socket.error:
-                    assert False, 'got timeout on socket'
-                assert rv == (
-                    u('<12>%stestlogger: Syslog is weird\x00') %
-                    ((app_name and (app_name + u(':'))) or u(''))).encode('utf-8')
+            except socket.error:
+                assert False, 'got timeout on socket'
+            rv = rv.decode('utf-8')
+            assert re.match(expected, rv), \
+                'expected {}, got {}'.format(expected, rv)
 
 
 @pytest.fixture
 def unix_sock_path(request):
-    returned = "/tmp/__unixsock_logbook.test"
+    returned = unix_socket
 
     @request.addfinalizer
     def cleanup():
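For reference, the '<12>' that the updated syslog tests expect is the RFC 5424 PRIVAL for the 'user' facility (1) combined with WARNING severity (4). A hedged sketch of the prefix net_envelope builds, mirroring the format string in the handlers.py hunk above; the host name, app name and pid below are invented:

    import os
    import socket
    from datetime import datetime

    facility, severity = 1, 4             # 'user' facility, WARNING level
    prival = (facility << 3) | severity   # -> 12, the '<12>' in the expected regex
    prefix = '<{}>1 {}Z {} {} {} - - '.format(
        prival,
        datetime(2019, 1, 16, 21, 34, 24).isoformat(),
        socket.gethostname(),
        'myapp',
        os.getpid(),
    )
    print(prefix + 'testlogger: Syslog is weird')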
