Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-dbf for openSUSE:Factory checked in at 2023-01-04 20:18:26
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-dbf (Old)
 and      /work/SRC/openSUSE:Factory/.python-dbf.new.1563 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-dbf" Wed Jan 4 20:18:26 2023 rev:9 rq:1055946 version:0.99.3 Changes: -------- --- /work/SRC/openSUSE:Factory/python-dbf/python-dbf.changes 2022-10-12 18:27:38.246078963 +0200 +++ /work/SRC/openSUSE:Factory/.python-dbf.new.1563/python-dbf.changes 2023-01-04 20:18:29.173590971 +0100 @@ -1,0 +2,6 @@ +Wed Jan 4 16:19:13 UTC 2023 - Dirk Müller <[email protected]> + +- update to 0.99.3: + * no upstream changelog available + +------------------------------------------------------------------- Old: ---- dbf-0.99.2.tar.gz New: ---- dbf-0.99.3.tar.gz ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Other differences: ------------------ ++++++ python-dbf.spec ++++++ --- /var/tmp/diff_new_pack.4sC6aW/_old 2023-01-04 20:18:29.769594515 +0100 +++ /var/tmp/diff_new_pack.4sC6aW/_new 2023-01-04 20:18:29.773594539 +0100 @@ -1,7 +1,7 @@ # # spec file for package python-dbf # -# Copyright (c) 2022 SUSE LLC +# Copyright (c) 2023 SUSE LLC # # All modifications and additions to the file contributed by third parties # remain the property of their copyright owners, unless otherwise agreed @@ -18,7 +18,7 @@ %{?!python_module:%define python_module() python-%{**} python3-%{**}} Name: python-dbf -Version: 0.99.2 +Version: 0.99.3 Release: 0 Summary: Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf License: BSD-3-Clause ++++++ dbf-0.99.2.tar.gz -> dbf-0.99.3.tar.gz ++++++ diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.2/PKG-INFO new/dbf-0.99.3/PKG-INFO --- old/dbf-0.99.2/PKG-INFO 2022-05-09 21:04:40.991866000 +0200 +++ new/dbf-0.99.3/PKG-INFO 2022-12-27 21:18:42.309202700 +0100 @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: dbf -Version: 0.99.2 +Version: 0.99.3 Summary: Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos) Home-page: https://github.com/ethanfurman/dbf Author: Ethan Furman diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.2/dbf/__init__.py new/dbf-0.99.3/dbf/__init__.py --- old/dbf-0.99.2/dbf/__init__.py 2022-05-09 20:49:00.000000000 +0200 +++ new/dbf-0.99.3/dbf/__init__.py 2022-12-27 21:02:48.000000000 +0100 @@ -75,7 +75,7 @@ xrange = range import collections.abc as collections_abc -version = 0, 99, 2 +version = 0, 99, 3 NoneType = type(None) @@ -5706,15 +5706,16 @@ else: version = 'unknown - ' + hex(self._meta.header.version) str = """ - Table: %s - Type: %s - Codepage: %s [%s] - Status: %s - Last updated: %s - Record count: %d - Field count: %d - Record length: %d """ % (self.filename, version, - self.codepage, encoder, status, + Table: %s + Type: %s + Listed Codepage: %s + Used Codepage: %s + Status: %s + Last updated: %s + Record count: %d + Field count: %d + Record length: %d """ % (self.filename, version, + code_pages[self._meta.header.codepage()][1], self.codepage, status, self.last_update, len(self), self.field_count, self.record_length) str += "\n --Fields--\n" for i in range(len(self.field_names)): @@ -5726,7 +5727,7 @@ """ code page used for text translation """ - return CodePage(code_pages[self._meta.header.codepage()][0]) + return CodePage(self._meta.codepage) @codepage.setter def codepage(self, codepage): @@ -6344,7 +6345,11 @@ meta = self._meta if meta.status != READ_WRITE: raise DbfError('%s not in read/write mode, unable to change field size' % meta.filename) - if not 0 < new_size < 256: + if self._versionabbr == 'clp': + max_size = 65535 + else: + max_size = 255 + 
if not 0 < new_size <= max_size: raise DbfError("new_size must be between 1 and 255 (use delete_fields to remove a field)") chosen = self._list_fields(chosen) for candidate in chosen: @@ -6748,8 +6753,6 @@ old_fields[name]['type'] = meta[name][TYPE] old_fields[name]['empty'] = meta[name][EMPTY] old_fields[name]['class'] = meta[name][CLASS] - meta.fields[:] = [] - offset = 1 fieldsdef = meta.header.fields if len(fieldsdef) % 32 != 0: raise BadDataError( @@ -6761,43 +6764,57 @@ % (meta.header.field_count, len(fieldsdef) // 32)) total_length = meta.header.record_length nulls_found = False - for i in range(meta.header.field_count): - fieldblock = fieldsdef[i*32:(i+1)*32] - name = self._meta.decoder(unpack_str(fieldblock[:11]))[0] - type = fieldblock[11] - if not type in meta.fieldtypes: - raise BadDataError("Unknown field type: %s" % type) - start = unpack_long_int(fieldblock[12:16]) - length = fieldblock[16] - decimals = fieldblock[17] - if type == CHAR: - length += decimals * 256 - offset += length - end = start + length - flags = fieldblock[18] - null = flags & NULLABLE - if null: - nulls_found = True - if name in meta.fields: - raise BadDataError('Duplicate field name found: %s' % name) - meta.fields.append(name) - if name in old_fields and old_fields[name]['type'] == type: - cls = old_fields[name]['class'] - empty = old_fields[name]['empty'] + starters = set() # keep track of starting values in case header is poorly created + for starter in ('header', 'offset'): + meta.fields[:] = [] + offset = 1 + for i in range(meta.header.field_count): + fieldblock = fieldsdef[i*32:(i+1)*32] + name = self._meta.decoder(unpack_str(fieldblock[:11]))[0] + type = fieldblock[11] + if not type in meta.fieldtypes: + raise BadDataError("Unknown field type: %s" % type) + if starter == 'header': + start = unpack_long_int(fieldblock[12:16]) + if start in starters: + # poor header + break + starters.add(start) + else: + start = offset + length = fieldblock[16] + decimals = fieldblock[17] + if type == CHAR: + length += decimals * 256 + end = start + length + offset += length + flags = fieldblock[18] + null = flags & NULLABLE + if null: + nulls_found = True + if name in meta.fields: + raise BadDataError('Duplicate field name found: %s' % name) + meta.fields.append(name) + if name in old_fields and old_fields[name]['type'] == type: + cls = old_fields[name]['class'] + empty = old_fields[name]['empty'] + else: + cls = meta.fieldtypes[type]['Class'] + empty = meta.fieldtypes[type]['Empty'] + meta[name] = ( + type, + start, + length, + end, + decimals, + flags, + cls, + empty, + null + ) else: - cls = meta.fieldtypes[type]['Class'] - empty = meta.fieldtypes[type]['Empty'] - meta[name] = ( - type, - start, - length, - end, - decimals, - flags, - cls, - empty, - null - ) + # made it through all the fields + break if offset != total_length: raise BadDataError( "Header shows record length of %d, but calculated record length is %d" @@ -8741,18 +8758,28 @@ filename = base + "." 
+ format with codecs.open(filename, 'w', encoding=encoding) as fd: if format == 'csv': - csvfile = csv.writer(fd, dialect=dialect) - if header: - csvfile.writerow(header_names) + if header is True: + fd.write(','.join(header_names)) + fd.write('\n') + elif header: + fd.write(','.join(header)) + fd.write('\n') for record in table_or_records: fields = [] for fieldname in field_names: data = record[fieldname] + if isinstance(data, basestring) and data: + data = '"%s"' % data.replace('"','""') + elif data is None: + data = '' fields.append(unicode(data)) - csvfile.writerow(fields) + fd.write(','.join(fields)) + fd.write('\n') elif format == 'tab': - if header: + if header is True: fd.write('\t'.join(header_names) + '\n') + elif header: + fd.write(','.join(header)) for record in table_or_records: fields = [] for fieldname in field_names: @@ -8760,15 +8787,30 @@ fields.append(unicode(data)) fd.write('\t'.join(fields) + '\n') else: # format == 'fixed' - with codecs.open("%s_layout.txt" % os.path.splitext(filename)[0], 'w', encoding=encoding) as header: - header.write("%-15s Size\n" % "Field Name") - header.write("%-15s ----\n" % ("-" * 15)) + if header is True: + header = False # don't need it + elif header: + # names to use as field names + header = list(header) # in case header is an iterator + with codecs.open("%s_layout.txt" % os.path.splitext(filename)[0], 'w', encoding=encoding) as layout: + layout.write("%-15s Size Comment\n" % "Field Name") + layout.write("%-15s ---- -------------------------\n" % ("-" * 15)) sizes = [] - for field in field_names: - size = table.field_info(field).length + for i, field in enumerate(field_names): + info = table.field_info(field) + if info.field_type == ord('D'): + size = 10 + elif info.field_type in (ord('T'), ord('@')): + size = 19 + else: + size = info.length sizes.append(size) - header.write("%-15s %3d\n" % (field, size)) - header.write('\nTotal Records in file: %d\n' % len(table_or_records)) + comment = '' + if header and i < len(header): + # use overridden field name as comment + comment = header[i] + layout.write("%-15s %4d %s\n" % (field, size, comment)) + layout.write('\nTotal Records in file: %d\n' % len(table_or_records)) for record in table_or_records: fields = [] for i, fieldname in enumerate(field_names): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.2/dbf.egg-info/PKG-INFO new/dbf-0.99.3/dbf.egg-info/PKG-INFO --- old/dbf-0.99.2/dbf.egg-info/PKG-INFO 2022-05-09 21:04:40.000000000 +0200 +++ new/dbf-0.99.3/dbf.egg-info/PKG-INFO 2022-12-27 21:18:42.000000000 +0100 @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: dbf -Version: 0.99.2 +Version: 0.99.3 Summary: Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos) Home-page: https://github.com/ethanfurman/dbf Author: Ethan Furman diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.2/setup.py new/dbf-0.99.3/setup.py --- old/dbf-0.99.2/setup.py 2022-05-09 20:49:00.000000000 +0200 +++ new/dbf-0.99.3/setup.py 2022-12-27 21:02:48.000000000 +0100 @@ -21,7 +21,7 @@ data = dict( name='dbf', - version='0.99.2', + version='0.99.3', license='BSD License', description='Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos)', long_description=long_desc,
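
For readers skimming the dbf/__init__.py hunks above, a minimal usage sketch may help; it is illustrative only and not part of the package diff. It shows the two user-visible changes in 0.99.3: export() now writes csv/tab output itself (header=True emits the field names, a sequence of names replaces them), and the codepage property is now built from the codepage actually in use rather than the one listed in the file header. The table name, field layout, and output file names below are assumptions made up for this sketch.

    import dbf

    # throw-away demo table; field specs and names are invented for the example
    table = dbf.Table('demo', 'name C(30); age N(3,0)', dbf_type='db3')
    table.open(mode=dbf.READ_WRITE)
    table.append(('Ada', 36))
    table.append(('Grace', 45))

    # 0.99.3: built from the codepage in use (self._meta.codepage)
    print(table.codepage)

    # header=True writes the field names as the first row;
    # passing a sequence writes those names instead
    dbf.export(table, filename='demo', format='csv', header=True)
    dbf.export(table, filename='demo2', format='csv', header=('person', 'years'))

    table.close()

The remaining hunks are internal: resize_field() now accepts sizes up to 65535 for Clipper ('clp') tables, field parsing retries with computed offsets when the start values in a poorly written header are unusable, and the fixed-format export adds a Comment column to the *_layout.txt file.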
