Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-dbf for openSUSE:Factory checked in at 2021-10-16 22:47:18
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-dbf (Old)
 and      /work/SRC/openSUSE:Factory/.python-dbf.new.1890 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-dbf"

Sat Oct 16 22:47:18 2021 rev:7 rq:925631 version:0.99.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-dbf/python-dbf.changes    2020-10-07 14:18:37.505492301 +0200
+++ /work/SRC/openSUSE:Factory/.python-dbf.new.1890/python-dbf.changes 2021-10-16 22:47:54.540705508 +0200
@@ -1,0 +2,11 @@
+Thu Sep  2 12:49:17 UTC 2021 - John Paul Adrian Glaubitz <adrian.glaub...@suse.com>
+
+- Update to 0.99.1:
+  * fix: encoding error, warnings
+  * add TimeStamp type to DB3 tables
+  * Import ABC from collections.abc for Python 3.9 compatibility.
+  * support date/time/datetime as argument to Period
+  * make Vapor objects Falsey
+  * add more null tests
+
+-------------------------------------------------------------------

Old:
----
  dbf-0.99.0.tar.gz

New:
----
  dbf-0.99.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-dbf.spec ++++++
--- /var/tmp/diff_new_pack.4wS64G/_old  2021-10-16 22:47:54.864705781 +0200
+++ /var/tmp/diff_new_pack.4wS64G/_new  2021-10-16 22:47:54.868705785 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-dbf
 #
-# Copyright (c) 2020 SUSE LLC
+# Copyright (c) 2021 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -18,7 +18,7 @@
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-dbf
-Version:        0.99.0
+Version:        0.99.1
 Release:        0
 Summary:        Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf
 License:        BSD-3-Clause

++++++ dbf-0.99.0.tar.gz -> dbf-0.99.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.0/PKG-INFO new/dbf-0.99.1/PKG-INFO
--- old/dbf-0.99.0/PKG-INFO     2020-08-13 20:35:25.211380500 +0200
+++ new/dbf-0.99.1/PKG-INFO     2021-03-03 16:35:11.956314000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: dbf
-Version: 0.99.0
+Version: 0.99.1
 Summary: Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos)
 Home-page: https://github.com/ethanfurman/dbf
 Author: Ethan Furman
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.0/dbf/WHATSNEW new/dbf-0.99.1/dbf/WHATSNEW
--- old/dbf-0.99.0/dbf/WHATSNEW 2020-08-13 20:06:56.000000000 +0200
+++ new/dbf-0.99.1/dbf/WHATSNEW 2021-03-03 16:35:10.000000000 +0100
@@ -1,6 +1,15 @@
 What's New
 ==========
 
+
+0.99.001
+--------
+
+Vapor objects are now Falsey
+'@' TimeStamp column type added to db3 tables
+fix encoding error when exporting csv files
+
+
 0.99.000
 --------
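The WHATSNEW entries above are user-visible; here is a minimal sketch of the two headline additions, assuming the '@' field-spec syntax that the __init__.py diff below introduces (table and field names are hypothetical, and the example is not part of the package):

    import datetime
    import dbf

    # '@' TimeStamp column in a db3 table (in-memory, so no file is written)
    table = dbf.Table('events', 'happened @; note C(20)', dbf_type='db3', on_disk=False)
    table.open(dbf.READ_WRITE)
    table.append({'happened': datetime.datetime(2021, 9, 2, 12, 49, 17), 'note': 'update'})
    print(table[-1].happened)    # expected: 2021-09-02 12:49:17
    table.close()

    # Vapor objects are now Falsey
    assert not dbf.Vapor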
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.0/dbf/__init__.py new/dbf-0.99.1/dbf/__init__.py
--- old/dbf-0.99.0/dbf/__init__.py      2020-08-13 20:06:56.000000000 +0200
+++ new/dbf-0.99.1/dbf/__init__.py      2021-03-03 16:35:11.000000000 +0100
@@ -33,7 +33,6 @@
 from __future__ import with_statement, print_function
 
 import codecs
-import collections
 import csv
 import datetime
 import decimal
@@ -66,6 +65,7 @@
     unicode = unicode
     basestring = bytes, unicode
     baseinteger = int, long
+    import collections as collections_abc
 else:
     bytes = bytes
     unicode = str
@@ -73,8 +73,9 @@
     baseinteger = int,
     long = int
     xrange = range
+    import collections.abc as collections_abc
 
-version = 0, 99, 0
+version = 0, 99, 1
 
 NoneType = type(None)
@@ -624,6 +625,7 @@
     MEMO = b'M'
     NUMERIC = b'N'
     PICTURE = b'P'
+    TIMESTAMP = b'@'
 
 @export(module)
 class FieldFlag(IntFlag):
@@ -760,7 +762,7 @@
         self.data = data
 
 
-class FieldMissingError(KeyError, DbfError):
+class FieldMissingError(AttributeError, KeyError, DbfError):
     """
     Field does not exist in table
     """
@@ -803,7 +805,7 @@
         self.data = data
 
 
-class DbfWarning(Exception):
+class DbfWarning(UserWarning):
     """
     Normal operations elicit this response
     """
@@ -923,6 +925,19 @@
     def __ne__(self, other):
         return True
 
+    if py_ver < (3, 0):
+        def __nonzero__(self):
+            """
+            Vapor objects are always False
+            """
+            return False
+    else:
+        def __bool__(self):
+            """
+            Vapor objects are always False
+            """
+            return False
+
 Vapor = Vapor()
 
@@ -1962,6 +1977,19 @@
     "for matching various time ranges"
 
     def __init__(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None):
+        #
+        if year:
+            attrs = []
+            if isinstance(year, (Date, datetime.date)):
+                attrs = ['year','month','day']
+            elif isinstance(year, (DateTime, datetime.datetime)):
+                attrs = ['year','month','day','hour','minute','second']
+            elif isinstance(year, (Time, datetime.time)):
+                attrs = ['hour','minute','second']
+            for attr in attrs:
+                value = getattr(year, attr)
+                self._mask[attr] = value
+        #
         params = vars()
         self._mask = {}
         for attr in ('year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'):
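Per the changelog line "support date/time/datetime as argument to Period", the hunk above seeds the time-range mask from a single date/time/datetime value. A sketch of the intended equivalence (not verified here; Period's exact matching behaviour is outside this diff):

    import datetime
    import dbf

    # the first positional argument may now be a date; the two forms
    # below are meant to build the same year/month/day mask
    p1 = dbf.Period(datetime.date(2021, 10, 16))
    p2 = dbf.Period(year=2021, month=10, day=16)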
@@ -4383,6 +4411,58 @@
         data[:4] = update_integer(moment.toordinal() + VFPTIME)
     return to_bytes(data)
 
+def retrieve_clp_timestamp(bytes, fielddef, *ignore):
+    """
+    returns the timestamp stored in bytes
+    """
+    # First long represents date and second long time.
+    # Date is the number of days since January 1st, 4713 BC.
+    # Time is hours * 3600000L + minutes * 60000L + seconds * 1000L
+    # http://www.manmrk.net/tutorials/database/xbase/data_types.html
+    if bytes == array('B', [0] * 8):
+        cls = fielddef[EMPTY]
+        if cls is NoneType:
+            return None
+        return cls()
+    cls = fielddef[CLASS]
+    days = unpack_long_int(bytes[:4])
+    # how many days between -4713-01-01 and 0001-01-01 ? going to guess 1,721,425
+    BC = 1721425
+    if days < BC:
+        # bail
+        cls = fielddef[EMPTY]
+        if cls is NoneType:
+            return None
+        return cls()
+    date = datetime.date.fromordinal(days-BC)
+    time = unpack_long_int(bytes[4:])
+    microseconds = (time % 1000) * 1000
+    time = time // 1000     # int(round(time, -3)) // 1000 discard milliseconds
+    hours = time // 3600
+    mins = time % 3600 // 60
+    secs = time % 3600 % 60
+    time = datetime.time(hours, mins, secs, microseconds)
+    return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond)
+
+def update_clp_timestamp(moment, *ignore):
+    """
+    Sets the timestamp stored in moment
+    moment must have fields:
+        year, month, day, hour, minute, second, microsecond
+    """
+    data = [0] * 8
+    if moment:
+        BC = 1721425
+        days = BC + moment.toordinal()
+        hour = moment.hour
+        minute = moment.minute
+        second = moment.second
+        millisecond = moment.microsecond // 1000    # convert from millionths to thousandths
+        time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond
+        data[:4] = pack_long_int(days)
+        data[4:] = pack_long_int(time)
+    return to_bytes(data)
+
 def retrieve_vfp_memo(bytes, fielddef, memo, decoder):
     """
     Returns the block of data from a memo file
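A self-contained round-trip of the byte layout the two functions above implement: the first 32-bit long is the Julian day number (offset 1,721,425 from the proleptic Gregorian ordinal), the second is milliseconds since midnight. struct with little-endian unsigned longs stands in here for the library's pack_long_int/unpack_long_int helpers (the little-endian byte order is an assumption of this sketch):

    import datetime
    import struct

    JULIAN_OFFSET = 1721425      # days from 4713-01-01 BC to 0001-01-01

    def clp_pack(moment):
        # two 32-bit little-endian longs: Julian day, then milliseconds since midnight
        days = JULIAN_OFFSET + moment.toordinal()
        millis = ((moment.hour * 3600 + moment.minute * 60 + moment.second) * 1000
                  + moment.microsecond // 1000)
        return struct.pack('<LL', days, millis)

    def clp_unpack(data):
        days, millis = struct.unpack('<LL', data)
        date = datetime.date.fromordinal(days - JULIAN_OFFSET)
        secs, millis = divmod(millis, 1000)
        hours, rem = divmod(secs, 3600)
        return datetime.datetime(date.year, date.month, date.day,
                                 hours, rem // 60, rem % 60, millis * 1000)

    stamp = datetime.datetime(2021, 10, 16, 22, 47, 18, 500000)
    assert clp_unpack(clp_pack(stamp)) == stamp   # sub-millisecond precision is lost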
@@ -4591,6 +4671,16 @@
         raise FieldSpecError("Decimals must be between 0 and Length-2 (Length: %d, Decimals: %d)" % (length, decimals))
     return length, decimals, flag
 
+def add_clp_timestamp(format, flags):
+    if any(f not in flags for f in format):
+        raise FieldSpecError("Format for TimeStamp field creation is '@%s', not '@%s'" % field_spec_error_text(format, flags))
+    length = 8
+    decimals = 0
+    flag = 0
+    for f in format[1:]:
+        flag |= FieldFlag.lookup(f)
+    return length, decimals, flag
+
 def field_spec_error_text(format, flags):
     """
     generic routine for error text for the add...() functions
@@ -4884,6 +4974,7 @@
     Container class for storing per table metadata
     """
     blankrecord = None
+    codepage = None             # code page being used (can be overridden when table is opened)
     dfd = None                  # file handle
     fields = None               # field names
     field_count = 0             # number of fields
@@ -5479,6 +5570,7 @@
             if codepage is not None:
                 header.codepage(codepage)
                 cp, sd, ld = _codepage_lookup(codepage)
+                self._meta.codepage = sd
                 self._meta.decoder, self._meta.encoder = unicode_error_handler(codecs.getdecoder(sd), codecs.getencoder(sd), unicode_errors)
             if field_specs:
                 meta.status = READ_WRITE
@@ -5488,6 +5580,7 @@
                 if codepage is None:
                     header.codepage(default_codepage)
                     cp, sd, ld = _codepage_lookup(header.codepage())
+                    self._meta.codepage = sd
                     self._meta.decoder, self._meta.encoder = unicode_error_handler(codecs.getdecoder(sd), codecs.getencoder(sd), unicode_errors)
                 self.add_fields(field_specs)
         else:
@@ -5495,37 +5588,41 @@
             try:
                 dfd = meta.dfd = open(meta.filename, 'rb')
             except IOError:
-                e= sys.exc_info()[1]
+                e = sys.exc_info()[1]
                 raise DbfError(unicode(e)).from_exc(None)
             dfd.seek(0)
-            meta.header = header = self._TableHeader(dfd.read(32), self._pack_date, self._unpack_date)
-            if not header.version in self._supported_tables:
+            try:
+                meta.header = header = self._TableHeader(dfd.read(32), self._pack_date, self._unpack_date)
+                if not header.version in self._supported_tables:
+                    dfd.close()
+                    raise DbfError(
+                        "%s does not support %s [%x]" %
+                        (self._version,
+                        version_map.get(header.version, 'Unknown: %s' % header.version),
+                        header.version))
+                if codepage is None:
+                    cp, sd, ld = _codepage_lookup(header.codepage())
+                    self._meta.codepage = sd
+                    self._meta.decoder, self._meta.encoder = unicode_error_handler(codecs.getdecoder(sd), codecs.getencoder(sd), unicode_errors)
+                fieldblock = array('B', dfd.read(header.start - 32))
+                for i in range(len(fieldblock) // 32 + 1):
+                    fieldend = i * 32
+                    if fieldblock[fieldend] == CR:
+                        break
+                else:
+                    raise BadDataError("corrupt field structure in header")
+                if len(fieldblock[:fieldend]) % 32 != 0:
+                    raise BadDataError("corrupt field structure in header")
+                old_length = header.data[10:12]
+                header.fields = fieldblock[:fieldend]
+                header.data = header.data[:10] + old_length + header.data[12:]    # restore original for testing
+                header.extra = fieldblock[fieldend + 1:]                          # skip trailing \r
+                self._initialize_fields()
+                self._check_memo_integrity()
+                dfd.seek(0)
+            except DbfError:
                 dfd.close()
-                dfd = None
-                raise DbfError(
-                    "%s does not support %s [%x]" %
-                    (self._version,
-                    version_map.get(header.version, 'Unknown: %s' % header.version),
-                    header.version))
-            if codepage is None:
-                cp, sd, ld = _codepage_lookup(header.codepage())
-                self._meta.decoder, self._meta.encoder = unicode_error_handler(codecs.getdecoder(sd), codecs.getencoder(sd), unicode_errors)
-            fieldblock = array('B', dfd.read(header.start - 32))
-            for i in range(len(fieldblock) // 32 + 1):
-                fieldend = i * 32
-                if fieldblock[fieldend] == CR:
-                    break
-            else:
-                raise BadDataError("corrupt field structure in header")
-            if len(fieldblock[:fieldend]) % 32 != 0:
-                raise BadDataError("corrupt field structure in header")
-            old_length = header.data[10:12]
-            header.fields = fieldblock[:fieldend]
-            header.data = header.data[:10] + old_length + header.data[12:]    # restore original for testing
-            header.extra = fieldblock[fieldend + 1:]    # skip trailing \r
-            self._initialize_fields()
-            self._check_memo_integrity()
-            dfd.seek(0)
+                raise
 
         for field in meta.fields:
             field_type = meta[field][TYPE]
@@ -5780,7 +5877,7 @@
                 # ignore
                 break
             elif frame[0] != __file__ or frame[2] not in ('__init__','add_fields'):
-                warnings.warn('"%s invalid:  field names should start with a letter, and only contain letters, digits, and _' % name, FieldNameWarning, stacklevel=i)
+                warnings.warn('%r is invalid:  field names should start with a letter, and only contain letters, digits, and _' % name, FieldNameWarning, stacklevel=i)
                 break
         if name in meta.fields:
             raise DbfError("Field '%s' already exists" % name)
@@ -6346,19 +6443,24 @@
             FLOAT: {
                     'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric,
                     'Class':'default', 'Empty':none, 'flags':tuple(),
-                    } }
+                    },
+            TIMESTAMP: {
+                    'Type':'TimeStamp', 'Retrieve':retrieve_clp_timestamp, 'Update':update_clp_timestamp, 'Blank':lambda x: b'\x00' * 8, 'Init':add_clp_timestamp,
+                    'Class':datetime.datetime, 'Empty':none, 'flags':tuple(),
+                    },
+            }
 
     _memoext = '.dbt'
     _memoClass = _Db3Memo
     _yesMemoMask = 0x80
     _noMemoMask = 0x7f
-    _binary_types = ()
+    _binary_types = (TIMESTAMP, )
     _character_types = (CHAR, MEMO)
     _currency_types = tuple()
     _date_types = (DATE, )
-    _datetime_types = tuple()
+    _datetime_types = (TIMESTAMP, )
     _decimal_types = (NUMERIC, FLOAT)
-    _fixed_types = (DATE, LOGICAL, MEMO)
+    _fixed_types = (DATE, LOGICAL, MEMO, TIMESTAMP)
     _logical_types = (LOGICAL, )
     _memo_types = (MEMO, )
     _numeric_types = (NUMERIC, FLOAT)
@@ -8631,15 +8733,9 @@
     if format == 'fixed':
         format = 'txt'
     if encoding is None:
-        encoding = table.codepage.name
+        encoding = table._meta.codepage
     encoder = codecs.getencoder(encoding)
     header_names = field_names
-    # encoding = table.codepage.name
-    # encoder = codecs.getencoder(encoding)
-    if isinstance(field_names[0], unicode):
-        header_names = [encoder(f) for f in field_names]
-    else:
-        header_names = field_names
     base, ext = os.path.splitext(filename)
     if ext.lower() in ('', '.dbf'):
         filename = base + "." + format
@@ -9054,7 +9150,7 @@
     except no_more_records:
         return False
 
-def scatter(record, as_type=create_template, _mappings=getattr(collections, 'Mapping', dict)):
+def scatter(record, as_type=create_template, _mappings=getattr(collections_abc, 'Mapping', dict)):
     """
     returns as_type() of [fieldnames and] values.
     """
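The collections_abc changes above exist because newer Python 3 releases no longer expose the ABCs (Mapping, etc.) from the collections module itself, which broke the old getattr(collections, 'Mapping', dict) default in scatter(). The version-guarded import is the usual shim, shown here standalone:

    import sys

    if sys.version_info < (3, 0):
        import collections as collections_abc       # Python 2: ABCs live in collections
    else:
        import collections.abc as collections_abc   # Python 3: ABCs live in collections.abc

    assert isinstance({}, collections_abc.Mapping)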
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.0/dbf/test.py new/dbf-0.99.1/dbf/test.py
--- old/dbf-0.99.0/dbf/test.py  2020-08-13 20:06:56.000000000 +0200
+++ new/dbf-0.99.1/dbf/test.py  2021-03-03 16:35:10.000000000 +0100
@@ -6,7 +6,8 @@
 import tempfile
 import shutil
 import stat
-from unittest import skipIf, TestCase as unittest_TestCase
+import warnings
+from unittest import skipIf, skipUnless, TestCase as unittest_TestCase
 
 py_ver = sys.version_info[:2]
 module = globals()
@@ -29,6 +30,12 @@
 MISC = ''.join([chr(i) for i in range(256)]).encode('latin-1')
 PHOTO = ''.join(reversed([chr(i) for i in range(256)])).encode('latin-1')
 
+try:
+    with warnings.catch_warnings():
+        warnings.warn('test if warning is an exception', dbf.DbfWarning, stacklevel=1)
+    warnings_are_exceptions = False
+except dbf.DbfWarning:
+    warnings_are_exceptions = True
 
 print("\nTesting dbf version %d.%02d.%03d on %s with Python %s\n" % (
     dbf.version[:3] + (sys.platform, sys.version)
     ))
@@ -42,19 +49,6 @@
             self.assertRaisesRegex = getattr(self, 'assertRaisesRegexp')
         super(TestCase, self).__init__(*args, **kwds)
 
-    @classmethod
-    def setUpClass(cls, *args, **kwds):
-        super(TestCase, cls).setUpClass(*args, **kwds)
-        ## filter warnings (example from scription)
-        # warnings.filterwarnings(
-        #         'ignore',
-        #         'inspect\.getargspec\(\) is deprecated',
-        #         DeprecationWarning,
-        #         'scription',
-        #         0,
-        #         )
-        # double check existence of temp dir
-
 # Walker in Leaves -- by Scot Noel -- http://www.scienceandfantasyfiction.com/sciencefiction/Walker-in-Leaves/walker-in-leaves.htm
@@ -2690,7 +2684,10 @@
             fields.append('a%03d C(10)' % i)
         table = Table(':test:', ';'.join(fields), on_disk=False)
         table.open(mode=READ_WRITE)
-        self.assertRaises(DbfError, table.allow_nulls, 'a001')
+        try:
+            self.assertRaises(DbfError, table.allow_nulls, 'a001')
+        finally:
+            table.close()
 
     def test_adding_existing_field_to_table(self):
         table = Table(':blah:', 'name C(50)', on_disk=False)
@@ -2774,6 +2771,7 @@
 
 class TestWarnings(TestCase):
 
+    @skipIf(warnings_are_exceptions, '-W error specified')
     def test_field_name_warning(self):
         with warnings.catch_warnings(record=True) as w:
             huh = dbf.Table('cloud', 'p^type C(25)', on_disk=False).open(dbf.READ_WRITE)
@@ -2787,6 +2785,17 @@
             warning = w[-1]
             self.assertTrue(issubclass(warning.category, dbf.FieldNameWarning))
 
+    @skipUnless(warnings_are_exceptions, 'warnings are just warnings')
+    def test_field_name_exceptions(self):
+        with self.assertRaisesRegex(dbf.FieldNameWarning, "is invalid"):
+            huh = dbf.Table('cloud', 'p^type C(25)', on_disk=False).open(dbf.READ_WRITE)
+        with self.assertRaisesRegex(dbf.FieldNameWarning, "is invalid"):
+            huh = dbf.Table('cloud', 'name C(25)', on_disk=False).open(dbf.READ_WRITE)
+            try:
+                huh.add_fields('c^word C(50)')
+            finally:
+                huh.close()
+
 
 class TestIndexLocation(TestCase):
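The warnings_are_exceptions probe added near the top of test.py (which detects a `python -W error` run, now that DbfWarning subclasses UserWarning) generalizes to any Warning subclass; a sketch of the pattern, with a hypothetical helper name:

    import warnings

    def filters_turn_into_exceptions(category):
        # under `python -W error`, warnings.warn() raises instead of printing;
        # catch_warnings() restores the global filter state afterwards
        try:
            with warnings.catch_warnings():
                warnings.warn('probe', category, stacklevel=1)
            return False
        except category:
            return True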
@@ -2900,9 +2909,7 @@
         for i in range(1, len(fields)+1):
             for fieldlist in combinate(fields, i):
                 table = Table(os.path.join(tempdir, 'tempvfp'), u';'.join(fieldlist), dbf_type='vfp')
-                table.close()
                 table = Table(os.path.join(tempdir, 'tempvfp'), dbf_type='vfp')
-                table.close()
                 actualFields = table.structure()
                 fieldlist = [f.replace('nocptrans','BINARY') for f in fieldlist]
                 self.assertEqual(fieldlist, actualFields)
@@ -2989,10 +2996,19 @@
                 dbf_type='vfp',
                 default_data_types='enhanced',
                 )
+        self.null_vfp_table = null_table = Table(
+                os.path.join(tempdir, 'null_vfp_table'),
+                'first C(25) null; last C(25); height N(3,1) null; age N(3,0); life_story M null; plans M',
+                dbf_type='vfp',
+                )
+        null_table.open(dbf.READ_WRITE)
+        null_table.append()
+        null_table.close()
 
     def tearDown(self):
         self.dbf_table.close()
         self.vfp_table.close()
+        self.null_vfp_table.close()
 
     def test_slicing(self):
         table = self.dbf_table
@@ -3082,6 +3098,7 @@
             self.assertEqual(record.desc, desclist[i])
             i += 1
         self.assertEqual(i, len(table))
+        table.close()
 
     def test_vfp_adding_records(self):
         "vfp table: adding records"
@@ -3222,6 +3239,7 @@
             self.assertEqual(record.price, 0)
             self.assertEqual(table[i].price, 0)
             i += 1
+        table.close()
 
     def test_char_memo_return_type(self):
         "check character fields return type"
@@ -3641,21 +3659,71 @@
         table.close()
 
     def test_blank_record_template_uses_null(self):
+        nullable = self.null_vfp_table
+        with nullable:
+            rec = nullable[-1]
+            self.assertTrue(rec.first is Null, "rec.first is %r" % (rec.first, ))
+            self.assertTrue(rec.last == ' '*25, "rec.last is %r" % (rec.last, ))
+            self.assertTrue(rec.height is Null, "rec.height is %r" % (rec.height, ))
+            self.assertTrue(rec.age is None, "rec.age is %r" % (rec.age, ))
+            self.assertTrue(rec.life_story is Null, "rec.life_story is %r" % (rec.life_story, ))
+            self.assertTrue(rec.plans == '', "rec.plans is %r" % (rec.plans, ))
+        nullable.close()
         nullable = Table(
-                'nowhere',
-                'name C(25) null; age N(3,0); life_story M null',
-                dbf_type='vfp',
+                self.null_vfp_table.filename,
                 default_data_types='enhanced',
-                on_disk=False,
                 )
         with nullable:
-            nullable.append()
             rec = nullable[-1]
-            self.assertTrue(rec.name is Null, "rec.name is %r" % (rec.name, ))
+            self.assertTrue(rec.first is Null, "rec.first is %r" % (rec.first, ))
+            self.assertTrue(rec.last == '', "rec.last is %r" % (rec.last, ))
+            self.assertTrue(rec.height is Null, "rec.height is %r" % (rec.height, ))
             self.assertTrue(rec.age is None, "rec.age is %r" % (rec.age, ))
-            # nullable.append(('ethan', 50, Null))
-            # rec = nullable[-1]
             self.assertTrue(rec.life_story is Null, "rec.life_story is %r" % (rec.life_story, ))
+            self.assertTrue(rec.plans == '', "rec.plans is %r" % (rec.plans, ))
+        nullable.close()
+        nullable = Table(
+                self.null_vfp_table.filename,
+                default_data_types=dict(
+                        C=(Char, NoneType, NullType),
+                        L=(Logical, NoneType, NullType),
+                        D=(Date, NoneType, NullType),
+                        T=(DateTime, NoneType, NullType),
+                        M=(Char, NoneType, NullType),
+                        ),
+                )
+        with nullable:
+            rec = nullable[-1]
+            self.assertTrue(rec.first is Null, "rec.first is %r" % (rec.first, ))
+            self.assertTrue(rec.last is None, "rec.last is %r" % (rec.last, ))
+            self.assertTrue(rec.height is Null, "rec.height is %r" % (rec.height, ))
+            self.assertTrue(rec.age is None, "rec.age is %r" % (rec.age, ))
+            self.assertTrue(rec.life_story is Null, "rec.life_story is %r" % (rec.life_story, ))
+            self.assertTrue(rec.plans is None, "rec.plans is %r" % (rec.plans, ))
+
+    def test_new_record_with_partial_fields_respects_null(self):
+        nullable = self.null_vfp_table
+        nullable.close()
+        nullable = Table(
+                self.null_vfp_table.filename,
+                default_data_types=dict(
+                        C=(Char, NoneType, NullType),
+                        L=(Logical, NoneType, NullType),
+                        D=(Date, NoneType, NullType),
+                        T=(DateTime, NoneType, NullType),
+                        M=(Char, NoneType, NullType),
+                        ),
+                )
+        with nullable:
+            nullable.append({'first': 'ethan', 'last':'doe'})
+            rec = nullable[-1]
+            self.assertTrue(rec.first == 'ethan', "rec.first is %r" % (rec.first, ))
+            self.assertTrue(rec.last == 'doe', "rec.last is %r" % (rec.last, ))
+            self.assertTrue(rec.height is Null, "rec.height is %r" % (rec.height, ))
+            self.assertTrue(rec.age is None, "rec.age is %r" % (rec.age, ))
+            self.assertTrue(rec.life_story is Null, "rec.life_story is %r" % (rec.life_story, ))
+            self.assertTrue(rec.plans is None, "rec.plans is %r" % (rec.plans, ))
+        nullable.close()
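In the default_data_types mapping used twice above, each field-type letter apparently maps to a (value_class, class_when_empty, class_when_null) triple, which is why rec.last comes back as None (empty) while rec.first is Null (stored NULL). A trimmed sketch, assuming Char and NullType are exported by dbf as they are used in the test suite (the filename is hypothetical):

    import dbf

    nullable = dbf.Table(
            'null_vfp_table.dbf',
            default_data_types=dict(
                    # Char for values, None when empty, Null when stored NULL
                    C=(dbf.Char, type(None), dbf.NullType),
                    ),
            )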
 
    def test_flux_internal(self):
        "commit and rollback of flux record (implementation detail)"
@@ -4589,6 +4657,7 @@
         for i in index(table):
             for j in index(table.field_names):
                 self.assertEqual(str(table[i][j]).strip(), csvtable[i][j].strip())
+        csvtable.close()
 
     def test_resize_empty(self):
         "resize"
@@ -5388,6 +5457,13 @@
     "test DBC handling"
 
 
+class TestVapor(TestCase):
+    "test Vapor objects"
+
+    def test_falsey(self):
+        self.assertFalse(dbf.Vapor)
+
+
 class TestMisc(TestCase):
     "miscellaneous tests"
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.0/dbf.egg-info/PKG-INFO new/dbf-0.99.1/dbf.egg-info/PKG-INFO
--- old/dbf-0.99.0/dbf.egg-info/PKG-INFO        2020-08-13 20:35:24.000000000 +0200
+++ new/dbf-0.99.1/dbf.egg-info/PKG-INFO        2021-03-03 16:35:11.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: dbf
-Version: 0.99.0
+Version: 0.99.1
 Summary: Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos)
 Home-page: https://github.com/ethanfurman/dbf
 Author: Ethan Furman
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dbf-0.99.0/setup.py new/dbf-0.99.1/setup.py
--- old/dbf-0.99.0/setup.py     2020-08-13 20:06:56.000000000 +0200
+++ new/dbf-0.99.1/setup.py     2021-03-03 16:35:11.000000000 +0100
@@ -21,7 +21,7 @@
 
 data = dict(
         name='dbf',
-        version='0.99.0',
+        version='0.99.1',
         license='BSD License',
         description='Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos)',
         long_description=long_desc,