Hello community,

here is the log from the commit of package python-arf for openSUSE:Factory 
checked in at 2020-10-05 19:41:51
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-arf (Old)
 and      /work/SRC/openSUSE:Factory/.python-arf.new.4249 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-arf"

Mon Oct  5 19:41:51 2020 rev:3 rq:839512 version:2.6.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-arf/python-arf.changes    2020-07-24 
10:00:29.901673322 +0200
+++ /work/SRC/openSUSE:Factory/.python-arf.new.4249/python-arf.changes  
2020-10-05 19:43:11.177789941 +0200
@@ -1,0 +2,7 @@
+Mon Oct  5 06:18:37 UTC 2020 - Matej Cepl <[email protected]>
+
+- Update to 2.6.0:
+  - Remove our denose.patch, because it was included in the
+    upstream tarball.
+
+-------------------------------------------------------------------

Old:
----
  arf-2.5.1.tar.gz
  denose.patch

New:
----
  arf-2.6.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-arf.spec ++++++
--- /var/tmp/diff_new_pack.8Gv8yq/_old  2020-10-05 19:43:11.629790383 +0200
+++ /var/tmp/diff_new_pack.8Gv8yq/_new  2020-10-05 19:43:11.633790387 +0200
@@ -17,8 +17,11 @@
 
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
+# This version of the package intentionally doesn't work with
+# Python < 3.6.
+%define skip_python2 1
 Name:           python-arf
-Version:        2.5.1
+Version:        2.6.0
 Release:        0
 # Note: I know that "advertisement" words are frowned on, but in this case
 # the package name is an acronym so "advanced" needs to stay in
@@ -26,9 +29,6 @@
 License:        GPL-2.0-only
 URL:            https://github.com/melizalab/arf
 Source:         
https://files.pythonhosted.org/packages/source/a/arf/arf-%{version}.tar.gz
-# PATCH-FIX-UPSTREAM denose.patch gh#melizalab/arf#7 [email protected]
-# Remove the dependency on the nose package.
-Patch0:         denose.patch
 BuildRequires:  %{python_module setuptools}
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros

++++++ arf-2.5.1.tar.gz -> arf-2.6.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/PKG-INFO new/arf-2.6.0/PKG-INFO
--- old/arf-2.5.1/PKG-INFO      2019-08-29 19:13:12.000000000 +0200
+++ new/arf-2.6.0/PKG-INFO      2020-09-29 03:06:14.071360000 +0200
@@ -1,12 +1,12 @@
 Metadata-Version: 1.2
 Name: arf
-Version: 2.5.1
+Version: 2.6.0
 Summary: Advanced Recording Format for acoustic, behavioral, and physiological 
data
 Home-page: https://github.com/melizalab/arf
 Author: Dan Meliza
 Maintainer: Dan Meliza
 License: UNKNOWN
-Download-URL: https://github.com/melizalab/arf/archive/2.5.1.tar.gz
+Download-URL: https://github.com/melizalab/arf/archive/2.6.0.tar.gz
 Description: 
         Library for reading and writing Advanced Recording Format files. ARF 
files
         are HDF5 files used to store audio and neurophysiological recordings 
in a
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/README.md new/arf-2.6.0/README.md
--- old/arf-2.5.1/README.md     2018-03-03 02:38:32.000000000 +0100
+++ new/arf-2.6.0/README.md     2020-09-29 01:42:52.000000000 +0200
@@ -46,7 +46,7 @@
 
 ARF files require HDF5>=1.8 (<http://www.hdfgroup.org/HDF5>).
 
-The python interface requires Python 2.7 or >=3.4, numpy>=1.3, and h5py>=2.2. 
To install the module:
+The python interface requires Python 3.6 or greater, numpy>=1.19, and 
h5py>=2.10. The last version to support Python 2 was `2.5.1`. To install the 
module:
 
 ```bash
 pip install arf
@@ -61,9 +61,6 @@
 make install
 ```
 
-To install the MATLAB interface, add the matlab subdirectory to MATLAB's search
-path. The MATLAB interface is not yet up to the `2.0` specification.
-
 ### version information
 
 The specification and implementations provided in this project use a form of
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/arf.egg-info/PKG-INFO 
new/arf-2.6.0/arf.egg-info/PKG-INFO
--- old/arf-2.5.1/arf.egg-info/PKG-INFO 2019-08-29 19:13:12.000000000 +0200
+++ new/arf-2.6.0/arf.egg-info/PKG-INFO 2020-09-29 03:06:13.000000000 +0200
@@ -1,12 +1,12 @@
 Metadata-Version: 1.2
 Name: arf
-Version: 2.5.1
+Version: 2.6.0
 Summary: Advanced Recording Format for acoustic, behavioral, and physiological 
data
 Home-page: https://github.com/melizalab/arf
 Author: Dan Meliza
 Maintainer: Dan Meliza
 License: UNKNOWN
-Download-URL: https://github.com/melizalab/arf/archive/2.5.1.tar.gz
+Download-URL: https://github.com/melizalab/arf/archive/2.6.0.tar.gz
 Description: 
         Library for reading and writing Advanced Recording Format files. ARF 
files
         are HDF5 files used to store audio and neurophysiological recordings 
in a
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/arf.egg-info/SOURCES.txt 
new/arf-2.6.0/arf.egg-info/SOURCES.txt
--- old/arf-2.5.1/arf.egg-info/SOURCES.txt      2019-08-29 19:13:12.000000000 
+0200
+++ new/arf-2.6.0/arf.egg-info/SOURCES.txt      2020-09-29 03:06:14.000000000 
+0200
@@ -30,5 +30,6 @@
 matlab/daqtoarf.m
 matlab/private/make_attr.m
 matlab/private/make_attrs.m
+tests/__init__.py
 tests/test_arf.cpp
 tests/test_arf.py
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/arf.egg-info/requires.txt 
new/arf-2.6.0/arf.egg-info/requires.txt
--- old/arf-2.5.1/arf.egg-info/requires.txt     2019-08-29 19:13:12.000000000 
+0200
+++ new/arf-2.6.0/arf.egg-info/requires.txt     2020-09-29 03:06:13.000000000 
+0200
@@ -1,2 +1 @@
-numpy>=1.3
-h5py>=2.2
+h5py>=2.10
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/arf.py new/arf-2.6.0/arf.py
--- old/arf-2.5.1/arf.py        2019-08-29 18:52:36.000000000 +0200
+++ new/arf-2.6.0/arf.py        2020-09-29 01:32:20.000000000 +0200
@@ -8,7 +8,8 @@
 from __future__ import unicode_literals
 
 spec_version = "2.1"
-__version__ = version = "2.5.1"
+__version__ = version = "2.6.0"
+
 
 def version_info():
     from h5py.version import version as h5py_version, hdf5_version
@@ -230,7 +231,10 @@
     else:
         t = dset[:]
         idx = (t >= begin) & (t < end)
-        data = dset[idx] - begin
+        if idx.size > 0:
+            data = dset[idx] - begin
+        else:
+            data = idx
     return data, begin
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/setup.cfg new/arf-2.6.0/setup.cfg
--- old/arf-2.5.1/setup.cfg     2019-08-29 19:13:12.000000000 +0200
+++ new/arf-2.6.0/setup.cfg     2020-09-29 03:06:14.072973700 +0200
@@ -1,6 +1,3 @@
-[bdist_wheel]
-universal = 1
-
 [pep8]
 ignore = E221,E501,E701
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/setup.py new/arf-2.6.0/setup.py
--- old/arf-2.5.1/setup.py      2018-06-06 18:10:18.000000000 +0200
+++ new/arf-2.6.0/setup.py      2020-09-29 01:43:28.000000000 +0200
@@ -3,14 +3,12 @@
 # -*- mode: python -*-
 import sys
 import os
-
-if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 4):
-    raise RuntimeError("Python version 2.7 or >= 3.4 required.")
-
 from setuptools import setup
-
 from arf import __version__
 
+if sys.version_info[:2] < (3, 6):
+    raise RuntimeError("Python version 3.6 or greater required.")
+
 cls_txt = """
 Development Status :: 5 - Production/Stable
 Intended Audience :: Science/Research
@@ -36,7 +34,7 @@
 Requires h5py (at least 2.2) and numpy (at least 1.3).
 """
 
-install_requires = ["numpy>=1.3", "h5py>=2.2"]
+install_requires = ["h5py>=2.10"]
 if (os.environ.get('TRAVIS') == 'true' and 
os.environ.get('TRAVIS_PYTHON_VERSION').startswith('2.6')):
     install_requires.append('unittest2>=0.5.1')
 
@@ -53,7 +51,7 @@
     install_requires=install_requires,
 
     py_modules=['arf'],
-    test_suite='nose.collector'
+    test_suite='tests'
 )
 # Variables:
 # End:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/tests/__init__.py 
new/arf-2.6.0/tests/__init__.py
--- old/arf-2.5.1/tests/__init__.py     1970-01-01 01:00:00.000000000 +0100
+++ new/arf-2.6.0/tests/__init__.py     2020-09-28 23:48:23.000000000 +0200
@@ -0,0 +1 @@
+# This file intentionally left blank.
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/arf-2.5.1/tests/test_arf.py 
new/arf-2.6.0/tests/test_arf.py
--- old/arf-2.5.1/tests/test_arf.py     2019-08-29 02:44:36.000000000 +0200
+++ new/arf-2.6.0/tests/test_arf.py     2020-09-29 01:33:08.000000000 +0200
@@ -1,22 +1,20 @@
 # -*- coding: utf-8 -*-
 # -*- mode: python -*-
 
-# test harness for arf interface. assumes the underlying hdf5 and h5py 
libraries
-# are working.
+# test harness for arf interface. assumes the underlying hdf5 and h5py
+# libraries are working.
 from __future__ import division
 from __future__ import unicode_literals
 
-from nose.tools import *
-from nose.plugins.skip import SkipTest
+import unittest
 from distutils import version
-from h5py.version import version as h5py_version, hdf5_version
+from h5py.version import version as h5py_version
 
 import numpy as nx
 import arf
 import time
 from numpy.random import randn, randint
 
-fp = arf.open_file("test", 'w', driver="core", backing_store=False)
 entry_base = "entry_%03d"
 tstamp = time.mktime(time.localtime())
 entry_attributes = {'intattr': 1,
@@ -51,8 +49,8 @@
                  ),
             dict(name="events",
                  data=nx.rec.fromrecords(
-                 [(1.0, 1, b"stimulus"), (5.0, 0, b"stimulus")],
-                 names=("start", "state", "name")),  # 'start' required
+                     [(1.0, 1, b"stimulus"), (5.0, 0, b"stimulus")],
+                     names=("start", "state", "name")),  # 'start' required
                  datatype=arf.DataTypes.EVENT,
                  units=(b"s",b"",b"")) # only bytes supported by h5py
             ]
@@ -72,142 +70,139 @@
                      units="s"),
                 dict(name="missing units for complex dtype",
                      data=nx.rec.fromrecords(
-                     [(1.0, 1, b"stimulus"), (5.0, 0, b"stimulus")],
-                     names=(
-                     "start", "state", "name"))),
+                         [(1.0, 1, b"stimulus"), (5.0, 0, b"stimulus")],
+                         names=("start", "state", "name"))),
                 dict(name="wrong length units for complex dtype",
                      data=nx.rec.fromrecords(
-                     [(1.0, 1, b"stimulus"), (5.0, 0, b"stimulus")],
-                     names=(
-                     "start", "state", "name")),
+                         [(1.0, 1, b"stimulus"), (5.0, 0, b"stimulus")],
+                         names=("start", "state", "name")),
                      units=("seconds",)),
                 ]
 
 
-def create_entry(name):
-    g = arf.create_entry(fp, name, tstamp, **entry_attributes)
-    assert_true(name in fp)
-    assert_true(arf.is_entry(g))
-    assert_true(arf.timestamp_to_float(g.attrs['timestamp']) > 0)
-    for k in entry_attributes:
-        assert_true(k in g.attrs)
-
-
-def create_dataset(g, dset):
-    d = arf.create_dataset(g, **dset)
-    assert_equal(d.shape, dset['data'].shape)
-    assert_false(arf.is_entry(d))
-
-
-def test00_create_entries():
-    N = 5
-    for i in range(N):
-        yield create_entry, entry_base % i
-    assert_equal(len(fp), N)
-
+class TestArfCreation(unittest.TestCase):
+    def setUp(self):
+        self.fp = arf.open_file("test", 'w', driver="core", 
backing_store=False)
+        self.entry = arf.create_entry(self.fp, "entry", tstamp)
+        self.dataset = arf.create_dataset(self.entry, **datasets[2])
+
+    def tearDown(self):
+        self.fp.close()
+
+    def create_entry(self, name):
+        g = arf.create_entry(self.fp, name, tstamp, **entry_attributes)
+        self.assertTrue(name in self.fp)
+        self.assertTrue(arf.is_entry(g))
+        self.assertTrue(arf.timestamp_to_float(g.attrs['timestamp']) > 0)
+        for k in entry_attributes:
+            self.assertTrue(k in g.attrs)
+
+    def create_dataset(self, g, dset):
+        d = arf.create_dataset(g, **dset)
+        self.assertEqual(d.shape, dset['data'].shape)
+        self.assertFalse(arf.is_entry(d))
+
+    def test00_create_entries(self):
+        N = 5
+        for i in range(N):
+            self.create_entry(entry_base % i)
+        # setUp already created one entry, so N new entries make N + 1 total
+        self.assertEqual(len(self.fp), N + 1)
+
+    def test01_create_existing_entry(self):
+        with self.assertRaises(ValueError):
+            arf.create_entry(self.fp, "entry", tstamp, **entry_attributes)
 
-@raises(ValueError)
-def test01_create_existing_entry():
-    arf.create_entry(fp, entry_base % 0, tstamp, **entry_attributes)
-
-
-def test02_create_datasets():
-    for name in arf.keys_by_creation(fp):
-        entry = fp[name]
+    def test02_create_datasets(self):
         for dset in datasets:
-            yield create_dataset, entry, dset
-        assert_equal(len(entry), len(datasets))
-        assert_equal(set(entry.keys()), set(dset['name'] for dset in datasets))
-
-
-def test03_set_attributes():
-    # tests the set_attributes convenience function
-    arf.set_attributes(fp["entry_001/spikes"], mystr="myvalue", myint=5000)
-    assert_equal(fp["entry_001/spikes"].attrs['myint'], 5000)
-    assert_equal(fp["entry_001/spikes"].attrs['mystr'], "myvalue")
-    arf.set_attributes(fp["entry_001/spikes"], mystr=None)
-    assert_false("mystr" in fp["entry_001/spikes"].attrs)
-
-
-def test04_create_bad_dataset():
-    f = raises(ValueError)(create_dataset)
-    e = fp['entry_001']
-    for dset in bad_datasets:
-        yield f, e, dset
-
-
-def test05_null_uuid():
-    # nulls in a uuid can make various things barf
-    from uuid import UUID
-    uuid = UUID(bytes=b''.rjust(16, b'\0'))
-    e = fp['entry_001']
-    arf.set_uuid(e, uuid)
-
-    assert_equal(arf.get_uuid(e), uuid)
-
-
-def test06_creation_iter():
-    fp = arf.open_file("test06", mode="a", driver="core", backing_store=False)
-    entry_names = ['z', 'y', 'a', 'q', 'zzyfij']
-    for name in entry_names:
-        g = arf.create_entry(fp, name, 0)
-        arf.create_dataset(g, "dset", (1,), sampling_rate=1)
-    assert_equal(list(arf.keys_by_creation(fp)), entry_names)
-
-if version.StrictVersion(h5py_version) < version.StrictVersion("2.2"):
-    test06_creation_iter = SkipTest(test06_creation_iter)
-
-
-def test07_append_to_table():
-    fp = arf.open_file("test07", mode="a", driver="core", backing_store=False)
-    dtype = nx.dtype({'names': ("f1","f2"), 'formats': [nx.uint, nx.int32]})
-    dset = arf.create_table(fp, 'test', dtype=dtype)
-    assert_equal(dset.shape[0], 0)
-    arf.append_data(dset, (5, 10))
-    assert_equal(dset.shape[0], 1)
-
-
-def test08_check_file_version():
-    fp = arf.open_file("test08", mode="a", driver="core", backing_store=False)
-    arf.check_file_version(fp)
-
-
-def test09_timestamp_conversion():
-    from datetime import datetime
-
-    dt = datetime.now()
-    ts = arf.convert_timestamp(dt)
-    assert_equal(arf.timestamp_to_datetime(ts), dt)
-    assert_true(all(arf.convert_timestamp(ts) == ts))
-
-    ts = arf.convert_timestamp(1000)
-    assert_equal(int(arf.timestamp_to_float(ts)), 1000)
-
-
-def test10_select_from_timeseries():
-    entry = fp[entry_base % 0]
-    for data in datasets:
-        dset = entry[data["name"]]
-        if data.get("units", None) == "samples":
-            selected, offset = arf.select_interval(dset, 0, 
data["sampling_rate"])
-        else:
-            selected, offset = arf.select_interval(dset, 0.0, 1.0)
-        if arf.is_time_series(dset):
-            nx.testing.assert_array_equal(selected, 
data["data"][:data["sampling_rate"]])
+            yield self.create_dataset, self.entry, dset
+        self.assertEqual(len(self.entry), len(datasets))
+        self.assertEqual(set(self.entry.keys()), set(dset['name'] for dset in 
datasets))
+
+    def test04_create_bad_dataset(self):
+        for dset in bad_datasets:
+            with self.assertRaises(ValueError):
+                self.create_dataset(self.entry, dset)
+
+    def test05_set_attributes(self):
+        """ tests the set_attributes convenience function """
+        arf.set_attributes(self.entry, mystr="myvalue", myint=5000)
+        self.assertEqual(self.entry.attrs['myint'], 5000)
+        self.assertEqual(self.entry.attrs['mystr'], "myvalue")
+        arf.set_attributes(self.entry, mystr=None)
+        self.assertFalse("mystr" in self.entry.attrs)
+
+    def test06_null_uuid(self):
+        # nulls in a uuid can make various things barf
+        from uuid import UUID
+        uuid = UUID(bytes=b''.rjust(16, b'\0'))
+        arf.set_uuid(self.entry, uuid)
+        self.assertEqual(arf.get_uuid(self.entry), uuid)
+
+    def test07_copy_entry_with_attrs(self):
+        src_entry_attrs = dict(self.entry.attrs)
+        src_entry_timestamp = src_entry_attrs.pop("timestamp")
+        tgt_entry = arf.create_entry(self.fp, "copied_entry", 
src_entry_timestamp, **src_entry_attrs)
+        self.assertEqual(self.entry.attrs['uuid'], tgt_entry.attrs['uuid'])
+
+    def test08_check_file_version(self):
+        arf.check_file_version(self.fp)
+
+    def test09_append_to_table(self):
+        dtype = nx.dtype({'names': ("f1","f2"), 'formats': [nx.uint, 
nx.int32]})
+        dset = arf.create_table(self.fp, 'test', dtype=dtype)
+        self.assertEqual(dset.shape[0], 0)
+        arf.append_data(dset, (5, 10))
+        self.assertEqual(dset.shape[0], 1)
+
+
[email protected](version.StrictVersion(h5py_version) < 
+version.StrictVersion("2.2"), "not supported on h5py < 2.2")
+class TestArfNavigation(unittest.TestCase):
+    def setUp(self):
+        self.fp = arf.open_file("test", 'w', driver="core", 
backing_store=False)
+
+    def tearDown(self):
+        self.fp.close()
+
+    def test01_creation_iter(self):
+        self.fp = arf.open_file("test06", mode="a", driver="core", 
backing_store=False)
+        entry_names = ['z', 'y', 'a', 'q', 'zzyfij']
+        for name in entry_names:
+            g = arf.create_entry(self.fp, name, 0)
+            arf.create_dataset(g, "dset", (1,), sampling_rate=1)
+        self.assertEqual(list(arf.keys_by_creation(self.fp)), entry_names)
+
+    def test10_select_from_timeseries(self):
+        entry = arf.create_entry(self.fp, "entry", tstamp)
+        for data in datasets:
+            arf.create_dataset(entry, **data)
+            dset = entry[data["name"]]
+            if data.get("units", None) == "samples":
+                selected, offset = arf.select_interval(dset, 0, 
data["sampling_rate"])
+            else:
+                selected, offset = arf.select_interval(dset, 0.0, 1.0)
+            if arf.is_time_series(dset):
+                nx.testing.assert_array_equal(selected, 
data["data"][:data["sampling_rate"]])
+
+
+class TestArfUtility(unittest.TestCase):
+
+    def test01_timestamp_conversion(self):
+        from datetime import datetime
+        dt = datetime.now()
+        ts = arf.convert_timestamp(dt)
+        self.assertEqual(arf.timestamp_to_datetime(ts), dt)
+        self.assertTrue(all(arf.convert_timestamp(ts) == ts))
+        ts = arf.convert_timestamp(1000)
+        self.assertEqual(int(arf.timestamp_to_float(ts)), 1000)
+
+    def test99_various(self):
+        # test some functions difficult to cover otherwise
+        arf.DataTypes._doc()
+        arf.DataTypes._todict()
 
-def test11_copy_entry_attrs():
-    src_entry = fp[entry_base % 0]
-    src_entry_attrs = dict(src_entry.attrs)
-    src_entry_timestamp = src_entry_attrs.pop("timestamp")
-    tgt_entry = arf.create_entry(fp, "copied_entry", src_entry_timestamp, 
**src_entry_attrs)
-    assert_equal(src_entry.attrs['uuid'], tgt_entry.attrs['uuid'])
 
 
-def test99_various():
-    # test some functions difficult to cover otherwise
-    arf.DataTypes._doc()
-    arf.DataTypes._todict()
 
 
-# Variables:
-# End:
+# # Variables:
+# # End:


Reply via email to