Hello community,

here is the log from the commit of package python-pytools for openSUSE:Factory 
checked in at 2017-06-12 15:34:50
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-pytools (Old)
 and      /work/SRC/openSUSE:Factory/.python-pytools.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-pytools"

Mon Jun 12 15:34:50 2017 rev:2 rq:502859 version:2017.3

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-pytools/python-pytools.changes    
2016-02-16 09:18:23.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.python-pytools.new/python-pytools.changes       
2017-06-12 15:34:56.963055381 +0200
@@ -1,0 +2,13 @@
+Sun Jun 11 10:26:02 UTC 2017 - mplus...@suse.com
+
+- Update to version 2017.3
+  * No changelog available
+
+-------------------------------------------------------------------
+Sat Mar  4 10:20:56 UTC 2017 - mplus...@suse.com
+
+- Update to version 2016.2.6
+  * No changelog available
+- Convert to python singlespec
+
+-------------------------------------------------------------------

Old:
----
  LICENSE.mit
  pytools-2016.1.tar.gz

New:
----
  pytools-2017.3.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-pytools.spec ++++++
--- /var/tmp/diff_new_pack.THSlih/_old  2017-06-12 15:35:00.122609750 +0200
+++ /var/tmp/diff_new_pack.THSlih/_new  2017-06-12 15:35:00.126609186 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-pytools
 #
-# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2017 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -16,31 +16,25 @@
 #
 
 
-%if 0%{?suse_version} && 0%{?suse_version} <= 1110
-%{!?python_sitelib: %global python_sitelib %(python -c "from 
distutils.sysconfig import get_python_lib; print get_python_lib()")}
-%else
-BuildArch:      noarch
-%endif
 Name:           python-pytools
-Version:        2016.1
+Version:        2017.3
 Release:        0
 Summary:        A collection of tools for Python
 License:        MIT
 Group:          Development/Languages/Python
-Url:            http://pypi.python.org/pypi/pytools
-Source0:        
http://pypi.python.org/packages/source/p/pytools/pytools-%{version}.tar.gz
-Source1:        LICENSE.mit
-BuildRequires:  python-appdirs >= 1.4.0
-BuildRequires:  python-decorator >= 3.2.0
-BuildRequires:  python-devel
-BuildRequires:  python-numpy >= 1.6.0
-BuildRequires:  python-setuptools
-BuildRequires:  python-six >= 1.8.0
-Requires:       python-appdirs >= 1.4.0
-Requires:       python-decorator >= 3.2.0
-Requires:       python-numpy >= 1.6.0
-Requires:       python-six >= 1.8.0
-BuildRoot:      %{_tmppath}/%{name}-%{version}-build
+Url:            https://pypi.python.org/pypi/pytools
+Source0:        
https://files.pythonhosted.org/packages/source/p/pytools/pytools-%{version}.tar.gz
+BuildRequires:  %{python_module appdirs}
+BuildRequires:  %{python_module base}
+BuildRequires:  %{python_module decorator}
+BuildRequires:  %{python_module devel}
+BuildRequires:  %{python_module numpy}
+BuildRequires:  %{python_module setuptools}
+BuildRequires:  %{python_module six}
+BuildRequires:  fdupes
+BuildRequires:  python-rpm-macros
+BuildArch:      noarch
+%python_subpackages
 
 %description
 Pytools is a big bag of things that are "missing" from the Python standard 
library.
@@ -57,20 +51,20 @@
 
 %prep
 %setup -q -n pytools-%{version}
-cp %{SOURCE1} .
 
 %build
-python setup.py build
+%python_build
 
 %install
-python setup.py install --prefix=%{_prefix} --root=%{buildroot}
+%python_install
+%fdupes %{buildroot}
 
 %check
-python setup.py test
+python2 setup.py test
 
-%files
+%files %{python_files}
 %defattr(-,root,root,-)
-%doc README LICENSE.mit
+%doc README LICENSE
 %{python_sitelib}/*
 
 %changelog

++++++ pytools-2016.1.tar.gz -> pytools-2017.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/LICENSE new/pytools-2017.3/LICENSE
--- old/pytools-2016.1/LICENSE  1970-01-01 01:00:00.000000000 +0100
+++ new/pytools-2017.3/LICENSE  2016-10-10 00:22:06.000000000 +0200
@@ -0,0 +1,24 @@
+pytools is licensed to you under the MIT/X Consortium license:
+
+Copyright (c) 2009-16 Andreas Klöckner and Contributors.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/MANIFEST.in 
new/pytools-2017.3/MANIFEST.in
--- old/pytools-2016.1/MANIFEST.in      2015-10-17 21:26:02.000000000 +0200
+++ new/pytools-2017.3/MANIFEST.in      2016-10-10 00:22:44.000000000 +0200
@@ -0,0 +1 @@
+include LICENSE
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/PKG-INFO new/pytools-2017.3/PKG-INFO
--- old/pytools-2016.1/PKG-INFO 2016-01-14 19:54:58.000000000 +0100
+++ new/pytools-2017.3/PKG-INFO 2017-06-03 20:04:28.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: pytools
-Version: 2016.1
+Version: 2017.3
 Summary: A collection of tools for Python
 Home-page: http://pypi.python.org/pypi/pytools
 Author: Andreas Kloeckner
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/__init__.py 
new/pytools-2017.3/pytools/__init__.py
--- old/pytools-2016.1/pytools/__init__.py      2016-01-05 21:44:16.000000000 
+0100
+++ new/pytools-2017.3/pytools/__init__.py      2017-05-25 03:19:47.000000000 
+0200
@@ -197,6 +197,19 @@
     def __ne__(self, other):
         return not self.__eq__(other)
 
+
+class ImmutableRecordWithoutPickling(RecordWithoutPickling):
+    "Hashable record. Does not explicitly enforce immutability."
+
+    def __hash__(self):
+        return hash(
+                (type(self),) + tuple(getattr(self, field)
+                    for field in self.__class__.fields))
+
+
+class ImmutableRecord(ImmutableRecordWithoutPickling, Record):
+    pass
+
 # }}}
 
 
@@ -441,6 +454,7 @@
     raise TypeError(
         "memoize received unexpected position arguments: %s" % args)
 
+
 FunctionValueCache = memoize
 
 
@@ -448,44 +462,7 @@
     pass
 
 
-def memoize_method(method):
-    """Supports cache deletion via ``method_name.clear_cache(self)``.
-
-    .. note::
-        *clear_cache* support requires Python 2.5 or newer.
-    """
-
-    cache_dict_name = intern("_memoize_dic_"+method.__name__)
-
-    def wrapper(self, *args, **kwargs):
-        if kwargs:
-            key = (_HasKwargs, frozenset(six.iteritems(kwargs))) + args
-        else:
-            key = args
-
-        try:
-            return getattr(self, cache_dict_name)[key]
-        except AttributeError:
-            result = method(self, *args, **kwargs)
-            setattr(self, cache_dict_name, {key: result})
-            return result
-        except KeyError:
-            result = method(self, *args, **kwargs)
-            getattr(self, cache_dict_name)[key] = result
-            return result
-
-    def clear_cache(self):
-        delattr(self, cache_dict_name)
-
-    if sys.version_info >= (2, 5):
-        from functools import update_wrapper
-        new_wrapper = update_wrapper(wrapper, method)
-        new_wrapper.clear_cache = clear_cache
-
-    return new_wrapper
-
-
-def memoize_on_first_arg(function):
+def memoize_on_first_arg(function, cache_dict_name=None):
     """Like :func:`memoize_method`, but for functions that take the object
     to do memoization as first argument.
 
@@ -495,8 +472,9 @@
         *clear_cache* support requires Python 2.5 or newer.
     """
 
-    cache_dict_name = intern("_memoize_dic_"
-            + function.__module__ + function.__name__)
+    if cache_dict_name is None:
+        cache_dict_name = intern("_memoize_dic_"
+                + function.__module__ + function.__name__)
 
     def wrapper(obj, *args, **kwargs):
         if kwargs:
@@ -526,6 +504,16 @@
     return new_wrapper
 
 
+def memoize_method(method):
+    """Supports cache deletion via ``method_name.clear_cache(self)``.
+
+    .. note::
+        *clear_cache* support requires Python 2.5 or newer.
+    """
+
+    return memoize_on_first_arg(method, 
intern("_memoize_dic_"+method.__name__))
+
+
 def memoize_method_with_uncached(uncached_args=[], uncached_kwargs=set()):
     """Supports cache deletion via ``method_name.clear_cache(self)``.
 
@@ -627,8 +615,7 @@
 
 class memoize_in(object):  # noqa
     """Adds a cache to a function nested inside a method. The cache is attached
-    to *memoize_cache_context* (if it exists) or *self* in the outer (method)
-    namespace.
+    to *object*.
 
     Requires Python 2.5 or newer.
     """
@@ -804,21 +791,8 @@
     return reduce(mul, iterable, 1)
 
 
-try:
-    all = __builtins__.all
-    any = __builtins__.any
-except AttributeError:
-    def all(iterable):
-        for i in iterable:
-            if not i:
-                return False
-        return True
-
-    def any(iterable):
-        for i in iterable:
-            if i:
-                return True
-        return False
+all = six.moves.builtins.all
+any = six.moves.builtins.any
 
 
 def reverse_dictionary(the_dict):
@@ -1160,6 +1134,7 @@
                     yield prebase + [current_max] + postbase
         current_max += 1
 
+
 # backwards compatibility
 generate_positive_integer_tuples_below = 
generate_nonnegative_integer_tuples_below
 generate_all_positive_integer_tuples = generate_all_nonnegative_integer_tuples
@@ -1406,9 +1381,9 @@
             full = int(floor(scaled))
             eighths = int(ceil((scaled-full)*8))
             if eighths:
-                return full*unichr(0x2588) + unichr(0x2588+(8-eighths))
+                return full*six.unichr(0x2588) + six.unichr(0x2588+(8-eighths))
             else:
-                return full*unichr(0x2588)
+                return full*six.unichr(0x2588)
     else:
         def format_bar(cnt):
             return int(ceil(cnt*width/max_count))*"#"
@@ -1774,18 +1749,42 @@
         yield "%s_%d" % (prefix, try_num)
         try_num += 1
 
+
+def generate_numbered_unique_names(prefix, num=None):
+    orig_num = num
+    num = 0
+    if orig_num is None:
+        yield (num, prefix)
+
+    while True:
+        name = "%s_%d" % (prefix, num)
+        num += 1
+        yield (num, name)
+
+
 generate_unique_possibilities = MovedFunctionDeprecationWrapper(
         generate_unique_names)
 
 
-class UniqueNameGenerator:
+class UniqueNameGenerator(object):
     def __init__(self, existing_names=set(), forced_prefix=""):
         self.existing_names = existing_names.copy()
         self.forced_prefix = forced_prefix
+        self.prefix_to_counter = {}
 
     def is_name_conflicting(self, name):
         return name in self.existing_names
 
+    def _name_added(self, name):
+        """Callback to alert subclasses when a name has been added.
+
+        .. note::
+
+            This will not get called for the names in the *existing_names*
+            argument to :meth:`__init__`.
+        """
+        pass
+
     def add_name(self, name):
         if self.is_name_conflicting(name):
             raise ValueError("name '%s' conflicts with existing names")
@@ -1793,6 +1792,7 @@
             raise ValueError("name '%s' does not start with required prefix")
 
         self.existing_names.add(name)
+        self._name_added(name)
 
     def add_names(self, names):
         for name in names:
@@ -1801,13 +1801,18 @@
     def __call__(self, based_on="id"):
         based_on = self.forced_prefix + based_on
 
-        for var_name in generate_unique_names(based_on):
+        counter = self.prefix_to_counter.get(based_on, None)
+
+        for counter, var_name in generate_numbered_unique_names(based_on, 
counter):
             if not self.is_name_conflicting(var_name):
                 break
 
+        self.prefix_to_counter[based_on] = counter
+
         var_name = intern(var_name)
 
         self.existing_names.add(var_name)
+        self._name_added(var_name)
         return var_name
 
 # }}}
@@ -1817,6 +1822,7 @@
     import doctest
     doctest.testmod()
 
+
 if __name__ == "__main__":
     _test()
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/batchjob.py 
new/pytools-2017.3/pytools/batchjob.py
--- old/pytools-2016.1/pytools/batchjob.py      2015-10-17 21:26:02.000000000 
+0200
+++ new/pytools-2017.3/pytools/batchjob.py      2016-12-06 02:09:53.000000000 
+0100
@@ -1,5 +1,7 @@
 from __future__ import absolute_import
 import six
+
+
 def _cp(src, dest):
     from pytools import assert_not_a_file
     assert_not_a_file(dest)
@@ -15,16 +17,11 @@
         inf.close()
 
 
-
-
-
 def get_timestamp():
     from datetime import datetime
     return datetime.now().strftime("%Y-%m-%d-%H%M%S")
 
 
-
-
 class BatchJob(object):
     def __init__(self, moniker, main_file, aux_files=[], timestamp=None):
         import os
@@ -49,7 +46,7 @@
 
         runscript = open("%s/run.sh" % self.path, "w")
         import sys
-        runscript.write("%s %s setup.cpy" 
+        runscript.write("%s %s setup.cpy"
                 % (sys.executable, main_file))
         runscript.close()
 
@@ -68,14 +65,10 @@
         setup.close()
 
 
-
-
-class INHERIT(object):
+class INHERIT(object):  # noqa
     pass
 
 
-
-
 class GridEngineJob(BatchJob):
     def submit(self, env={"LD_LIBRARY_PATH": INHERIT, "PYTHONPATH": INHERIT},
             memory_megs=None, extra_args=[]):
@@ -103,8 +96,6 @@
             raise RuntimeError("Process submission of %s failed" % 
self.moniker)
 
 
-
-
 class PBSJob(BatchJob):
     def submit(self, env={"LD_LIBRARY_PATH": INHERIT, "PYTHONPATH": INHERIT},
             memory_megs=None, extra_args=[]):
@@ -132,11 +123,9 @@
             raise RuntimeError("Process submission of %s failed" % 
self.moniker)
 
 
-
-
 def guess_job_class():
     from subprocess import Popen, PIPE, STDOUT
-    qstat_helplines = Popen(["qstat", "--help"], 
+    qstat_helplines = Popen(["qstat", "--help"],
             stdout=PIPE, stderr=STDOUT).communicate()[0].split("\n")
     if qstat_helplines[0].startswith("GE"):
         return GridEngineJob
@@ -144,8 +133,6 @@
         return PBSJob
 
 
-
-
 class ConstructorPlaceholder:
     def __init__(self, classname, *args, **kwargs):
         self.classname = classname
@@ -162,8 +149,8 @@
         return "%s(%s)" % (self.classname,
                 ",".join(
                     [str(arg) for arg in self.args]
-                    + ["%s=%s" % (kw, repr(val)) for kw, val in 
six.iteritems(self.kwargs)]
+                    + ["%s=%s" % (kw, repr(val))
+                        for kw, val in six.iteritems(self.kwargs)]
                     )
                 )
     __repr__ = __str__
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/debug.py 
new/pytools-2017.3/pytools/debug.py
--- old/pytools-2016.1/pytools/debug.py 2015-10-17 21:26:02.000000000 +0200
+++ new/pytools-2017.3/pytools/debug.py 2016-12-06 02:09:53.000000000 +0100
@@ -163,6 +163,7 @@
 
     readline.parse_and_bind("tab: complete")
 
+
 try:
     import readline
     import rlcompleter
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/diskdict.py 
new/pytools-2017.3/pytools/diskdict.py
--- old/pytools-2016.1/pytools/diskdict.py      2015-10-17 21:26:02.000000000 
+0200
+++ new/pytools-2017.3/pytools/diskdict.py      2017-02-03 19:11:09.000000000 
+0100
@@ -81,6 +81,16 @@
         self.commit_interval = commit_interval
         self.commit_countdown = self.commit_interval
 
+        from warnings import warn
+        warn("Given that Python 3+ uses hash randomization, DiskDict will 
typically "
+                "be entirely useless and should not be used. Since object 
hashes "
+                "will change between runs, it will be unable to retrieve 
objects "
+                "from the dictionary in a second run, defeating the purpose of 
"
+                "persisting to disk. "
+                "DiskDict is deprecated and will be removed in 2018. "
+                "Use pytools.persistent_dict instead.", DeprecationWarning,
+                stacklevel=2)
+
     def __contains__(self, key):
         if key in self.cache:
             return True
@@ -90,9 +100,9 @@
                     "select key_pickle, version_pickle, result_pickle from 
data"
                     " where key_hash = ? and version_hash = ?",
                     (hash(key), self.version_hash)):
-                if loads(str(key_pickle)) == key \
-                        and loads(str(version_pickle)) == self.version:
-                    result = loads(str(result_pickle))
+                if loads(six.binary_type(key_pickle)) == key \
+                        and loads(six.binary_type(version_pickle)) == 
self.version:
+                    result = loads(six.binary_type(result_pickle))
                     self.cache[key] = result
                     return True
 
@@ -107,9 +117,9 @@
                     "select key_pickle, version_pickle, result_pickle from 
data"
                     " where key_hash = ? and version_hash = ?",
                     (hash(key), self.version_hash)):
-                if loads(str(key_pickle)) == key \
-                        and loads(str(version_pickle)) == self.version:
-                    result = loads(str(result_pickle))
+                if loads(six.binary_type(key_pickle)) == key \
+                        and loads(six.binary_type(version_pickle)) == 
self.version:
+                    result = loads(six.binary_type(result_pickle))
                     self.cache[key] = result
                     return result
 
@@ -124,7 +134,8 @@
                 "select id, key_pickle, version_pickle from data"
                 " where key_hash = ? and version_hash = ?",
                 (hash(key), self.version_hash)):
-            if loads(key_pickle) == key and loads(version_pickle) == 
self.version:
+            if (loads(six.binary_type(key_pickle)) == key
+                    and loads(six.binary_type(version_pickle)) == 
self.version):
                 self.db_conn.execute("delete from data where id = ?", 
(item_id,))
 
         self.commit_countdown -= 1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/lex.py 
new/pytools-2017.3/pytools/lex.py
--- old/pytools-2016.1/pytools/lex.py   2015-10-17 21:26:02.000000000 +0200
+++ new/pytools-2017.3/pytools/lex.py   2016-10-04 01:37:34.000000000 +0200
@@ -136,7 +136,9 @@
         return self.index + i >= len(self.lexed)
 
     def is_next(self, tag, i=0):
-        return self.next_tag(i) is tag
+        return (
+                self.index + i < len(self.lexed)
+                and self.next_tag(i) is tag)
 
     def raise_parse_error(self, msg):
         if self.is_at_end():
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/log.py 
new/pytools-2017.3/pytools/log.py
--- old/pytools-2016.1/pytools/log.py   2015-10-17 21:26:02.000000000 +0200
+++ new/pytools-2017.3/pytools/log.py   2016-12-06 02:09:53.000000000 +0100
@@ -538,7 +538,7 @@
         self.constants[name] = value
 
         from pickle import dumps
-        value = buffer(dumps(value))
+        value = bytes(dumps(value))
 
         if existed:
             self.db_conn.execute("update constants set value = ? where name = 
?",
@@ -662,7 +662,7 @@
             from pickle import dumps
             self.db_conn.execute("""insert into quantities values 
(?,?,?,?)""", (
                 name, unit, description,
-                buffer(dumps(def_agg))))
+                bytes(dumps(def_agg))))
             self.db_conn.execute("""create table %s
               (step integer, rank integer, value real)""" % name)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/mpi.py 
new/pytools-2017.3/pytools/mpi.py
--- old/pytools-2016.1/pytools/mpi.py   2015-10-17 21:26:02.000000000 +0200
+++ new/pytools-2017.3/pytools/mpi.py   2016-12-06 02:09:53.000000000 +0100
@@ -1,4 +1,6 @@
 from __future__ import absolute_import
+
+
 def check_for_mpi_relaunch(argv):
     if argv[1] != "--mpi-relaunch":
         return
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/mpiwrap.py 
new/pytools-2017.3/pytools/mpiwrap.py
--- old/pytools-2016.1/pytools/mpiwrap.py       2015-10-17 21:26:02.000000000 
+0200
+++ new/pytools-2017.3/pytools/mpiwrap.py       2016-12-06 02:09:53.000000000 
+0100
@@ -8,14 +8,15 @@
 import pytools.prefork
 pytools.prefork.enable_prefork()
 
-from mpi4py.MPI import *
+from mpi4py.MPI import *  # noqa
 
-if Is_initialized():
+
+if Is_initialized():  # noqa
     raise RuntimeError("MPI already initialized before MPI wrapper import")
 
-def InitWithAutoFinalize(*args, **kwargs):
-    result = Init(*args, **kwargs)
+
+def InitWithAutoFinalize(*args, **kwargs):  # noqa
+    result = Init(*args, **kwargs)  # noqa
     import atexit
-    atexit.register(Finalize)
+    atexit.register(Finalize)  # noqa
     return result
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/obj_array.py 
new/pytools-2017.3/pytools/obj_array.py
--- old/pytools-2016.1/pytools/obj_array.py     2015-10-17 21:26:02.000000000 
+0200
+++ new/pytools-2017.3/pytools/obj_array.py     2016-12-06 02:09:53.000000000 
+0100
@@ -66,6 +66,7 @@
     else:
         return f
 
+
 hashable_field = MovedFunctionDeprecationWrapper(obj_array_to_hashable)
 
 
@@ -78,6 +79,7 @@
     else:
         return a == b
 
+
 field_equal = MovedFunctionDeprecationWrapper(obj_array_equal)
 
 
@@ -129,6 +131,7 @@
     else:
         return f(field)
 
+
 as_oarray_func = decorator(with_object_array_or_scalar)
 
 
@@ -158,6 +161,7 @@
     else:
         return f(*args)
 
+
 as_oarray_func_n_args = decorator(with_object_array_or_scalar_n_args)
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/persistent_dict.py 
new/pytools-2017.3/pytools/persistent_dict.py
--- old/pytools-2016.1/pytools/persistent_dict.py       2015-10-17 
21:26:02.000000000 +0200
+++ new/pytools-2017.3/pytools/persistent_dict.py       2017-06-03 
20:03:51.000000000 +0200
@@ -108,6 +108,10 @@
                     from warnings import warn
                     warn("could not obtain lock--delete '%s' if necessary"
                             % self.lock_file)
+                if attempts > 3 * 60:
+                    raise RuntimeError("waited more than three minutes "
+                            "on the lock file '%s'"
+                            "--something is wrong" % self.lock_file)
 
             cleanup_m.register(self)
 
@@ -166,24 +170,44 @@
 
 class KeyBuilder(object):
     def rec(self, key_hash, key):
-        try:
-            method = key.update_persistent_hash
-        except AttributeError:
-            pass
-        else:
-            method(key_hash, self)
-            return
+        digest = None
 
         try:
-            method = getattr(self, "update_for_"+type(key).__name__)
+            digest = key._pytools_persistent_hash_digest
         except AttributeError:
             pass
-        else:
-            method(key_hash, key)
-            return
 
-        raise TypeError("unsupported type for persistent hash keying: %s"
-                % type(key))
+        if digest is None:
+            try:
+                method = key.update_persistent_hash
+            except AttributeError:
+                pass
+            else:
+                inner_key_hash = new_hash()
+                method(inner_key_hash, self)
+                digest = inner_key_hash.digest()
+
+        if digest is None:
+            try:
+                method = getattr(self, "update_for_"+type(key).__name__)
+            except AttributeError:
+                pass
+            else:
+                inner_key_hash = new_hash()
+                method(inner_key_hash, key)
+                digest = inner_key_hash.digest()
+
+        if digest is None:
+            raise TypeError("unsupported type for persistent hash keying: %s"
+                    % type(key))
+
+        if not isinstance(key, type):
+            try:
+                key._pytools_persistent_hash_digest = digest
+            except Exception:
+                pass
+
+        key_hash.update(digest)
 
     def __call__(self, key):
         key_hash = new_hash()
@@ -226,7 +250,7 @@
         key_hash.update("<None>".encode('utf8'))
 
     def update_for_dtype(self, key_hash, key):
-        return key.str.encode("utf8")
+        key_hash.update(key.str.encode('utf8'))
 
     # }}}
 
@@ -305,6 +329,9 @@
                 with open(value_path, "wb") as outf:
                     dump(value, outf, protocol=HIGHEST_PROTOCOL)
 
+                logger.debug("%s: cache store [key=%s]" % (
+                    self.identifier, hexdigest_key))
+
                 # Write key last, so that if the reader below
                 key_path = item_dir_m.sub("key")
                 with open(key_path, "wb") as outf:
@@ -322,6 +349,8 @@
         from os.path import join, isdir
         item_dir = join(self.container_dir, hexdigest_key)
         if not isdir(item_dir):
+            logger.debug("%s: cache miss [key=%s]" % (
+                self.identifier, hexdigest_key))
             raise NoSuchEntryError(key)
 
         cleanup_m = CleanupManager()
@@ -360,10 +389,19 @@
 
                 if read_key != key:
                     # Key collision, oh well.
-                    logger.debug("key collsion in cache at '%s'"
-                            % self.container_dir)
+                    from warnings import warn
+                    warn("%s: key collision in cache at '%s' -- these are "
+                            "sufficiently unlikely that they're often "
+                            "indicative of a broken implementation "
+                            "of equality comparison"
+                            % (self.identifier, self.container_dir))
+                    # This is here so we can debug the equality comparison
+                    read_key == key
                     raise NoSuchEntryError(key)
 
+                logger.debug("%s: cache hit [key=%s]" % (
+                    self.identifier, hexdigest_key))
+
                 # {{{ load value
 
                 exc = None
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/prefork.py 
new/pytools-2017.3/pytools/prefork.py
--- old/pytools-2016.1/pytools/prefork.py       2015-10-17 21:26:02.000000000 
+0200
+++ new/pytools-2017.3/pytools/prefork.py       2016-12-06 02:09:53.000000000 
+0100
@@ -8,51 +8,69 @@
 from __future__ import absolute_import
 
 
-
-
-
 class ExecError(OSError):
     pass
 
 
+class DirectForker(object):
+    def __init__(self):
+        self.apids = {}
+        self.count = 0
 
-
-class DirectForker:
-    @staticmethod
-    def call(cmdline, cwd=None):
+    def call(self, cmdline, cwd=None):
         from subprocess import call
+
         try:
             return call(cmdline, cwd=cwd)
         except OSError as e:
             raise ExecError("error invoking '%s': %s"
-                    % ( " ".join(cmdline), e))
+                            % (" ".join(cmdline), e))
+
+    def call_async(self, cmdline, cwd=None):
+        from subprocess import Popen
 
-    @staticmethod
-    def call_capture_stdout(cmdline, cwd=None):
-        from subprocess import Popen, PIPE
         try:
-            return Popen(cmdline, cwd=cwd, stdin=PIPE, stdout=PIPE, 
stderr=PIPE).communicate()[0]
+            self.count += 1
+
+            proc = Popen(cmdline, cwd=cwd)
+            self.apids[self.count] = proc
+
+            return self.count
         except OSError as e:
             raise ExecError("error invoking '%s': %s"
-                    % ( " ".join(cmdline), e))
+                             % (" ".join(cmdline), e))
 
-    @staticmethod
-    def call_capture_output(cmdline, cwd=None, error_on_nonzero=True):
-        """
-        :returns: a tuple (return code, stdout_data, stderr_data).
-        """
+    def call_capture_output(self, cmdline, cwd=None, error_on_nonzero=True):
         from subprocess import Popen, PIPE
+
         try:
-            popen = Popen(cmdline, cwd=cwd, stdin=PIPE, stdout=PIPE, 
stderr=PIPE)
+            popen = Popen(cmdline, cwd=cwd, stdin=PIPE, stdout=PIPE,
+                          stderr=PIPE)
             stdout_data, stderr_data = popen.communicate()
+
             if error_on_nonzero and popen.returncode:
                 raise ExecError("status %d invoking '%s': %s"
-                        % (popen.returncode, " ".join(cmdline), stderr_data))
+                                % (popen.returncode, " ".join(cmdline),
+                                   stderr_data))
+
             return popen.returncode, stdout_data, stderr_data
         except OSError as e:
             raise ExecError("error invoking '%s': %s"
-                    % ( " ".join(cmdline), e))
+                            % (" ".join(cmdline), e))
+
+    def wait(self, aid):
+        proc = self.apids.pop(aid)
+        retc = proc.wait()
+
+        return retc
+
+    def waitall(self):
+        rets = {}
+
+        for aid in list(self.apids):
+            rets[aid] = self.wait(aid)
 
+        return rets
 
 
 def _send_packet(sock, data):
@@ -64,6 +82,7 @@
     sock.sendall(pack("I", len(packet)))
     sock.sendall(packet)
 
+
 def _recv_packet(sock, who="Process", partner="other end"):
     from struct import calcsize, unpack
     size_bytes_size = calcsize("I")
@@ -85,36 +104,39 @@
     return loads(packet)
 
 
-
-
 def _fork_server(sock):
+    # Ignore keyboard interrupts, we'll get notified by the parent.
     import signal
-    # ignore keyboard interrupts, we'll get notified by the parent.
     signal.signal(signal.SIGINT, signal.SIG_IGN)
 
-    quitflag = [False]
-
-    def quit():
-        quitflag[0] = True
+    # Construct a local DirectForker to do the dirty work
+    df = DirectForker()
 
     funcs = {
-    "quit": quit,
-    "call": DirectForker.call,
-    "call_capture_stdout": DirectForker.call_capture_stdout,
-    "call_capture_output": DirectForker.call_capture_output,
+        "call": df.call,
+        "call_async": df.call_async,
+        "call_capture_output": df.call_capture_output,
+        "wait": df.wait,
+        "waitall": df.waitall
     }
 
     try:
-        while not quitflag[0]:
-            func_name, args, kwargs = _recv_packet(sock, 
-                    who="Prefork server", partner="parent")
-
-            try:
-                result = funcs[func_name](*args, **kwargs)
-            except Exception as e:
-                _send_packet(sock, ("exception", e))
+        while True:
+            func_name, args, kwargs = _recv_packet(
+                sock,  who="Prefork server", partner="parent"
+            )
+
+            if func_name == "quit":
+                df.waitall()
+                _send_packet(sock, ("ok", None))
+                break
             else:
-                _send_packet(sock, ("ok", result))
+                try:
+                    result = funcs[func_name](*args, **kwargs)
+                except Exception as e:
+                    _send_packet(sock, ("exception", e))
+                else:
+                    _send_packet(sock, ("ok", result))
     finally:
         sock.close()
 
@@ -122,18 +144,19 @@
     os._exit(0)
 
 
-
-
-
-class IndirectForker:
+class IndirectForker(object):
     def __init__(self, server_pid, sock):
         self.server_pid = server_pid
         self.socket = sock
 
+        import atexit
+        atexit.register(self._quit)
+
     def _remote_invoke(self, name, *args, **kwargs):
         _send_packet(self.socket, (name, args, kwargs))
-        status, result = _recv_packet(self.socket, 
-                who="Prefork client", partner="prefork server")
+        status, result = _recv_packet(
+            self.socket,  who="Prefork client", partner="prefork server"
+        )
 
         if status == "exception":
             raise result
@@ -142,24 +165,31 @@
 
     def _quit(self):
         self._remote_invoke("quit")
+
         from os import waitpid
         waitpid(self.server_pid, 0)
 
     def call(self, cmdline, cwd=None):
         return self._remote_invoke("call", cmdline, cwd)
 
-    def call_capture_stdout(self, cmdline, cwd=None):
-        return self._remote_invoke("call_capture_stdout", cmdline, cwd)
+    def call_async(self, cmdline, cwd=None):
+        return self._remote_invoke("call_async", cmdline, cwd)
 
     def call_capture_output(self, cmdline, cwd=None, error_on_nonzero=True):
-        return self._remote_invoke("call_capture_output", cmdline, cwd, 
-                error_on_nonzero)
+        return self._remote_invoke("call_capture_output", cmdline, cwd,
+                                   error_on_nonzero)
 
+    def wait(self, aid):
+        return self._remote_invoke("wait", aid)
 
+    def waitall(self):
+        return self._remote_invoke("waitall")
 
 
 def enable_prefork():
-    if isinstance(forker[0], IndirectForker):
+    global forker
+
+    if isinstance(forker, IndirectForker):
         return
 
     from socket import socketpair
@@ -168,30 +198,34 @@
     from os import fork
     fork_res = fork()
 
+    # Child
     if fork_res == 0:
-        # child
         s_parent.close()
         _fork_server(s_child)
+    # Parent
     else:
         s_child.close()
-        forker[0] = IndirectForker(fork_res, s_parent)
+        forker = IndirectForker(fork_res, s_parent)
 
-        import atexit
-        atexit.register(forker[0]._quit)
 
+forker = DirectForker()
 
 
+def call(cmdline, cwd=None):
+    return forker.call(cmdline, cwd)
 
-forker = [DirectForker()]
 
-def call(cmdline, cwd=None):
-    return forker[0].call(cmdline, cwd)
+def call_async(cmdline, cwd=None):
+    return forker.call_async(cmdline, cwd)
 
-def call_capture_stdout(cmdline, cwd=None):
-    from warnings import warn
-    warn("call_capture_stdout is deprecated: use call_capture_output instead",
-            stacklevel=2)
-    return forker[0].call_capture_stdout(cmdline, cwd)
 
 def call_capture_output(cmdline, cwd=None, error_on_nonzero=True):
-    return forker[0].call_capture_output(cmdline, cwd, error_on_nonzero)
+    return forker.call_capture_output(cmdline, cwd, error_on_nonzero)
+
+
+def wait(aid):
+    return forker.wait(aid)
+
+
+def waitall():
+    return forker.waitall()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/py_codegen.py 
new/pytools-2017.3/pytools/py_codegen.py
--- old/pytools-2016.1/pytools/py_codegen.py    2015-10-17 21:26:02.000000000 
+0200
+++ new/pytools-2017.3/pytools/py_codegen.py    2017-06-03 00:36:37.000000000 
+0200
@@ -22,6 +22,8 @@
 THE SOFTWARE.
 """
 
+import six
+
 
 # loosely based on
 # http://effbot.org/zone/python-code-generator.htm
@@ -74,6 +76,18 @@
             raise RuntimeError("internal error in python code generator")
         self.level -= 1
 
+    def get_module(self, name="<generated code>"):
+        result_dict = {}
+        source_text = self.get()
+        exec(compile(
+            source_text.rstrip()+"\n", name, "exec"),
+                result_dict)
+        result_dict["_MODULE_SOURCE_CODE"] = source_text
+        return result_dict
+
+    def get_picklable_module(self):
+        return PicklableModule(self.get_module())
+
 
 class PythonFunctionGenerator(PythonCodeGenerator):
     def __init__(self, name, args):
@@ -84,13 +98,71 @@
         self.indent()
 
     def get_function(self):
-        result_dict = {}
-        source_text = self.get()
-        exec(compile(source_text.rstrip()+"\n", "<generated function %s>" % 
self.name, "exec"),
-                result_dict)
-        func = result_dict[self.name]
-        result_dict["_MODULE_SOURCE_CODE"] = source_text
-        return func
+        return self.get_module()[self.name]
+
+
+# {{{ pickling of binaries for generated code
+
+def _get_empty_module_dict():
+    result_dict = {}
+    exec(compile("", "<generated function>", "exec"), result_dict)
+    return result_dict
+
+
+_empty_module_dict = _get_empty_module_dict()
+
+
+class PicklableModule(object):
+    def __init__(self, mod_globals):
+        self.mod_globals = mod_globals
+
+    def __getstate__(self):
+        import marshal
+
+        nondefault_globals = {}
+        functions = {}
+
+        from types import FunctionType
+        for k, v in six.iteritems(self.mod_globals):
+            if isinstance(v, FunctionType):
+                functions[k] = (
+                        v.__name__,
+                        marshal.dumps(v.__code__),
+                        v.__defaults__)
+
+            elif k not in _empty_module_dict:
+                nondefault_globals[k] = v
+
+        import imp
+        return (0, imp.get_magic(), functions, nondefault_globals)
+
+    def __setstate__(self, obj):
+        v = obj[0]
+        if v == 0:
+            magic, functions, nondefault_globals = obj[1:]
+        else:
+            raise ValueError("unknown version of PicklableGeneratedFunction")
+
+        import imp
+        if magic != imp.get_magic():
+            raise ValueError("cannot unpickle function binary: "
+                    "incorrect magic value (got: %s, expected: %s)"
+                    % (magic, imp.get_magic()))
+
+        import marshal
+
+        mod_globals = _empty_module_dict.copy()
+        mod_globals.update(nondefault_globals)
+        self.mod_globals = mod_globals
+
+        from types import FunctionType
+        for k, v in six.iteritems(functions):
+            name, code_bytes, argdefs = v
+            f = FunctionType(
+                    marshal.loads(code_bytes), mod_globals, argdefs=argdefs)
+            mod_globals[k] = f
+
+# }}}
 
 
 # {{{ remove common indentation
@@ -120,3 +192,5 @@
     return "\n".join(line[base_indent:] for line in lines)
 
 # }}}
+
+# vim: foldmethod=marker
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/spatial_btree.py 
new/pytools-2017.3/pytools/spatial_btree.py
--- old/pytools-2016.1/pytools/spatial_btree.py 2015-10-28 08:31:20.000000000 
+0100
+++ new/pytools-2017.3/pytools/spatial_btree.py 2016-06-06 02:33:48.000000000 
+0200
@@ -85,9 +85,22 @@
         """
 
         def insert_into_subdivision(element, bbox):
-            for bucket in self.all_buckets:
-                if do_boxes_intersect((bucket.bottom_left, bucket.top_right), 
bbox):
-                    bucket.insert(element, bbox)
+            bucket_matches = [
+                ibucket
+                for ibucket, bucket in enumerate(self.all_buckets)
+                if do_boxes_intersect((bucket.bottom_left, bucket.top_right), 
bbox)]
+
+            from random import uniform
+            if len(bucket_matches) > len(self.all_buckets) // 2:
+                # Would go into more than half of all buckets--keep it here
+                self.elements.append((element, bbox))
+            elif len(bucket_matches) > 1 and uniform(0, 1) > 0.95:
+                # Would go into more than one bucket and therefore may recurse
+                # indefinitely. Keep it here with a low probability.
+                self.elements.append((element, bbox))
+            else:
+                for ibucket_match in bucket_matches:
+                    self.all_buckets[ibucket_match].insert(element, bbox)
 
         if self.buckets is None:
             # No subdivisions yet.
@@ -99,14 +112,13 @@
                         self.all_buckets,
                         max_elements_per_box=self.max_elements_per_box)
 
+                old_elements = self.elements
+                self.elements = []
+
                 # Move all elements from the full bucket into the new finer 
ones
-                for el, el_bbox in self.elements:
+                for el, el_bbox in old_elements:
                     insert_into_subdivision(el, el_bbox)
 
-                # Free up some memory. Elements are now stored in the
-                # subdivision, so we don't need them here any more.
-                del self.elements
-
                 insert_into_subdivision(element, bbox)
             else:
                 # Simple:
@@ -128,10 +140,10 @@
 
             for result in bucket.generate_matches(point):
                 yield result
-        else:
-            # We don't. Perform linear search.
-            for el, bbox in self.elements:
-                yield el
+
+        # Perform linear search.
+        for el, bbox in self.elements:
+            yield el
 
     def visualize(self, file):
         file.write("%f %f\n" % (self.bottom_left[0], self.bottom_left[1]))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools/stopwatch.py 
new/pytools-2017.3/pytools/stopwatch.py
--- old/pytools-2016.1/pytools/stopwatch.py     2015-10-17 21:26:02.000000000 
+0200
+++ new/pytools-2017.3/pytools/stopwatch.py     2016-12-06 02:09:53.000000000 
+0100
@@ -1,12 +1,8 @@
-from __future__ import division
-from __future__ import absolute_import
-from __future__ import print_function
+from __future__ import division, absolute_import, print_function
 import time
 import pytools
 
 
-
-
 class StopWatch:
     def __init__(self):
         self.Elapsed = 0.
@@ -42,16 +38,14 @@
         JOB_TIMES[self.Name] += elapsed
         if self.is_visible():
             print(" " * (len(self.Name) + 2), elapsed, "seconds")
-  
+
     def is_visible(self):
         if PRINT_JOBS.get():
-            return not self.Name in HIDDEN_JOBS
+            return self.Name not in HIDDEN_JOBS
         else:
             return self.Name in VISIBLE_JOBS
 
 
-
-
 class EtaEstimator:
     def __init__(self, total_steps):
         self.stopwatch = StopWatch().start()
@@ -67,17 +61,11 @@
             return None
 
 
-
-
 def print_job_summary():
     for key in JOB_TIMES:
         print(key, " " * (50-len(key)), JOB_TIMES[key])
 
 
-
-
-
-
 HIDDEN_JOBS = []
 VISIBLE_JOBS = []
 JOB_TIMES = pytools.DictionaryWithDefault(lambda x: 0)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools.egg-info/PKG-INFO 
new/pytools-2017.3/pytools.egg-info/PKG-INFO
--- old/pytools-2016.1/pytools.egg-info/PKG-INFO        2016-01-14 
19:54:58.000000000 +0100
+++ new/pytools-2017.3/pytools.egg-info/PKG-INFO        2017-06-03 
20:04:28.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: pytools
-Version: 2016.1
+Version: 2017.3
 Summary: A collection of tools for Python
 Home-page: http://pypi.python.org/pypi/pytools
 Author: Andreas Kloeckner
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/pytools.egg-info/SOURCES.txt 
new/pytools-2017.3/pytools.egg-info/SOURCES.txt
--- old/pytools-2016.1/pytools.egg-info/SOURCES.txt     2016-01-14 
19:54:58.000000000 +0100
+++ new/pytools-2017.3/pytools.egg-info/SOURCES.txt     2017-06-03 
20:04:28.000000000 +0200
@@ -1,3 +1,4 @@
+LICENSE
 MANIFEST.in
 README
 setup.cfg
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/setup.cfg new/pytools-2017.3/setup.cfg
--- old/pytools-2016.1/setup.cfg        2016-01-14 19:54:58.000000000 +0100
+++ new/pytools-2017.3/setup.cfg        2017-06-03 20:04:28.000000000 +0200
@@ -1,6 +1,7 @@
 [flake8]
-ignore = E126,E127,E128,E123,E226,E241,E242,E265,E402,W503
+ignore = E126,E127,E128,E123,E226,E241,E242,E265,E402,W503,E731
 max-line-length = 85
+exclude = pytools/arithmetic_container.py,pytools/decorator.py
 
 [wheel]
 universal = 1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/setup.py new/pytools-2017.3/setup.py
--- old/pytools-2016.1/setup.py 2016-01-14 19:53:39.000000000 +0100
+++ new/pytools-2017.3/setup.py 2017-06-03 19:59:17.000000000 +0200
@@ -1,10 +1,10 @@
 #! /usr/bin/env python
-# -*- coding: latin1 -*-
+# -*- coding: utf-8 -*-
 
 from setuptools import setup
 
 setup(name="pytools",
-      version="2016.1",
+      version="2017.3",
       description="A collection of tools for Python",
       long_description="""
       Pytools is a big bag of things that are "missing" from the Python 
standard
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/test/test_math_stuff.py 
new/pytools-2017.3/test/test_math_stuff.py
--- old/pytools-2016.1/test/test_math_stuff.py  2015-10-17 21:26:02.000000000 
+0200
+++ new/pytools-2017.3/test/test_math_stuff.py  2016-12-06 02:09:53.000000000 
+0100
@@ -2,8 +2,6 @@
 from __future__ import absolute_import
 
 
-
-
 def test_variance():
     data = [4, 7, 13, 16]
 
@@ -12,7 +10,7 @@
         return ((
             sum(di**2 for di in data)
             - sum(data)**2/n)
-            /(n-1))
+            / (n-1))
 
     from pytools import variance
     orig_variance = variance(data, entire_pop=False)
@@ -21,4 +19,3 @@
 
     data = [1e9 + x for x in data]
     assert abs(variance(data, entire_pop=False) - orig_variance) < 1e-15
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/test/test_persistent_dict.py 
new/pytools-2017.3/test/test_persistent_dict.py
--- old/pytools-2016.1/test/test_persistent_dict.py     2015-10-17 
21:26:02.000000000 +0200
+++ new/pytools-2017.3/test/test_persistent_dict.py     2017-06-03 
16:44:02.000000000 +0200
@@ -1,5 +1,4 @@
-from __future__ import division, with_statement
-from __future__ import absolute_import
+from __future__ import division, with_statement, absolute_import
 
 import pytest  # noqa
 import sys  # noqa
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pytools-2016.1/test/test_pytools.py 
new/pytools-2017.3/test/test_pytools.py
--- old/pytools-2016.1/test/test_pytools.py     2015-10-28 08:34:36.000000000 
+0100
+++ new/pytools-2017.3/test/test_pytools.py     2017-03-29 22:05:55.000000000 
+0200
@@ -2,6 +2,7 @@
 from __future__ import absolute_import
 
 import pytest
+import sys
 
 
 @pytest.mark.skipif("sys.version_info < (2, 5)")
@@ -131,6 +132,7 @@
 
 @pytest.mark.parametrize("dims", [2, 3])
 def test_spatial_btree(dims, do_plot=False):
+    pytest.importorskip("numpy")
     import numpy as np
     nparticles = 2000
     x = -1 + 2*np.random.rand(dims, nparticles)
@@ -154,11 +156,46 @@
         pt.show()
 
 
-if __name__ == "__main__":
-    # make sure that import failures get reported, instead of skipping the 
tests.
-    import pyopencl  # noqa
+def test_diskdict():
+    if sys.platform.startswith("win"):
+        pytest.xfail("unreliable on windows")
+
+    from pytools.diskdict import DiskDict
+
+    from tempfile import NamedTemporaryFile
+
+    with NamedTemporaryFile() as ntf:
+        d = DiskDict(ntf.name)
+
+        key_val = [
+            ((), "hi"),
+            (frozenset([1, 2, "hi"]), 5)
+            ]
+
+        for k, v in key_val:
+            d[k] = v
+        for k, v in key_val:
+            assert d[k] == v
+        del d
+
+        d = DiskDict(ntf.name)
+        for k, v in key_val:
+            del d[k]
+        del d
+
+        d = DiskDict(ntf.name)
+        for k, v in key_val:
+            d[k] = v
+        del d
+
+        d = DiskDict(ntf.name)
+        for k, v in key_val:
+            assert k in d
+            assert d[k] == v
+        del d
 
-    import sys
+
+if __name__ == "__main__":
     if len(sys.argv) > 1:
         exec(sys.argv[1])
     else:


Reply via email to