Hello community,

Here is the log from the commit of package python-cachetools for
openSUSE:Factory, checked in at 2019-02-28 21:44:24
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-cachetools (Old)
 and      /work/SRC/openSUSE:Factory/.python-cachetools.new.28833 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-cachetools"

Thu Feb 28 21:44:24 2019 rev:6 rq:679937 version:3.1.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-cachetools/python-cachetools.changes      2018-12-13 19:40:54.333276811 +0100
+++ /work/SRC/openSUSE:Factory/.python-cachetools.new.28833/python-cachetools.changes   2019-02-28 21:44:26.869498339 +0100
@@ -1,0 +2,14 @@
+Wed Feb 27 18:17:13 UTC 2019 - Thomas Bechtold <tbecht...@suse.com>
+
+- update to 3.1.0:
+  - Fix Python 3.8 compatibility issue.
+  - Use ``time.monotonic`` as default timer if available.
+  - Improve documentation regarding thread safety.
+  - Officially support Python 3.7.
+  - Drop Python 3.3 support (breaking change).
+  - Remove ``missing`` cache constructor parameter (breaking change).
+  - Remove ``self`` from ``@cachedmethod`` key arguments (breaking
+    change).
+  - Add support for ``maxsize=None`` in ``cachetools.func`` decorators.
+
+-------------------------------------------------------------------
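
A minimal sketch of how the removed ``missing`` constructor parameter is
replaced in 3.x: a default-value factory is now expressed by overriding
``__missing__`` in a Cache subclass (class, key and value names below are
purely illustrative; the pattern follows the updated upstream documentation):

    from cachetools import LRUCache

    class SquareCache(LRUCache):
        def __missing__(self, key):
            value = key * key      # compute the missing value
            self[key] = value      # store it; LRU eviction applies when full
            return value

    cache = SquareCache(maxsize=4)
    print(cache[3])   # 9, computed and cached on first access
    print(cache[3])   # 9, now served from the cache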

Old:
----
  cachetools-2.0.1.tar.gz

New:
----
  cachetools-3.1.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-cachetools.spec ++++++
--- /var/tmp/diff_new_pack.6dRJLh/_old  2019-02-28 21:44:28.085497820 +0100
+++ /var/tmp/diff_new_pack.6dRJLh/_new  2019-02-28 21:44:28.109497809 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package python-cachetools
 #
-# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -19,7 +19,7 @@
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 %bcond_without  test
 Name:           python-cachetools
-Version:        2.0.1
+Version:        3.1.0
 Release:        0
 Summary:        Extensible memoizing collections and decorators
 License:        MIT

++++++ cachetools-2.0.1.tar.gz -> cachetools-3.1.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/CHANGES.rst new/cachetools-3.1.0/CHANGES.rst
--- old/cachetools-2.0.1/CHANGES.rst    2017-08-11 18:53:31.000000000 +0200
+++ new/cachetools-3.1.0/CHANGES.rst    2019-01-29 21:33:24.000000000 +0100
@@ -1,3 +1,40 @@
+v3.1.0 (2019-01-29)
+-------------------
+
+- Fix Python 3.8 compatibility issue.
+
+- Use ``time.monotonic`` as default timer if available.
+
+- Improve documentation regarding thread safety.
+
+
+v3.0.0 (2018-11-04)
+-------------------
+
+- Officially support Python 3.7.
+
+- Drop Python 3.3 support (breaking change).
+
+- Remove ``missing`` cache constructor parameter (breaking change).
+
+- Remove ``self`` from ``@cachedmethod`` key arguments (breaking
+  change).
+
+- Add support for ``maxsize=None`` in ``cachetools.func`` decorators.
+
+
+v2.1.0 (2018-05-12)
+-------------------
+
+- Deprecate ``missing`` cache constructor parameter.
+
+- Handle overridden ``getsizeof()`` method in subclasses.
+
+- Fix Python 2.7 ``RRCache`` pickling issues.
+
+- Various documentation improvements.
+
+
 v2.0.1 (2017-08-11)
 -------------------
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/LICENSE new/cachetools-3.1.0/LICENSE
--- old/cachetools-2.0.1/LICENSE        2017-08-11 18:23:50.000000000 +0200
+++ new/cachetools-3.1.0/LICENSE        2019-01-29 21:31:04.000000000 +0100
@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2014-2017 Thomas Kemmer
+Copyright (c) 2014-2019 Thomas Kemmer
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/PKG-INFO new/cachetools-3.1.0/PKG-INFO
--- old/cachetools-2.0.1/PKG-INFO       2017-08-11 18:54:22.000000000 +0200
+++ new/cachetools-3.1.0/PKG-INFO       2019-01-29 21:46:18.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cachetools
-Version: 2.0.1
+Version: 3.1.0
 Summary: Extensible memoizing collections and decorators
 Home-page: https://github.com/tkem/cachetools
 Author: Thomas Kemmer
@@ -13,21 +13,26 @@
         including variants of the Python 3 Standard Library `@lru_cache`_
         function decorator.
         
-        .. code-block:: pycon
+        .. code-block:: python
         
-           >>> from cachetools import LRUCache
-           >>> cache = LRUCache(maxsize=2)
-           >>> cache.update([('first', 1), ('second', 2)])
-           >>> cache
-           LRUCache([('second', 2), ('first', 1)], maxsize=2, currsize=2)
-           >>> cache['third'] = 3
-           >>> cache
-           LRUCache([('second', 2), ('third', 3)], maxsize=2, currsize=2)
-           >>> cache['second']
-           2
-           >>> cache['fourth'] = 4
-           >>> cache
-           LRUCache([('second', 2), ('fourth', 4)], maxsize=2, currsize=2)
+           from cachetools import cached, LRUCache, TTLCache
+        
+           # speed up calculating Fibonacci numbers with dynamic programming
+           @cached(cache={})
+           def fib(n):
+               return n if n < 2 else fib(n - 1) + fib(n - 2)
+        
+           # cache least recently used Python Enhancement Proposals
+           @cached(cache=LRUCache(maxsize=32))
+           def get_pep(num):
+               url = 'http://www.python.org/dev/peps/pep-%04d/' % num
+               with urllib.request.urlopen(url) as s:
+                   return s.read()
+        
+           # cache weather data for no longer than ten minutes
+           @cached(cache=TTLCache(maxsize=1024, ttl=600))
+           def get_weather(place):
+               return owm.weather_at_place(place).get_weather()
         
         For the purpose of this module, a *cache* is a mutable_ mapping_ of a
         fixed maximum size.  When the cache is full, i.e. by adding another
@@ -81,7 +86,7 @@
         License
         ------------------------------------------------------------------------
         
-        Copyright (c) 2014-2017 Thomas Kemmer.
+        Copyright (c) 2014-2019 Thomas Kemmer.
         
         Licensed under the `MIT License`_.
         
@@ -108,8 +113,10 @@
 Classifier: Programming Language :: Python :: 2
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/README.rst new/cachetools-3.1.0/README.rst
--- old/cachetools-2.0.1/README.rst     2017-08-11 18:35:38.000000000 +0200
+++ new/cachetools-3.1.0/README.rst     2019-01-29 21:31:04.000000000 +0100
@@ -5,21 +5,26 @@
 including variants of the Python 3 Standard Library `@lru_cache`_
 function decorator.
 
-.. code-block:: pycon
+.. code-block:: python
 
-   >>> from cachetools import LRUCache
-   >>> cache = LRUCache(maxsize=2)
-   >>> cache.update([('first', 1), ('second', 2)])
-   >>> cache
-   LRUCache([('second', 2), ('first', 1)], maxsize=2, currsize=2)
-   >>> cache['third'] = 3
-   >>> cache
-   LRUCache([('second', 2), ('third', 3)], maxsize=2, currsize=2)
-   >>> cache['second']
-   2
-   >>> cache['fourth'] = 4
-   >>> cache
-   LRUCache([('second', 2), ('fourth', 4)], maxsize=2, currsize=2)
+   from cachetools import cached, LRUCache, TTLCache
+
+   # speed up calculating Fibonacci numbers with dynamic programming
+   @cached(cache={})
+   def fib(n):
+       return n if n < 2 else fib(n - 1) + fib(n - 2)
+
+   # cache least recently used Python Enhancement Proposals
+   @cached(cache=LRUCache(maxsize=32))
+   def get_pep(num):
+       url = 'http://www.python.org/dev/peps/pep-%04d/' % num
+       with urllib.request.urlopen(url) as s:
+           return s.read()
+
+   # cache weather data for no longer than ten minutes
+   @cached(cache=TTLCache(maxsize=1024, ttl=600))
+   def get_weather(place):
+       return owm.weather_at_place(place).get_weather()
 
 For the purpose of this module, a *cache* is a mutable_ mapping_ of a
 fixed maximum size.  When the cache is full, i.e. by adding another
@@ -73,7 +78,7 @@
 License
 ------------------------------------------------------------------------
 
-Copyright (c) 2014-2017 Thomas Kemmer.
+Copyright (c) 2014-2019 Thomas Kemmer.
 
 Licensed under the `MIT License`_.
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/cachetools/__init__.py new/cachetools-3.1.0/cachetools/__init__.py
--- old/cachetools-2.0.1/cachetools/__init__.py 2017-08-11 18:53:31.000000000 +0200
+++ new/cachetools-3.1.0/cachetools/__init__.py 2019-01-29 21:31:04.000000000 +0100
@@ -16,7 +16,7 @@
     'cached', 'cachedmethod'
 )
 
-__version__ = '2.0.1'
+__version__ = '3.1.0'
 
 if hasattr(functools.update_wrapper(lambda f: f(), lambda: 42), '__wrapped__'):
     _update_wrapper = functools.update_wrapper
@@ -79,7 +79,7 @@
                 c = cache(self)
                 if c is None:
                     return method(self, *args, **kwargs)
-                k = key(self, *args, **kwargs)
+                k = key(*args, **kwargs)
                 try:
                     return c[k]
                 except KeyError:
@@ -95,7 +95,7 @@
                 c = cache(self)
                 if c is None:
                     return method(self, *args, **kwargs)
-                k = key(self, *args, **kwargs)
+                k = key(*args, **kwargs)
                 try:
                     with lock(self):
                         return c[k]
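
The hunk above implements the "remove ``self`` from ``@cachedmethod`` key
arguments" change: the cache key is now built from the call arguments only.
A hedged sketch of the practical consequence for instances that share one
cache object (class and method names are illustrative):

    import operator
    from cachetools import cachedmethod, LRUCache

    shared = LRUCache(maxsize=32)

    class Squarer(object):
        def __init__(self):
            self.cache = shared    # both instances use the same cache object

        @cachedmethod(operator.attrgetter('cache'))
        def square(self, n):
            print('computing %d**2' % n)
            return n * n

    a, b = Squarer(), Squarer()
    a.square(3)    # prints "computing 3**2"
    b.square(3)    # same key as a.square(3), served from the shared cache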
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/cachetools/abc.py new/cachetools-3.1.0/cachetools/abc.py
--- old/cachetools-2.0.1/cachetools/abc.py      2017-08-11 18:23:50.000000000 +0200
+++ new/cachetools-3.1.0/cachetools/abc.py      2019-01-29 21:31:04.000000000 +0100
@@ -1,11 +1,14 @@
 from __future__ import absolute_import
 
-import collections
-
 from abc import abstractmethod
 
+try:
+    from collections.abc import MutableMapping
+except ImportError:
+    from collections import MutableMapping
+
 
-class DefaultMapping(collections.MutableMapping):
+class DefaultMapping(MutableMapping):
 
     __slots__ = ()
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/cachetools/cache.py new/cachetools-3.1.0/cachetools/cache.py
--- old/cachetools-2.0.1/cachetools/cache.py    2016-10-03 14:09:18.000000000 +0200
+++ new/cachetools-3.1.0/cachetools/cache.py    2018-11-04 20:58:27.000000000 +0100
@@ -19,11 +19,10 @@
 
     __size = _DefaultSize()
 
-    def __init__(self, maxsize, missing=None, getsizeof=None):
-        if missing:
-            self.__missing = missing
+    def __init__(self, maxsize, getsizeof=None):
         if getsizeof:
-            self.__getsizeof = getsizeof
+            self.getsizeof = getsizeof
+        if self.getsizeof is not Cache.getsizeof:
             self.__size = dict()
         self.__data = dict()
         self.__currsize = 0
@@ -68,12 +67,7 @@
         return key in self.__data
 
     def __missing__(self, key):
-        value = self.__missing(key)
-        try:
-            self.__setitem__(key, value)
-        except ValueError:
-            pass  # value too large
-        return value
+        raise KeyError(key)
 
     def __iter__(self):
         return iter(self.__data)
@@ -81,14 +75,6 @@
     def __len__(self):
         return len(self.__data)
 
-    @staticmethod
-    def __getsizeof(value):
-        return 1
-
-    @staticmethod
-    def __missing(key):
-        raise KeyError(key)
-
     @property
     def maxsize(self):
         """The maximum size of the cache."""
@@ -99,6 +85,7 @@
         """The current size of the cache."""
         return self.__currsize
 
-    def getsizeof(self, value):
+    @staticmethod
+    def getsizeof(value):
         """Return the size of a cache element's value."""
-        return self.__getsizeof(value)
+        return 1
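
With ``getsizeof`` now a static method on ``Cache`` (and ``__missing__``
raising ``KeyError`` by default), custom sizing can be supplied either via
the constructor argument, as before, or by overriding the method in a
subclass. A small illustrative sketch (subclass name and values are made up):

    from cachetools import Cache

    class StringSizeCache(Cache):
        def getsizeof(self, value):
            return len(value)      # each entry costs its string length

    c = StringSizeCache(maxsize=10)
    c['a'] = 'hello'    # uses 5 of 10
    c['b'] = 'hi'       # uses 2 more
    print(c.currsize)   # 7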
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/cachetools/func.py new/cachetools-3.1.0/cachetools/func.py
--- old/cachetools-2.0.1/cachetools/func.py     2016-10-03 14:09:18.000000000 +0200
+++ new/cachetools-3.1.0/cachetools/func.py     2019-01-29 21:31:04.000000000 +0100
@@ -5,11 +5,15 @@
 import collections
 import functools
 import random
-import time
 
 try:
-    from threading import RLock
+    from time import monotonic as default_timer
 except ImportError:
+    from time import time as default_timer
+
+try:
+    from threading import RLock
+except ImportError:  # pragma: no cover
     from dummy_threading import RLock
 
 from . import keys
@@ -26,6 +30,24 @@
 ])
 
 
+class _UnboundCache(dict):
+
+    maxsize = None
+
+    @property
+    def currsize(self):
+        return len(self)
+
+
+class _UnboundTTLCache(TTLCache):
+    def __init__(self, ttl, timer):
+        TTLCache.__init__(self, float('inf'), ttl, timer)
+
+    @property
+    def maxsize(self):
+        return None
+
+
 def _cache(cache, typed=False):
     def decorator(func):
         key = keys.typedkey if typed else keys.hashkey
@@ -77,7 +99,10 @@
     algorithm.
 
     """
-    return _cache(LFUCache(maxsize), typed)
+    if maxsize is None:
+        return _cache(_UnboundCache(), typed)
+    else:
+        return _cache(LFUCache(maxsize), typed)
 
 
 def lru_cache(maxsize=128, typed=False):
@@ -86,7 +111,10 @@
     algorithm.
 
     """
-    return _cache(LRUCache(maxsize), typed)
+    if maxsize is None:
+        return _cache(_UnboundCache(), typed)
+    else:
+        return _cache(LRUCache(maxsize), typed)
 
 
 def rr_cache(maxsize=128, choice=random.choice, typed=False):
@@ -95,12 +123,18 @@
     algorithm.
 
     """
-    return _cache(RRCache(maxsize, choice), typed)
+    if maxsize is None:
+        return _cache(_UnboundCache(), typed)
+    else:
+        return _cache(RRCache(maxsize, choice), typed)
 
 
-def ttl_cache(maxsize=128, ttl=600, timer=time.time, typed=False):
+def ttl_cache(maxsize=128, ttl=600, timer=default_timer, typed=False):
     """Decorator to wrap a function with a memoizing callable that saves
     up to `maxsize` results based on a Least Recently Used (LRU)
     algorithm with a per-item time-to-live (TTL) value.
     """
-    return _cache(TTLCache(maxsize, ttl, timer), typed)
+    if maxsize is None:
+        return _cache(_UnboundTTLCache(ttl, timer), typed)
+    else:
+        return _cache(TTLCache(maxsize, ttl, timer), typed)
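
The ``_UnboundCache``/``_UnboundTTLCache`` paths above are what make
``maxsize=None`` work in the ``cachetools.func`` decorators (one of the
3.0.0 changes listed in the changelog). A minimal usage sketch:

    import cachetools.func

    @cachetools.func.lru_cache(maxsize=None)
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    print(fib(42))            # 267914296
    print(fib.cache_info())   # maxsize is reported as None; currsize grows as needed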
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/cachetools/lfu.py new/cachetools-3.1.0/cachetools/lfu.py
--- old/cachetools-2.0.1/cachetools/lfu.py      2016-10-03 14:09:18.000000000 +0200
+++ new/cachetools-3.1.0/cachetools/lfu.py      2018-11-04 20:58:27.000000000 +0100
@@ -8,8 +8,8 @@
 class LFUCache(Cache):
     """Least Frequently Used (LFU) cache implementation."""
 
-    def __init__(self, maxsize, missing=None, getsizeof=None):
-        Cache.__init__(self, maxsize, missing, getsizeof)
+    def __init__(self, maxsize, getsizeof=None):
+        Cache.__init__(self, maxsize, getsizeof)
         self.__counter = collections.Counter()
 
     def __getitem__(self, key, cache_getitem=Cache.__getitem__):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/cachetools/lru.py new/cachetools-3.1.0/cachetools/lru.py
--- old/cachetools-2.0.1/cachetools/lru.py      2016-10-03 14:09:18.000000000 +0200
+++ new/cachetools-3.1.0/cachetools/lru.py      2018-11-04 20:58:27.000000000 +0100
@@ -8,8 +8,8 @@
 class LRUCache(Cache):
     """Least Recently Used (LRU) cache implementation."""
 
-    def __init__(self, maxsize, missing=None, getsizeof=None):
-        Cache.__init__(self, maxsize, missing, getsizeof)
+    def __init__(self, maxsize, getsizeof=None):
+        Cache.__init__(self, maxsize, getsizeof)
         self.__order = collections.OrderedDict()
 
     def __getitem__(self, key, cache_getitem=Cache.__getitem__):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/cachetools/rr.py new/cachetools-3.1.0/cachetools/rr.py
--- old/cachetools-2.0.1/cachetools/rr.py       2016-10-03 14:09:18.000000000 +0200
+++ new/cachetools-3.1.0/cachetools/rr.py       2018-11-04 20:58:27.000000000 +0100
@@ -5,13 +5,21 @@
 from .cache import Cache
 
 
+# random.choice cannot be pickled in Python 2.7
+def _choice(seq):
+    return random.choice(seq)
+
+
 class RRCache(Cache):
     """Random Replacement (RR) cache implementation."""
 
-    def __init__(self, maxsize, choice=random.choice, missing=None,
-                 getsizeof=None):
-        Cache.__init__(self, maxsize, missing, getsizeof)
-        self.__choice = choice
+    def __init__(self, maxsize, choice=random.choice, getsizeof=None):
+        Cache.__init__(self, maxsize, getsizeof)
+        # TODO: use None as default, assign to self.choice directly?
+        if choice is random.choice:
+            self.__choice = _choice
+        else:
+            self.__choice = choice
 
     @property
     def choice(self):
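
The module-level ``_choice`` helper above keeps an ``RRCache`` created with
the default ``choice`` picklable (the source comment notes that
``random.choice`` itself cannot be pickled on Python 2.7). A quick
round-trip sketch:

    import pickle
    from cachetools import RRCache

    cache = RRCache(maxsize=2)
    cache.update({1: 1, 2: 2})
    copy = pickle.loads(pickle.dumps(cache))
    print(sorted(copy.items()))   # [(1, 1), (2, 2)]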
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/cachetools/ttl.py new/cachetools-3.1.0/cachetools/ttl.py
--- old/cachetools-2.0.1/cachetools/ttl.py      2016-10-03 14:09:18.000000000 +0200
+++ new/cachetools-3.1.0/cachetools/ttl.py      2019-01-29 21:31:04.000000000 +0100
@@ -1,7 +1,11 @@
 from __future__ import absolute_import
 
 import collections
-import time
+
+try:
+    from time import monotonic as default_timer
+except ImportError:
+    from time import time as default_timer
 
 from .cache import Cache
 
@@ -57,9 +61,8 @@
 class TTLCache(Cache):
     """LRU Cache implementation with per-item time-to-live (TTL) value."""
 
-    def __init__(self, maxsize, ttl, timer=time.time, missing=None,
-                 getsizeof=None):
-        Cache.__init__(self, maxsize, missing, getsizeof)
+    def __init__(self, maxsize, ttl, timer=default_timer, getsizeof=None):
+        Cache.__init__(self, maxsize, getsizeof)
         self.__root = root = _Link()
         root.prev = root.next = root
         self.__links = collections.OrderedDict()
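
As in ``cachetools/func.py`` above, ``TTLCache`` now prefers
``time.monotonic`` and falls back to ``time.time`` only where it is missing
(e.g. Python 2.7); the calling convention is unchanged and a custom
``timer`` can still be passed. A short sketch, with made-up values:

    import time
    from cachetools import TTLCache

    cache = TTLCache(maxsize=128, ttl=600)        # entries live for ten minutes
    cache['answer'] = 42
    print(cache.get('answer'))                    # 42 while the entry is fresh

    # an explicit timer is still accepted, e.g. for deterministic tests
    test_cache = TTLCache(maxsize=8, ttl=1, timer=time.time)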
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/cachetools.egg-info/PKG-INFO new/cachetools-3.1.0/cachetools.egg-info/PKG-INFO
--- old/cachetools-2.0.1/cachetools.egg-info/PKG-INFO   2017-08-11 18:54:22.000000000 +0200
+++ new/cachetools-3.1.0/cachetools.egg-info/PKG-INFO   2019-01-29 21:46:18.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cachetools
-Version: 2.0.1
+Version: 3.1.0
 Summary: Extensible memoizing collections and decorators
 Home-page: https://github.com/tkem/cachetools
 Author: Thomas Kemmer
@@ -13,21 +13,26 @@
         including variants of the Python 3 Standard Library `@lru_cache`_
         function decorator.
         
-        .. code-block:: pycon
+        .. code-block:: python
         
-           >>> from cachetools import LRUCache
-           >>> cache = LRUCache(maxsize=2)
-           >>> cache.update([('first', 1), ('second', 2)])
-           >>> cache
-           LRUCache([('second', 2), ('first', 1)], maxsize=2, currsize=2)
-           >>> cache['third'] = 3
-           >>> cache
-           LRUCache([('second', 2), ('third', 3)], maxsize=2, currsize=2)
-           >>> cache['second']
-           2
-           >>> cache['fourth'] = 4
-           >>> cache
-           LRUCache([('second', 2), ('fourth', 4)], maxsize=2, currsize=2)
+           from cachetools import cached, LRUCache, TTLCache
+        
+           # speed up calculating Fibonacci numbers with dynamic programming
+           @cached(cache={})
+           def fib(n):
+               return n if n < 2 else fib(n - 1) + fib(n - 2)
+        
+           # cache least recently used Python Enhancement Proposals
+           @cached(cache=LRUCache(maxsize=32))
+           def get_pep(num):
+               url = 'http://www.python.org/dev/peps/pep-%04d/' % num
+               with urllib.request.urlopen(url) as s:
+                   return s.read()
+        
+           # cache weather data for no longer than ten minutes
+           @cached(cache=TTLCache(maxsize=1024, ttl=600))
+           def get_weather(place):
+               return owm.weather_at_place(place).get_weather()
         
         For the purpose of this module, a *cache* is a mutable_ mapping_ of a
         fixed maximum size.  When the cache is full, i.e. by adding another
@@ -81,7 +86,7 @@
         License
         ------------------------------------------------------------------------
         
-        Copyright (c) 2014-2017 Thomas Kemmer.
+        Copyright (c) 2014-2019 Thomas Kemmer.
         
         Licensed under the `MIT License`_.
         
@@ -108,8 +113,10 @@
 Classifier: Programming Language :: Python :: 2
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/docs/conf.py new/cachetools-3.1.0/docs/conf.py
--- old/cachetools-2.0.1/docs/conf.py   2017-08-11 18:23:50.000000000 +0200
+++ new/cachetools-3.1.0/docs/conf.py   2019-01-29 21:31:04.000000000 +0100
@@ -6,7 +6,7 @@
 
 
 project = 'cachetools'
-copyright = '2014-2017 Thomas Kemmer'
+copyright = '2014-2019 Thomas Kemmer'
 version = get_version(b'../cachetools/__init__.py')
 release = version
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/docs/index.rst new/cachetools-3.1.0/docs/index.rst
--- old/cachetools-2.0.1/docs/index.rst 2017-08-11 18:23:50.000000000 +0200
+++ new/cachetools-3.1.0/docs/index.rst 2019-01-29 21:31:04.000000000 +0100
@@ -22,6 +22,15 @@
 calls are provided, too.
 
 
+.. testsetup:: *
+
+   import operator
+   from cachetools import cached, cachedmethod, LRUCache
+
+   import mock
+   urllib = mock.MagicMock()
+
+
 Cache implementations
 ------------------------------------------------------------------------
 
@@ -30,37 +39,9 @@
 :class:`Cache`, which in turn derives from
 :class:`collections.MutableMapping`, and provide :attr:`maxsize` and
 :attr:`currsize` properties to retrieve the maximum and current size
-of the cache.  When a cache is full, :meth:`setitem` calls
-:meth:`popitem` repeatedly until there is enough room for the item to
-be added.
-
-All cache classes accept an optional `missing` keyword argument in
-their constructor, which can be used to provide a default *factory
-function*.  If the key `key` is not present, the ``cache[key]``
-operation calls :meth:`Cache.__missing__`, which in turn calls
-`missing` with `key` as its sole argument.  The cache will then store
-the object returned from ``missing(key)`` as the new cache value for
-`key`, possibly discarding other items if the cache is full.  This may
-be used to provide memoization for existing single-argument functions::
-
-    from cachetools import LRUCache
-    import urllib.request
-
-    def get_pep(num):
-        """Retrieve text of a Python Enhancement Proposal"""
-        url = 'http://www.python.org/dev/peps/pep-%04d/' % num
-        with urllib.request.urlopen(url) as s:
-            return s.read()
-
-    cache = LRUCache(maxsize=4, missing=get_pep)
-
-    for n in 8, 9, 290, 308, 320, 8, 218, 320, 279, 289, 320, 9991:
-        try:
-            print(n, len(cache[n]))
-        except urllib.error.HTTPError:
-            print(n, 'Not Found')
-    print(sorted(cache.keys()))
-
+of the cache.  When a cache is full, :meth:`Cache.__setitem__()` calls
+:meth:`self.popitem()` repeatedly until there is enough room for the
+item to be added.
 
 :class:`Cache` also features a :meth:`getsizeof` method, which returns
 the size of a given `value`.  The default implementation of
@@ -70,7 +51,14 @@
 named constructor parameter `getsizeof`, which may specify a function
 of one argument used to retrieve the size of an item's value.
 
-.. autoclass:: Cache
+.. note::
+
+   Please be aware that all these classes are *not* thread-safe.
+   Access to a shared cache from multiple threads must be properly
+   synchronized, e.g. by using one of the memoizing decorators with a
+   suitable `lock` object.
+
+.. autoclass:: Cache(maxsize, getsizeof=None)
    :members:
 
    This class discards arbitrary items using :meth:`popitem` to make
@@ -78,25 +66,21 @@
    to implement specific caching strategies.  If a subclass has to
    keep track of item access, insertion or deletion, it may
    additionally need to override :meth:`__getitem__`,
-   :meth:`__setitem__` and :meth:`__delitem__`.  If a subclass wants
-   to store meta data with its values, i.e. the `value` argument
-   passed to :meth:`Cache.__setitem__` is different from what the
-   derived class's :meth:`__setitem__` received, it will probably need
-   to override :meth:`getsizeof`, too.
+   :meth:`__setitem__` and :meth:`__delitem__`.
 
-.. autoclass:: LFUCache
+.. autoclass:: LFUCache(maxsize, getsizeof=None)
    :members:
 
    This class counts how often an item is retrieved, and discards the
    items used least often to make space when necessary.
 
-.. autoclass:: LRUCache
+.. autoclass:: LRUCache(maxsize, getsizeof=None)
    :members:
 
    This class discards the least recently used items first to make
    space when necessary.
 
-.. autoclass:: RRCache(maxsize, choice=random.choice, missing=None, getsizeof=None)
+.. autoclass:: RRCache(maxsize, choice=random.choice, getsizeof=None)
    :members:
 
    This class randomly selects candidate items and discards them to
@@ -107,32 +91,86 @@
    an alternative function that returns an arbitrary element from a
    non-empty sequence.
 
-.. autoclass:: TTLCache(maxsize, ttl, timer=time.time, missing=None, getsizeof=None)
-   :members:
-   :exclude-members: expire
+.. autoclass:: TTLCache(maxsize, ttl, timer=time.monotonic, getsizeof=None)
+   :members: popitem, timer, ttl
 
    This class associates a time-to-live value with each item.  Items
    that expire because they have exceeded their time-to-live will be
-   removed automatically.  If no expired items are there to remove,
-   the least recently used items will be discarded first to make space
-   when necessary.  Trying to access an expired item will raise a
-   :exc:`KeyError`.
-
-   By default, the time-to-live is specified in seconds, and the
-   :func:`time.time` function is used to retrieve the current time.  A
-   custom `timer` function can be supplied if needed.
-
-   .. automethod:: expire(self, time=None)
-
-      Since expired items will be "physically" removed from a cache
-      only at the next mutating operation, e.g. :meth:`__setitem__` or
-      :meth:`__delitem__`, to avoid changing the underlying dictionary
-      while iterating over it, expired items may still claim memory
-      although they are no longer accessible.  Calling this method
-      removes all items whose time-to-live would have expired by
-      `time`, so garbage collection is free to reuse their memory.  If
-      `time` is :const:`None`, this removes all items that have
-      expired by the current value returned by :attr:`timer`.
+   no longer accessible, and will be removed eventually.  If no
+   expired items are there to remove, the least recently used items
+   will be discarded first to make space when necessary.
+
+   By default, the time-to-live is specified in seconds and
+   :func:`time.monotonic` is used to retrieve the current time.  If
+   :func:`time.monotonic` is not available, e.g. when running Python
+   2.7, :func:`time.time` will be used.  A custom `timer` function can
+   be supplied if needed.
+
+   .. method:: expire(self, time=None)
+
+      Expired items will be removed from a cache only at the next
+      mutating operation, e.g. :meth:`__setitem__` or
+      :meth:`__delitem__`, and therefore may still claim memory.
+      Calling this method removes all items whose time-to-live would
+      have expired by `time`, so garbage collection is free to reuse
+      their memory.  If `time` is :const:`None`, this removes all
+      items that have expired by the current value returned by
+      :attr:`timer`.
+
+
+Extending cache classes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Sometimes it may be desirable to notice when and what cache items are
+evicted, i.e. removed from a cache to make room for new items.  Since
+all cache implementations call :meth:`popitem` to evict items from the
+cache, this can be achieved by overriding this method in a subclass:
+
+.. doctest::
+   :pyversion: >= 3
+
+   >>> class MyCache(LRUCache):
+   ...     def popitem(self):
+   ...         key, value = super().popitem()
+   ...         print('Key "%s" evicted with value "%s"' % (key, value))
+   ...         return key, value
+
+   >>> c = MyCache(maxsize=2)
+   >>> c['a'] = 1
+   >>> c['b'] = 2
+   >>> c['c'] = 3
+   Key "a" evicted with value "1"
+
+Similar to the standard library's :class:`collections.defaultdict`,
+subclasses of :class:`Cache` may implement a :meth:`__missing__`
+method which is called by :meth:`Cache.__getitem__` if the requested
+key is not found:
+
+.. doctest::
+   :pyversion: >= 3
+
+   >>> class PepStore(LRUCache):
+   ...     def __missing__(self, key):
+   ...         """Retrieve text of a Python Enhancement Proposal"""
+   ...         url = 'http://www.python.org/dev/peps/pep-%04d/' % key
+   ...         try:
+   ...             with urllib.request.urlopen(url) as s:
+   ...                 pep = s.read()
+   ...                 self[key] = pep  # store text in cache
+   ...                 return pep
+   ...         except urllib.error.HTTPError:
+   ...             return 'Not Found'  # do not store in cache
+
+   >>> peps = PepStore(maxsize=4)
+   >>> for n in 8, 9, 290, 308, 320, 8, 218, 320, 279, 289, 320:
+   ...     pep = peps[n]
+   >>> print(sorted(peps.keys()))
+   [218, 279, 289, 320]
+
+Note, though, that such a class does not really behave like a *cache*
+any more, and will lead to surprising results when used with any of
+the memoizing decorators described below.  However, it may be useful
+in its own right.
 
 
 Memoizing decorators
@@ -140,16 +178,17 @@
 
 The :mod:`cachetools` module provides decorators for memoizing
 function and method calls.  This can save time when a function is
-often called with the same arguments::
+often called with the same arguments:
 
-  from cachetools import cached
+.. doctest::
 
-  @cached(cache={})
-  def fib(n):
-      return n if n < 2 else fib(n - 1) + fib(n - 2)
+   >>> @cached(cache={})
+   ... def fib(n):
+   ...     'Compute the nth number in the Fibonacci sequence'
+   ...     return n if n < 2 else fib(n - 1) + fib(n - 2)
 
-  for i in range(100):
-      print('fib(%d) = %d' % (i, fib(i)))
+   >>> fib(42)
+   267914296
 
 .. decorator:: cached(cache, key=cachetools.keys.hashkey, lock=None)
 
@@ -189,44 +228,56 @@
    cache during runtime, the cache should be assigned to a variable.
    When a `lock` object is used, any access to the cache from outside
    the function wrapper should also be performed within an appropriate
-   `with` statement::
+   `with` statement:
+
+   .. testcode::
 
-     from threading import RLock
-     from cachetools import cached, LRUCache
+      from threading import RLock
 
-     cache = LRUCache(maxsize=100)
-     lock = RLock()
+      cache = LRUCache(maxsize=32)
+      lock = RLock()
 
-     @cached(cache, lock=lock)
-     def fib(n):
-         return n if n < 2 else fib(n - 1) + fib(n - 2)
+      @cached(cache, lock=lock)
+      def get_pep(num):
+          'Retrieve text of a Python Enhancement Proposal'
+          url = 'http://www.python.org/dev/peps/pep-%04d/' % num
+          with urllib.request.urlopen(url) as s:
+              return s.read()
 
-     # make sure access to cache is synchronized
-     with lock:
-         cache.clear()
+      # make sure access to cache is synchronized
+      with lock:
+          cache.clear()
 
    It is also possible to use a single shared cache object with
    multiple functions.  However, care must be taken that different
    cache keys are generated for each function, even for identical
-   function arguments::
+   function arguments:
 
-     from functools import partial
-     from cachetools import cached, LRUCache
-     from cachetools.keys import hashkey
+   .. doctest::
+      :options: +ELLIPSIS
 
-     cache = LRUCache(maxsize=100)
+      >>> from cachetools.keys import hashkey
+      >>> from functools import partial
 
-     @cached(cache, key=partial(hashkey, 'fib'))
-     def fib(n):
-         return n if n < 2 else fib(n - 1) + fib(n - 2)
-
-     @cached(cache, key=partial(hashkey, 'fac'))
-     def fac(n):
-         return 1 if n == 0 else n * fac(n - 1)
-
-     print(fib(42))
-     print(fac(42))
-     print(cache)
+      >>> # shared cache for integer sequences
+      >>> numcache = {}
+
+      >>> # compute Fibonacci numbers
+      >>> @cached(numcache, key=partial(hashkey, 'fib'))
+      ... def fib(n):
+      ...    return n if n < 2 else fib(n - 1) + fib(n - 2)
+
+      >>> # compute Lucas numbers
+      >>> @cached(numcache, key=partial(hashkey, 'luc'))
+      ... def luc(n):
+      ...    return 2 - n if n < 2 else luc(n - 1) + luc(n - 2)
+
+      >>> fib(42)
+      267914296
+      >>> luc(42)
+      599074578
+      >>> list(sorted(numcache.items()))
+      [..., (('fib', 42), 267914296), ..., (('luc', 42), 599074578)]
 
 .. decorator:: cachedmethod(cache, key=cachetools.keys.hashkey, lock=None)
 
@@ -248,27 +299,30 @@
 
    One advantage of :func:`cachedmethod` over the :func:`cached`
    function decorator is that cache properties such as `maxsize` can
-   be set at runtime::
+   be set at runtime:
+
+   .. testcode::
 
-     import operator
-     import urllib.request
+      class CachedPEPs(object):
 
-     from cachetools import LRUCache, cachedmethod
+          def __init__(self, cachesize):
+              self.cache = LRUCache(maxsize=cachesize)
 
-     class CachedPEPs(object):
+          @cachedmethod(operator.attrgetter('cache'))
+          def get(self, num):
+              """Retrieve text of a Python Enhancement Proposal"""
+              url = 'http://www.python.org/dev/peps/pep-%04d/' % num
+              with urllib.request.urlopen(url) as s:
+                  return s.read()
 
-         def __init__(self, cachesize):
-             self.cache = LRUCache(maxsize=cachesize)
+      peps = CachedPEPs(cachesize=10)
+      print("PEP #1: %s" % peps.get(1))
 
-         @cachedmethod(operator.attrgetter('cache'))
-         def get(self, num):
-             """Retrieve text of a Python Enhancement Proposal"""
-             url = 'http://www.python.org/dev/peps/pep-%04d/' % num
-             with urllib.request.urlopen(url) as s:
-                 return s.read()
+   .. testoutput::
+      :hide:
+      :options: +ELLIPSIS
 
-     peps = CachedPEPs(cachesize=10)
-     print("PEP #1: %s" % peps.get(1))
+      PEP #1: ...
 
 
 :mod:`cachetools.keys` --- Key functions for memoizing decorators
@@ -306,7 +360,7 @@
 
   def envkey(*args, env={}, **kwargs):
       key = hashkey(*args, **kwargs)
-      key += tuple(env.items())
+      key += tuple(sorted(env.items()))
       return key
 
 The :func:`envkey` function can then be used in decorator declarations
@@ -324,9 +378,9 @@
 this module provides several memoizing function decorators with a
 similar API.  All these decorators wrap a function with a memoizing
 callable that saves up to the `maxsize` most recent calls, using
-different caching strategies.  Note that unlike
-:func:`functools.lru_cache`, setting `maxsize` to :const:`None` is not
-supported.
+different caching strategies.  If `maxsize` is set to :const:`None`,
+the caching strategy is effectively disabled and the cache can grow
+without bound.
 
 If the optional argument `typed` is set to :const:`True`, function
 arguments of different types will be cached separately.  For example,
@@ -335,8 +389,9 @@
 
 The wrapped function is instrumented with :func:`cache_info` and
 :func:`cache_clear` functions to provide information about cache
-performance and clear the cache.  See the :func:`functools.lru_cache`
-documentation for details.
+performance and clear the cache.  Please see the
+:func:`functools.lru_cache` documentation for details.  Also note that
+all the decorators in this module are thread-safe by default.
 
 .. decorator:: lfu_cache(maxsize=128, typed=False)
 
@@ -356,7 +411,7 @@
    saves up to `maxsize` results based on a Random Replacement (RR)
    algorithm.
 
-.. decorator:: ttl_cache(maxsize=128, ttl=600, timer=time.time, typed=False)
+.. decorator:: ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False)
 
    Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/setup.py new/cachetools-3.1.0/setup.py
--- old/cachetools-2.0.1/setup.py       2017-08-11 18:41:40.000000000 +0200
+++ new/cachetools-3.1.0/setup.py       2018-11-04 20:58:27.000000000 +0100
@@ -29,10 +29,12 @@
         'Programming Language :: Python :: 2',
         'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.3',
         'Programming Language :: Python :: 3.4',
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
+        'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: Implementation :: CPython',
+        'Programming Language :: Python :: Implementation :: PyPy',
         'Topic :: Software Development :: Libraries :: Python Modules'
     ]
 )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/tests/__init__.py new/cachetools-3.1.0/tests/__init__.py
--- old/cachetools-2.0.1/tests/__init__.py      2016-04-22 15:03:41.000000000 +0200
+++ new/cachetools-3.1.0/tests/__init__.py      2018-11-04 20:58:27.000000000 +0100
@@ -1,10 +1,9 @@
 class CacheTestMixin(object):
 
-    def cache(self, maxsize, missing=None, getsizeof=None):
-        raise NotImplementedError
+    Cache = None
 
-    def test_cache_defaults(self):
-        cache = self.cache(maxsize=1)
+    def test_defaults(self):
+        cache = self.Cache(maxsize=1)
         self.assertEqual(0, len(cache))
         self.assertEqual(1, cache.maxsize)
         self.assertEqual(0, cache.currsize)
@@ -13,8 +12,8 @@
         self.assertEqual(1, cache.getsizeof(0))
         self.assertTrue(repr(cache).startswith(cache.__class__.__name__))
 
-    def test_cache_insert(self):
-        cache = self.cache(maxsize=2)
+    def test_insert(self):
+        cache = self.Cache(maxsize=2)
 
         cache.update({1: 1, 2: 2})
         self.assertEqual(2, len(cache))
@@ -31,8 +30,8 @@
         self.assertEqual(4, cache[4])
         self.assertTrue(1 in cache or 2 in cache or 3 in cache)
 
-    def test_cache_update(self):
-        cache = self.cache(maxsize=2)
+    def test_update(self):
+        cache = self.Cache(maxsize=2)
 
         cache.update({1: 1, 2: 2})
         self.assertEqual(2, len(cache))
@@ -49,8 +48,8 @@
         self.assertEqual('a', cache[1])
         self.assertEqual('b', cache[2])
 
-    def test_cache_delete(self):
-        cache = self.cache(maxsize=2)
+    def test_delete(self):
+        cache = self.Cache(maxsize=2)
 
         cache.update({1: 1, 2: 2})
         self.assertEqual(2, len(cache))
@@ -73,8 +72,8 @@
         self.assertNotIn(1, cache)
         self.assertNotIn(2, cache)
 
-    def test_cache_pop(self):
-        cache = self.cache(maxsize=2)
+    def test_pop(self):
+        cache = self.Cache(maxsize=2)
 
         cache.update({1: 1, 2: 2})
         self.assertEqual(2, cache.pop(2))
@@ -93,8 +92,8 @@
         self.assertEqual(None, cache.pop(1, None))
         self.assertEqual(None, cache.pop(0, None))
 
-    def test_cache_popitem(self):
-        cache = self.cache(maxsize=2)
+    def test_popitem(self):
+        cache = self.Cache(maxsize=2)
 
         cache.update({1: 1, 2: 2})
         self.assertIn(cache.pop(1), {1: 1, 2: 2})
@@ -105,9 +104,9 @@
         with self.assertRaises(KeyError):
             cache.popitem()
 
-    def test_cache_missing(self):
-        cache = self.cache(maxsize=2, missing=lambda x: x)
-
+    def _test_missing(self, cache):
+        self.assertEqual(0, cache.currsize)
+        self.assertEqual(2, cache.maxsize)
         self.assertEqual(0, len(cache))
         self.assertEqual(1, cache[1])
         self.assertEqual(2, cache[2])
@@ -157,8 +156,9 @@
         self.assertTrue(1 in cache or 2 in cache)
         self.assertTrue(1 not in cache or 2 not in cache)
 
-        cache = self.cache(maxsize=2, missing=lambda x: x,
-                           getsizeof=lambda x: x)
+    def _test_missing_getsizeof(self, cache):
+        self.assertEqual(0, cache.currsize)
+        self.assertEqual(2, cache.maxsize)
         self.assertEqual(1, cache[1])
         self.assertIn(1, cache)
         self.assertEqual(2, cache[2])
@@ -169,10 +169,21 @@
         self.assertIn(2, cache)
         self.assertNotIn(3, cache)
 
-    def test_cache_getsizeof(self):
-        cache = self.cache(maxsize=3, getsizeof=lambda x: x)
-        self.assertEqual(3, cache.maxsize)
+    def test_missing_subclass(self):
+        class Cache(self.Cache):
+            def __missing__(self, key):
+                try:
+                    self[key] = key
+                except ValueError:
+                    pass
+                return key
+
+        self._test_missing(Cache(maxsize=2))
+        self._test_missing_getsizeof(Cache(maxsize=2, getsizeof=lambda x: x))
+
+    def _test_getsizeof(self, cache):
         self.assertEqual(0, cache.currsize)
+        self.assertEqual(3, cache.maxsize)
         self.assertEqual(1, cache.getsizeof(1))
         self.assertEqual(2, cache.getsizeof(2))
         self.assertEqual(3, cache.getsizeof(3))
@@ -214,10 +225,20 @@
         self.assertEqual(3, cache.currsize)
         self.assertEqual(3, cache[3])
 
-    def test_cache_pickle(self):
+    def test_getsizeof_param(self):
+        self._test_getsizeof(self.Cache(maxsize=3, getsizeof=lambda x: x))
+
+    def test_getsizeof_subclass(self):
+        class Cache(self.Cache):
+            def getsizeof(self, value):
+                return value
+
+        self._test_getsizeof(Cache(maxsize=3))
+
+    def test_pickle(self):
         import pickle
 
-        source = self.cache(maxsize=2)
+        source = self.Cache(maxsize=2)
         source.update({1: 1, 2: 2})
 
         cache = pickle.loads(pickle.dumps(source))
@@ -239,13 +260,13 @@
 
         self.assertEqual(cache, pickle.loads(pickle.dumps(cache)))
 
-    def test_cache_pickle_maxsize(self):
+    def test_pickle_maxsize(self):
         import pickle
         import sys
 
         # test empty cache, single element, large cache (recursion limit)
         for n in [0, 1, sys.getrecursionlimit() * 2]:
-            source = self.cache(maxsize=n)
+            source = self.Cache(maxsize=n)
             source.update((i, i) for i in range(n))
             cache = pickle.loads(pickle.dumps(source))
             self.assertEqual(n, len(cache))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/tests/test_cache.py new/cachetools-3.1.0/tests/test_cache.py
--- old/cachetools-2.0.1/tests/test_cache.py    2016-04-22 15:03:41.000000000 +0200
+++ new/cachetools-3.1.0/tests/test_cache.py    2018-11-04 20:58:27.000000000 +0100
@@ -1,11 +1,10 @@
 import unittest
 
-from cachetools import Cache
+import cachetools
 
 from . import CacheTestMixin
 
 
 class CacheTest(unittest.TestCase, CacheTestMixin):
 
-    def cache(self, maxsize, missing=None, getsizeof=None):
-        return Cache(maxsize, missing=missing, getsizeof=getsizeof)
+    Cache = cachetools.Cache
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/tests/test_func.py new/cachetools-3.1.0/tests/test_func.py
--- old/cachetools-2.0.1/tests/test_func.py     2016-10-03 14:09:18.000000000 +0200
+++ new/cachetools-3.1.0/tests/test_func.py     2018-11-04 20:58:27.000000000 +0100
@@ -30,7 +30,7 @@
         self.assertEqual(cached(1), 1)
         self.assertEqual(cached.cache_info(), (0, 1, 2, 1))
 
-    def test_decorator_nosize(self):
+    def test_decorator_nocache(self):
         cached = self.decorator(maxsize=0)(lambda n: n)
 
         self.assertEqual(cached.cache_info(), (0, 0, 0, 0))
@@ -41,6 +41,17 @@
         self.assertEqual(cached(1.0), 1.0)
         self.assertEqual(cached.cache_info(), (0, 3, 0, 0))
 
+    def test_decorator_unbound(self):
+        cached = self.decorator(maxsize=None)(lambda n: n)
+
+        self.assertEqual(cached.cache_info(), (0, 0, None, 0))
+        self.assertEqual(cached(1), 1)
+        self.assertEqual(cached.cache_info(), (0, 1, None, 1))
+        self.assertEqual(cached(1), 1)
+        self.assertEqual(cached.cache_info(), (1, 1, None, 1))
+        self.assertEqual(cached(1.0), 1.0)
+        self.assertEqual(cached.cache_info(), (2, 1, None, 1))
+
     def test_decorator_typed(self):
         cached = self.decorator(maxsize=2, typed=True)(lambda n: n)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/tests/test_lfu.py new/cachetools-3.1.0/tests/test_lfu.py
--- old/cachetools-2.0.1/tests/test_lfu.py      2016-04-22 15:03:41.000000000 +0200
+++ new/cachetools-3.1.0/tests/test_lfu.py      2018-11-04 20:58:27.000000000 +0100
@@ -7,11 +7,10 @@
 
 class LFUCacheTest(unittest.TestCase, CacheTestMixin):
 
-    def cache(self, maxsize, missing=None, getsizeof=None):
-        return LFUCache(maxsize, missing=missing, getsizeof=getsizeof)
+    Cache = LFUCache
 
     def test_lfu(self):
-        cache = self.cache(maxsize=2)
+        cache = LFUCache(maxsize=2)
 
         cache[1] = 1
         cache[1]
@@ -29,7 +28,7 @@
         self.assertEqual(cache[1], 1)
 
     def test_lfu_getsizeof(self):
-        cache = self.cache(maxsize=3, getsizeof=lambda x: x)
+        cache = LFUCache(maxsize=3, getsizeof=lambda x: x)
 
         cache[1] = 1
         cache[2] = 2
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/tests/test_lru.py new/cachetools-3.1.0/tests/test_lru.py
--- old/cachetools-2.0.1/tests/test_lru.py      2016-04-22 15:03:41.000000000 +0200
+++ new/cachetools-3.1.0/tests/test_lru.py      2018-11-04 20:58:27.000000000 +0100
@@ -7,11 +7,10 @@
 
 class LRUCacheTest(unittest.TestCase, CacheTestMixin):
 
-    def cache(self, maxsize, missing=None, getsizeof=None):
-        return LRUCache(maxsize, missing=missing, getsizeof=getsizeof)
+    Cache = LRUCache
 
     def test_lru(self):
-        cache = self.cache(maxsize=2)
+        cache = LRUCache(maxsize=2)
 
         cache[1] = 1
         cache[2] = 2
@@ -36,7 +35,7 @@
         self.assertNotIn(2, cache)
 
     def test_lru_getsizeof(self):
-        cache = self.cache(maxsize=3, getsizeof=lambda x: x)
+        cache = LRUCache(maxsize=3, getsizeof=lambda x: x)
 
         cache[1] = 1
         cache[2] = 2
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/tests/test_method.py new/cachetools-3.1.0/tests/test_method.py
--- old/cachetools-2.0.1/tests/test_method.py   2016-10-03 14:09:18.000000000 +0200
+++ new/cachetools-3.1.0/tests/test_method.py   2018-11-04 20:58:27.000000000 +0100
@@ -22,6 +22,10 @@
         self.count += 1
         return count
 
+    # https://github.com/tkem/cachetools/issues/107
+    def __hash__(self):
+        raise TypeError('unhashable type')
+
 
 class Locked(object):
 
@@ -109,6 +113,7 @@
     def test_weakref(self):
         import weakref
         import fractions
+        import gc
 
         # in Python 3.4, `int` does not support weak references even
         # when subclassed, but Fraction apparently does...
@@ -119,6 +124,7 @@
         cached = Cached(weakref.WeakValueDictionary(), count=Int(0))
 
         self.assertEqual(cached.get(0), 0)
+        gc.collect()
         self.assertEqual(cached.get(0), 1)
 
         ref = cached.get(1)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/tests/test_rr.py new/cachetools-3.1.0/tests/test_rr.py
--- old/cachetools-2.0.1/tests/test_rr.py       2016-04-22 15:03:41.000000000 +0200
+++ new/cachetools-3.1.0/tests/test_rr.py       2018-11-04 20:58:27.000000000 +0100
@@ -1,4 +1,3 @@
-import random
 import unittest
 
 from cachetools import RRCache
@@ -6,19 +5,12 @@
 from . import CacheTestMixin
 
 
-# random.choice cannot be pickled...
-def choice(seq):
-    return random.choice(seq)
-
-
 class RRCacheTest(unittest.TestCase, CacheTestMixin):
 
-    def cache(self, maxsize, choice=choice, missing=None, getsizeof=None):
-        return RRCache(maxsize, choice=choice, missing=missing,
-                       getsizeof=getsizeof)
+    Cache = RRCache
 
-    def test_choice(self):
-        cache = self.cache(maxsize=2, choice=min)
+    def test_rr(self):
+        cache = RRCache(maxsize=2, choice=min)
         self.assertEqual(min, cache.choice)
 
         cache[1] = 1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/tests/test_ttl.py new/cachetools-3.1.0/tests/test_ttl.py
--- old/cachetools-2.0.1/tests/test_ttl.py      2016-04-22 15:03:41.000000000 +0200
+++ new/cachetools-3.1.0/tests/test_ttl.py      2018-11-04 20:58:27.000000000 +0100
@@ -19,42 +19,17 @@
         self.time += 1
 
 
-class TTLCacheTest(unittest.TestCase, CacheTestMixin):
-
-    def cache(self, maxsize, ttl=0, missing=None, getsizeof=None):
-        return TTLCache(maxsize, ttl, timer=Timer(), missing=missing,
-                        getsizeof=getsizeof)
+class TTLTestCache(TTLCache):
+    def __init__(self, maxsize, ttl=0, **kwargs):
+        TTLCache.__init__(self, maxsize, ttl=ttl, timer=Timer(), **kwargs)
 
-    def test_lru(self):
-        cache = self.cache(maxsize=2)
-
-        cache[1] = 1
-        cache[2] = 2
-        cache[3] = 3
 
-        self.assertEqual(len(cache), 2)
-        self.assertNotIn(1, cache)
-        self.assertEqual(cache[2], 2)
-        self.assertEqual(cache[3], 3)
-
-        cache[2]
-        cache[4] = 4
-        self.assertEqual(len(cache), 2)
-        self.assertNotIn(1, cache)
-        self.assertEqual(cache[2], 2)
-        self.assertNotIn(3, cache)
-        self.assertEqual(cache[4], 4)
+class TTLCacheTest(unittest.TestCase, CacheTestMixin):
 
-        cache[5] = 5
-        self.assertEqual(len(cache), 2)
-        self.assertNotIn(1, cache)
-        self.assertNotIn(2, cache)
-        self.assertNotIn(3, cache)
-        self.assertEqual(cache[4], 4)
-        self.assertEqual(cache[5], 5)
+    Cache = TTLTestCache
 
     def test_ttl(self):
-        cache = self.cache(maxsize=2, ttl=1)
+        cache = TTLCache(maxsize=2, ttl=1, timer=Timer())
         self.assertEqual(0, cache.timer())
         self.assertEqual(1, cache.ttl)
 
@@ -108,8 +83,36 @@
         with self.assertRaises(KeyError):
             del cache[3]
 
-    def test_expire(self):
-        cache = self.cache(maxsize=3, ttl=2)
+    def test_ttl_lru(self):
+        cache = TTLCache(maxsize=2, ttl=0, timer=Timer())
+
+        cache[1] = 1
+        cache[2] = 2
+        cache[3] = 3
+
+        self.assertEqual(len(cache), 2)
+        self.assertNotIn(1, cache)
+        self.assertEqual(cache[2], 2)
+        self.assertEqual(cache[3], 3)
+
+        cache[2]
+        cache[4] = 4
+        self.assertEqual(len(cache), 2)
+        self.assertNotIn(1, cache)
+        self.assertEqual(cache[2], 2)
+        self.assertNotIn(3, cache)
+        self.assertEqual(cache[4], 4)
+
+        cache[5] = 5
+        self.assertEqual(len(cache), 2)
+        self.assertNotIn(1, cache)
+        self.assertNotIn(2, cache)
+        self.assertNotIn(3, cache)
+        self.assertEqual(cache[4], 4)
+        self.assertEqual(cache[5], 5)
+
+    def test_ttl_expire(self):
+        cache = TTLCache(maxsize=3, ttl=2, timer=Timer())
         with cache.timer as time:
             self.assertEqual(time, cache.timer())
         self.assertEqual(2, cache.ttl)
@@ -155,7 +158,7 @@
         self.assertNotIn(2, cache)
         self.assertNotIn(3, cache)
 
-    def test_atomic(self):
+    def test_ttl_atomic(self):
         cache = TTLCache(maxsize=1, ttl=1, timer=Timer(auto=True))
         cache[1] = 1
         self.assertEqual(1, cache[1])
@@ -169,22 +172,8 @@
         cache.clear()
         self.assertEqual(0, len(cache))
 
-    def test_missing(self):
-        class DefaultTTLCache(TTLCache):
-            def __missing__(self, key):
-                self[key] = key
-                return key
-
-        cache = DefaultTTLCache(maxsize=1, ttl=1, timer=Timer())
-        self.assertEqual(1, cache[1])
-        self.assertIn(1, cache)
-        self.assertNotIn(2, cache)
-        self.assertEqual(2, cache[2])
-        self.assertNotIn(1, cache)
-        self.assertIn(2, cache)
-
-    def test_tuple_key(self):
-        cache = self.cache(maxsize=1, ttl=0)
+    def test_ttl_tuple_key(self):
+        cache = TTLCache(maxsize=1, ttl=0, timer=Timer())
         self.assertEqual(0, cache.ttl)
 
         cache[(1, 2, 3)] = 42
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-2.0.1/tox.ini new/cachetools-3.1.0/tox.ini
--- old/cachetools-2.0.1/tox.ini        2016-10-03 14:09:18.000000000 +0200
+++ new/cachetools-3.1.0/tox.ini        2018-11-04 20:58:27.000000000 +0100
@@ -1,5 +1,5 @@
 [tox]
-envlist = check-manifest,docs,flake8,py
+envlist = check-manifest,docs,doctest,flake8,py
 
 [testenv]
 deps =
@@ -22,6 +22,13 @@
 commands =
      sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html
 
+[testenv:doctest]
+deps =
+     mock
+     sphinx
+commands =
+     sphinx-build -W -b doctest -d {envtmpdir}/doctrees docs {envtmpdir}/doctest
+
 [testenv:flake8]
 deps =
     flake8

