Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-cachetools for 
openSUSE:Factory checked in at 2023-01-29 14:10:27
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-cachetools (Old)
 and      /work/SRC/openSUSE:Factory/.python-cachetools.new.32243 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-cachetools"

Sun Jan 29 14:10:27 2023 rev:17 rq:1061731 version:5.3.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-cachetools/python-cachetools.changes     2023-01-14 20:31:44.109219031 +0100
+++ /work/SRC/openSUSE:Factory/.python-cachetools.new.32243/python-cachetools.changes  2023-01-29 14:14:23.560263077 +0100
@@ -1,0 +2,6 @@
+Sat Jan 28 12:28:38 UTC 2023 - Dirk Müller <dmuel...@suse.com>
+
+- update to 5.3.0:
+  * Add ``cache_info()`` function to ``@cached`` decorator.
+
+-------------------------------------------------------------------
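
The user-visible change in this release is the new `info` flag on the ``@cached`` decorator, which attaches a functools-style ``cache_info()`` function to the wrapped function. A minimal usage sketch of the released API (the fib() function is our own illustration, not part of the package):

    from cachetools import cached, LRUCache

    @cached(cache=LRUCache(maxsize=32), info=True)
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(20)
    print(fib.cache_info())  # e.g. CacheInfo(hits=18, misses=21, maxsize=32, currsize=21)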

Old:
----
  cachetools-5.2.1.tar.gz

New:
----
  cachetools-5.3.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-cachetools.spec ++++++
--- /var/tmp/diff_new_pack.awXDD4/_old  2023-01-29 14:14:23.948265187 +0100
+++ /var/tmp/diff_new_pack.awXDD4/_new  2023-01-29 14:14:23.956265231 +0100
@@ -19,7 +19,7 @@
 %define skip_python2 1
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-cachetools
-Version:        5.2.1
+Version:        5.3.0
 Release:        0
 Summary:        Extensible memoizing collections and decorators
 License:        MIT

++++++ cachetools-5.2.1.tar.gz -> cachetools-5.3.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-5.2.1/CHANGELOG.rst new/cachetools-5.3.0/CHANGELOG.rst
--- old/cachetools-5.2.1/CHANGELOG.rst  2023-01-08 21:32:15.000000000 +0100
+++ new/cachetools-5.3.0/CHANGELOG.rst  2023-01-22 23:22:32.000000000 +0100
@@ -1,3 +1,9 @@
+v5.3.0 (2023-01-22)
+===================
+
+- Add ``cache_info()`` function to ``@cached`` decorator.
+
+
 v5.2.1 (2023-01-08)
 ===================
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-5.2.1/PKG-INFO new/cachetools-5.3.0/PKG-INFO
--- old/cachetools-5.2.1/PKG-INFO       2023-01-08 21:34:18.072167000 +0100
+++ new/cachetools-5.3.0/PKG-INFO       2023-01-22 23:29:41.295361500 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: cachetools
-Version: 5.2.1
+Version: 5.3.0
 Summary: Extensible memoizing collections and decorators
 Home-page: https://github.com/tkem/cachetools/
 Author: Thomas Kemmer
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-5.2.1/docs/conf.py new/cachetools-5.3.0/docs/conf.py
--- old/cachetools-5.2.1/docs/conf.py   2023-01-08 21:31:51.000000000 +0100
+++ new/cachetools-5.3.0/docs/conf.py   2023-01-22 23:22:12.000000000 +0100
@@ -7,8 +7,8 @@
 
 project = "cachetools"
 copyright = "2014-2023 Thomas Kemmer"
-version = "5.2"
-release = "5.2.1"
+version = "5.3"
+release = "5.3.0"
 
 extensions = [
     "sphinx.ext.autodoc",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-5.2.1/docs/index.rst new/cachetools-5.3.0/docs/index.rst
--- old/cachetools-5.2.1/docs/index.rst 2022-05-29 22:40:29.000000000 +0200
+++ new/cachetools-5.3.0/docs/index.rst 2023-01-22 23:22:12.000000000 +0100
@@ -256,7 +256,7 @@
    >>> fib(42)
    267914296
 
-.. decorator:: cached(cache, key=cachetools.keys.hashkey, lock=None)
+.. decorator:: cached(cache, key=cachetools.keys.hashkey, lock=None, info=False)
 
    Decorator to wrap a function with a memoizing callable that saves
    results in a cache.
@@ -321,6 +321,31 @@
       # no need for get_pep.cache_lock here
       get_pep.cache_clear()
 
+   If `info` is set to :const:`True`, the wrapped function is
+   instrumented with a :func:`cache_info()` function that returns a
+   named tuple showing `hits`, `misses`, `maxsize` and `currsize`, to
+   help measure the effectiveness of the cache.
+
+   .. note::
+
+      Note that this will inflict a - probably minor - performance
+      penalty, so it has to be explicitly enabled.
+
+   .. doctest::
+      :pyversion: >= 3
+
+      >>> @cached(cache=LRUCache(maxsize=32), info=True)
+      ... def get_pep(num):
+      ...     url = 'http://www.python.org/dev/peps/pep-%04d/' % num
+      ...     with urllib.request.urlopen(url) as s:
+      ...         return s.read()
+
+      >>> for n in 8, 290, 308, 320, 8, 218, 320, 279, 289, 320, 9991:
+      ...     pep = get_pep(n)
+
+      >>> get_pep.cache_info()
+      CacheInfo(hits=3, misses=8, maxsize=32, currsize=8)
+
    The original underlying function is accessible through the
    :attr:`__wrapped__` attribute.  This can be used for introspection
    or for bypassing the cache.
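
The documentation above also applies when an arbitrary mutable mapping is used as the cache: as the implementation further down shows, ``cache_info()`` then reports `maxsize` as None and takes `currsize` from len(cache). A small sketch under that assumption (square() is a made-up example function):

    from cachetools import cached

    d = {}  # any mutable mapping works as the cache

    @cached(cache=d, info=True)
    def square(x):
        return x * x

    square(3)
    square(3)
    print(square.cache_info())  # CacheInfo(hits=1, misses=1, maxsize=None, currsize=1)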
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-5.2.1/setup.cfg new/cachetools-5.3.0/setup.cfg
--- old/cachetools-5.2.1/setup.cfg      2023-01-08 21:34:18.072167000 +0100
+++ new/cachetools-5.3.0/setup.cfg      2023-01-22 23:29:41.295361500 +0100
@@ -5,7 +5,7 @@
 author = Thomas Kemmer
 author_email = tkem...@computer.org
 license = MIT
-license_file = LICENSE
+license_files = LICENSE
 description = Extensible memoizing collections and decorators
 long_description = file: README.rst
 classifiers = 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-5.2.1/src/cachetools/__init__.py new/cachetools-5.3.0/src/cachetools/__init__.py
--- old/cachetools-5.2.1/src/cachetools/__init__.py     2023-01-08 21:31:51.000000000 +0100
+++ new/cachetools-5.3.0/src/cachetools/__init__.py     2023-01-22 23:22:12.000000000 +0100
@@ -13,7 +13,7 @@
     "cachedmethod",
 )
 
-__version__ = "5.2.1"
+__version__ = "5.3.0"
 
 import collections
 import collections.abc
@@ -615,64 +615,163 @@
         return value
 
 
-def cached(cache, key=keys.hashkey, lock=None):
+_CacheInfo = collections.namedtuple(
+    "CacheInfo", ["hits", "misses", "maxsize", "currsize"]
+)
+
+
+def cached(cache, key=keys.hashkey, lock=None, info=False):
     """Decorator to wrap a function with a memoizing callable that saves
     results in a cache.
 
     """
 
     def decorator(func):
-        if cache is None:
+        if info:
+            hits = misses = 0
 
-            def wrapper(*args, **kwargs):
-                return func(*args, **kwargs)
+            if isinstance(cache, Cache):
 
-            def clear():
-                pass
+                def getinfo():
+                    nonlocal hits, misses
+                    return _CacheInfo(hits, misses, cache.maxsize, cache.currsize)
 
-        elif lock is None:
+            elif isinstance(cache, collections.abc.Mapping):
 
-            def wrapper(*args, **kwargs):
-                k = key(*args, **kwargs)
-                try:
-                    return cache[k]
-                except KeyError:
-                    pass  # key not found
-                v = func(*args, **kwargs)
-                try:
-                    cache[k] = v
-                except ValueError:
-                    pass  # value too large
-                return v
+                def getinfo():
+                    nonlocal hits, misses
+                    return _CacheInfo(hits, misses, None, len(cache))
 
-            def clear():
-                cache.clear()
+            else:
 
-        else:
+                def getinfo():
+                    nonlocal hits, misses
+                    return _CacheInfo(hits, misses, 0, 0)
+
+            if cache is None:
+
+                def wrapper(*args, **kwargs):
+                    nonlocal misses
+                    misses += 1
+                    return func(*args, **kwargs)
+
+                def cache_clear():
+                    nonlocal hits, misses
+                    hits = misses = 0
+
+                cache_info = getinfo
+
+            elif lock is None:
+
+                def wrapper(*args, **kwargs):
+                    nonlocal hits, misses
+                    k = key(*args, **kwargs)
+                    try:
+                        result = cache[k]
+                        hits += 1
+                        return result
+                    except KeyError:
+                        misses += 1
+                    v = func(*args, **kwargs)
+                    try:
+                        cache[k] = v
+                    except ValueError:
+                        pass  # value too large
+                    return v
+
+                def cache_clear():
+                    nonlocal hits, misses
+                    cache.clear()
+                    hits = misses = 0
 
-            def wrapper(*args, **kwargs):
-                k = key(*args, **kwargs)
-                try:
+                cache_info = getinfo
+
+            else:
+
+                def wrapper(*args, **kwargs):
+                    nonlocal hits, misses
+                    k = key(*args, **kwargs)
+                    try:
+                        with lock:
+                            result = cache[k]
+                            hits += 1
+                            return result
+                    except KeyError:
+                        with lock:
+                            misses += 1
+                    v = func(*args, **kwargs)
+                    # in case of a race, prefer the item already in the cache
+                    try:
+                        with lock:
+                            return cache.setdefault(k, v)
+                    except ValueError:
+                        return v  # value too large
+
+                def cache_clear():
+                    nonlocal hits, misses
                     with lock:
-                        return cache[k]
-                except KeyError:
-                    pass  # key not found
-                v = func(*args, **kwargs)
-                # in case of a race, prefer the item already in the cache
-                try:
+                        cache.clear()
+                        hits = misses = 0
+
+                def cache_info():
                     with lock:
-                        return cache.setdefault(k, v)
-                except ValueError:
-                    return v  # value too large
+                        return getinfo()
 
-            def clear():
-                with lock:
+        else:
+            if cache is None:
+
+                def wrapper(*args, **kwargs):
+                    return func(*args, **kwargs)
+
+                def cache_clear():
+                    pass
+
+            elif lock is None:
+
+                def wrapper(*args, **kwargs):
+                    k = key(*args, **kwargs)
+                    try:
+                        return cache[k]
+                    except KeyError:
+                        pass  # key not found
+                    v = func(*args, **kwargs)
+                    try:
+                        cache[k] = v
+                    except ValueError:
+                        pass  # value too large
+                    return v
+
+                def cache_clear():
                     cache.clear()
 
+            else:
+
+                def wrapper(*args, **kwargs):
+                    k = key(*args, **kwargs)
+                    try:
+                        with lock:
+                            return cache[k]
+                    except KeyError:
+                        pass  # key not found
+                    v = func(*args, **kwargs)
+                    # in case of a race, prefer the item already in the cache
+                    try:
+                        with lock:
+                            return cache.setdefault(k, v)
+                    except ValueError:
+                        return v  # value too large
+
+                def cache_clear():
+                    with lock:
+                        cache.clear()
+
+            cache_info = None
+
         wrapper.cache = cache
         wrapper.cache_key = key
         wrapper.cache_lock = lock
-        wrapper.cache_clear = clear
+        wrapper.cache_clear = cache_clear
+        wrapper.cache_info = cache_info
 
         return functools.update_wrapper(wrapper, func)
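
Worth noting from the implementation above: when both a lock and info=True are given, the hit/miss counters are incremented under the lock, and ``cache_info()`` itself acquires the lock before reading them, so the statistics stay consistent across threads. A usage sketch under those assumptions (expensive() and the RLock choice are illustrative, not from the upstream docs):

    import threading

    from cachetools import TTLCache, cached

    _cache = TTLCache(maxsize=128, ttl=300)
    _lock = threading.RLock()  # shared by lookups, updates and cache_info()

    @cached(cache=_cache, lock=_lock, info=True)
    def expensive(x):
        return x ** 2  # stand-in for real work

    expensive(2)
    expensive(2)
    print(expensive.cache_info())  # CacheInfo(hits=1, misses=1, maxsize=128, currsize=1)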
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-5.2.1/src/cachetools/func.py new/cachetools-5.3.0/src/cachetools/func.py
--- old/cachetools-5.2.1/src/cachetools/func.py 2022-05-29 22:40:29.000000000 +0200
+++ new/cachetools-5.3.0/src/cachetools/func.py 2023-01-22 23:22:12.000000000 +0100
@@ -2,8 +2,6 @@
 
 __all__ = ("fifo_cache", "lfu_cache", "lru_cache", "mru_cache", "rr_cache", "ttl_cache")
 
-import collections
-import functools
 import math
 import random
 import time
@@ -14,24 +12,10 @@
     from dummy_threading import RLock
 
 from . import FIFOCache, LFUCache, LRUCache, MRUCache, RRCache, TTLCache
+from . import cached
 from . import keys
 
 
-_CacheInfo = collections.namedtuple(
-    "CacheInfo", ["hits", "misses", "maxsize", "currsize"]
-)
-
-
-class _UnboundCache(dict):
-    @property
-    def maxsize(self):
-        return None
-
-    @property
-    def currsize(self):
-        return len(self)
-
-
 class _UnboundTTLCache(TTLCache):
     def __init__(self, ttl, timer):
         TTLCache.__init__(self, math.inf, ttl, timer)
@@ -41,50 +25,11 @@
         return None
 
 
-def _cache(cache, typed):
-    maxsize = cache.maxsize
-
+def _cache(cache, maxsize, typed):
     def decorator(func):
         key = keys.typedkey if typed else keys.hashkey
-        hits = misses = 0
-        lock = RLock()
-
-        def wrapper(*args, **kwargs):
-            nonlocal hits, misses
-            k = key(*args, **kwargs)
-            with lock:
-                try:
-                    v = cache[k]
-                    hits += 1
-                    return v
-                except KeyError:
-                    misses += 1
-            v = func(*args, **kwargs)
-            # in case of a race, prefer the item already in the cache
-            try:
-                with lock:
-                    return cache.setdefault(k, v)
-            except ValueError:
-                return v  # value too large
-
-        def cache_info():
-            with lock:
-                maxsize = cache.maxsize
-                currsize = cache.currsize
-            return _CacheInfo(hits, misses, maxsize, currsize)
-
-        def cache_clear():
-            nonlocal hits, misses
-            with lock:
-                try:
-                    cache.clear()
-                finally:
-                    hits = misses = 0
-
-        wrapper.cache_info = cache_info
-        wrapper.cache_clear = cache_clear
+        wrapper = cached(cache=cache, key=key, lock=RLock(), info=True)(func)
         wrapper.cache_parameters = lambda: {"maxsize": maxsize, "typed": typed}
-        functools.update_wrapper(wrapper, func)
         return wrapper
 
     return decorator
@@ -97,11 +42,11 @@
 
     """
     if maxsize is None:
-        return _cache(_UnboundCache(), typed)
+        return _cache({}, None, typed)
     elif callable(maxsize):
-        return _cache(FIFOCache(128), typed)(maxsize)
+        return _cache(FIFOCache(128), 128, typed)(maxsize)
     else:
-        return _cache(FIFOCache(maxsize), typed)
+        return _cache(FIFOCache(maxsize), maxsize, typed)
 
 
 def lfu_cache(maxsize=128, typed=False):
@@ -111,11 +56,11 @@
 
     """
     if maxsize is None:
-        return _cache(_UnboundCache(), typed)
+        return _cache({}, None, typed)
     elif callable(maxsize):
-        return _cache(LFUCache(128), typed)(maxsize)
+        return _cache(LFUCache(128), 128, typed)(maxsize)
     else:
-        return _cache(LFUCache(maxsize), typed)
+        return _cache(LFUCache(maxsize), maxsize, typed)
 
 
 def lru_cache(maxsize=128, typed=False):
@@ -125,11 +70,11 @@
 
     """
     if maxsize is None:
-        return _cache(_UnboundCache(), typed)
+        return _cache({}, None, typed)
     elif callable(maxsize):
-        return _cache(LRUCache(128), typed)(maxsize)
+        return _cache(LRUCache(128), 128, typed)(maxsize)
     else:
-        return _cache(LRUCache(maxsize), typed)
+        return _cache(LRUCache(maxsize), maxsize, typed)
 
 
 def mru_cache(maxsize=128, typed=False):
@@ -138,11 +83,11 @@
     algorithm.
     """
     if maxsize is None:
-        return _cache(_UnboundCache(), typed)
+        return _cache({}, None, typed)
     elif callable(maxsize):
-        return _cache(MRUCache(128), typed)(maxsize)
+        return _cache(MRUCache(128), 128, typed)(maxsize)
     else:
-        return _cache(MRUCache(maxsize), typed)
+        return _cache(MRUCache(maxsize), maxsize, typed)
 
 
 def rr_cache(maxsize=128, choice=random.choice, typed=False):
@@ -152,11 +97,11 @@
 
     """
     if maxsize is None:
-        return _cache(_UnboundCache(), typed)
+        return _cache({}, None, typed)
     elif callable(maxsize):
-        return _cache(RRCache(128, choice), typed)(maxsize)
+        return _cache(RRCache(128, choice), 128, typed)(maxsize)
     else:
-        return _cache(RRCache(maxsize, choice), typed)
+        return _cache(RRCache(maxsize, choice), maxsize, typed)
 
 
 def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):
@@ -165,8 +110,8 @@
     algorithm with a per-item time-to-live (TTL) value.
     """
     if maxsize is None:
-        return _cache(_UnboundTTLCache(ttl, timer), typed)
+        return _cache(_UnboundTTLCache(ttl, timer), None, typed)
     elif callable(maxsize):
-        return _cache(TTLCache(128, ttl, timer), typed)(maxsize)
+        return _cache(TTLCache(128, ttl, timer), 128, typed)(maxsize)
     else:
-        return _cache(TTLCache(maxsize, ttl, timer), typed)
+        return _cache(TTLCache(maxsize, ttl, timer), maxsize, typed)
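
The effect of the func.py refactoring above is that the functools-style decorators keep their public surface (cache_info(), cache_clear() and cache_parameters()) but are now thin wrappers around cached(..., info=True), with `maxsize` passed through explicitly and a plain dict standing in for the removed _UnboundCache. A quick sketch of that unchanged surface (fib() is again only an illustration):

    from cachetools.func import lru_cache

    @lru_cache(maxsize=64)
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(16)
    print(fib.cache_info())        # e.g. CacheInfo(hits=14, misses=17, maxsize=64, currsize=17)
    print(fib.cache_parameters())  # {'maxsize': 64, 'typed': False}
    fib.cache_clear()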
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-5.2.1/src/cachetools.egg-info/PKG-INFO new/cachetools-5.3.0/src/cachetools.egg-info/PKG-INFO
--- old/cachetools-5.2.1/src/cachetools.egg-info/PKG-INFO       2023-01-08 21:34:18.000000000 +0100
+++ new/cachetools-5.3.0/src/cachetools.egg-info/PKG-INFO       2023-01-22 23:29:41.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: cachetools
-Version: 5.2.1
+Version: 5.3.0
 Summary: Extensible memoizing collections and decorators
 Home-page: https://github.com/tkem/cachetools/
 Author: Thomas Kemmer
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cachetools-5.2.1/tests/test_cached.py new/cachetools-5.3.0/tests/test_cached.py
--- old/cachetools-5.2.1/tests/test_cached.py   2022-05-29 22:40:29.000000000 +0200
+++ new/cachetools-5.3.0/tests/test_cached.py   2023-01-22 23:22:12.000000000 +0100
@@ -145,16 +145,25 @@
         self.assertEqual(len(cache), 0)
         self.assertEqual(lock.count, 3)
 
-    def test_decorator_clear_none(self):
-        cache = None
-        wrapper = cachetools.cached(cache)(self.func)
-        wrapper.cache_clear()  # no-op
-
 
 class CacheWrapperTest(unittest.TestCase, DecoratorTestMixin):
     def cache(self, minsize):
         return cachetools.Cache(maxsize=minsize)
 
+    def test_decorator_info(self):
+        cache = self.cache(2)
+        wrapper = cachetools.cached(cache, info=True)(self.func)
+        self.assertEqual(wrapper.cache_info(), (0, 0, 2, 0))
+        self.assertEqual(wrapper(0), 0)
+        self.assertEqual(wrapper.cache_info(), (0, 1, 2, 1))
+        self.assertEqual(wrapper(1), 1)
+        self.assertEqual(wrapper.cache_info(), (0, 2, 2, 2))
+        self.assertEqual(wrapper(0), 0)
+        self.assertEqual(wrapper.cache_info(), (1, 2, 2, 2))
+        wrapper.cache_clear()
+        self.assertEqual(len(cache), 0)
+        self.assertEqual(wrapper.cache_info(), (0, 0, 2, 0))
+
     def test_zero_size_cache_decorator(self):
         cache = self.cache(0)
         wrapper = cachetools.cached(cache)(self.func)
@@ -173,11 +182,33 @@
         self.assertEqual(len(cache), 0)
         self.assertEqual(lock.count, 2)
 
+    def test_zero_size_cache_decorator_info(self):
+        cache = self.cache(0)
+        wrapper = cachetools.cached(cache, info=True)(self.func)
+
+        self.assertEqual(wrapper.cache_info(), (0, 0, 0, 0))
+        self.assertEqual(wrapper(0), 0)
+        self.assertEqual(wrapper.cache_info(), (0, 1, 0, 0))
+
 
 class DictWrapperTest(unittest.TestCase, DecoratorTestMixin):
     def cache(self, minsize):
         return dict()
 
+    def test_decorator_info(self):
+        cache = self.cache(2)
+        wrapper = cachetools.cached(cache, info=True)(self.func)
+        self.assertEqual(wrapper.cache_info(), (0, 0, None, 0))
+        self.assertEqual(wrapper(0), 0)
+        self.assertEqual(wrapper.cache_info(), (0, 1, None, 1))
+        self.assertEqual(wrapper(1), 1)
+        self.assertEqual(wrapper.cache_info(), (0, 2, None, 2))
+        self.assertEqual(wrapper(0), 0)
+        self.assertEqual(wrapper.cache_info(), (1, 2, None, 2))
+        wrapper.cache_clear()
+        self.assertEqual(len(cache), 0)
+        self.assertEqual(wrapper.cache_info(), (0, 0, None, 0))
+
 
 class NoneWrapperTest(unittest.TestCase):
     def func(self, *args, **kwargs):
@@ -189,3 +220,26 @@
         self.assertEqual(wrapper(0), (0,))
         self.assertEqual(wrapper(1), (1,))
         self.assertEqual(wrapper(1, foo="bar"), (1, ("foo", "bar")))
+
+    def test_decorator_attributes(self):
+        wrapper = cachetools.cached(None)(self.func)
+
+        self.assertIs(wrapper.cache, None)
+        self.assertIs(wrapper.cache_key, cachetools.keys.hashkey)
+        self.assertIs(wrapper.cache_lock, None)
+
+    def test_decorator_clear(self):
+        wrapper = cachetools.cached(None)(self.func)
+
+        wrapper.cache_clear()  # no-op
+
+    def test_decorator_info(self):
+        wrapper = cachetools.cached(None, info=True)(self.func)
+
+        self.assertEqual(wrapper.cache_info(), (0, 0, 0, 0))
+        self.assertEqual(wrapper(0), (0,))
+        self.assertEqual(wrapper.cache_info(), (0, 1, 0, 0))
+        self.assertEqual(wrapper(1), (1,))
+        self.assertEqual(wrapper.cache_info(), (0, 2, 0, 0))
+        wrapper.cache_clear()
+        self.assertEqual(wrapper.cache_info(), (0, 0, 0, 0))
