Hello community,

here is the log from the commit of package python-web_cache for 
openSUSE:Leap:15.2 checked in at 2020-03-02 13:24:51
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Leap:15.2/python-web_cache (Old)
 and      /work/SRC/openSUSE:Leap:15.2/.python-web_cache.new.26092 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-web_cache"

Mon Mar  2 13:24:51 2020 rev:11 rq:777276 version:1.1.0

Changes:
--------
--- /work/SRC/openSUSE:Leap:15.2/python-web_cache/python-web_cache.changes      
2020-01-15 15:54:10.271622934 +0100
+++ 
/work/SRC/openSUSE:Leap:15.2/.python-web_cache.new.26092/python-web_cache.changes
   2020-03-02 13:24:52.374579162 +0100
@@ -1,0 +2,22 @@
+Wed Dec 11 08:15:14 UTC 2019 - Tomáš Chvátal <[email protected]>
+
+- Pull in full python for sqlite module
+
+-------------------------------------------------------------------
+Thu Apr 11 11:00:53 UTC 2019 - Marketa Calabkova <[email protected]>
+
+- update to version 1.1.0
+  * Store database format in table name to ease future 
+    incompatible changes
+  * Allow disabling compression if ratio is below a threshold value
+  * Drop Python 3.3 support
+  * Get rid of pypandoc
+- use Github release tarball because the tests are not exported 
+  on PyPi
+
+-------------------------------------------------------------------
+Tue Dec  4 12:55:56 UTC 2018 - Matej Cepl <[email protected]>
+
+- Remove superfluous devel dependency for noarch package
+
+-------------------------------------------------------------------

Old:
----
  web_cache-1.0.2.tar.gz

New:
----
  1.1.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-web_cache.spec ++++++
--- /var/tmp/diff_new_pack.jBjmld/_old  2020-03-02 13:24:53.018580443 +0100
+++ /var/tmp/diff_new_pack.jBjmld/_new  2020-03-02 13:24:53.018580443 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package python-web_cache
 #
-# Copyright (c) 2017 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2019 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -12,23 +12,25 @@
 # license that conforms to the Open Source Definition (Version 1.9)
 # published by the Open Source Initiative.
 
-# Please submit bugfixes or comments via http://bugs.opensuse.org/
+# Please submit bugfixes or comments via https://bugs.opensuse.org/
+#
 
-%define skip_python2 1
 
+%define skip_python2 1
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-web_cache
-Version:        1.0.2
+Version:        1.1.0
 Release:        0
-License:        LGPL-2.1
 Summary:        Persistent cache storage python module
-Url:            https://github.com/desbma/web_cache
-Group:          Development/Languages/Python
-Source:         
https://files.pythonhosted.org/packages/source/w/web_cache/web_cache-%{version}.tar.gz
-BuildRequires:  python-rpm-macros
-BuildRequires:  %{python_module devel}
+License:        LGPL-2.1-only
+URL:            https://github.com/desbma/web_cache
+Source:         https://github.com/desbma/web_cache/archive/%{version}.tar.gz
 BuildRequires:  %{python_module setuptools}
+BuildRequires:  %{python_module unittest2}
+BuildRequires:  %{pythons}
 BuildRequires:  fdupes
+BuildRequires:  python-rpm-macros
+Requires:       python
 BuildArch:      noarch
 %python_subpackages
 
@@ -48,8 +50,10 @@
 %python_install
 %python_expand %fdupes %{buildroot}%{$python_sitelib}
 
+%check
+%python_expand PYTHONPATH=%{buildroot}%{$python_sitelib} $python -m unittest 
discover -v
+
 %files %{python_files}
-%defattr(-,root,root,-)
 %doc README.md
 %license LICENSE
 %{python_sitelib}/*

++++++ web_cache-1.0.2.tar.gz -> 1.1.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/.gitignore 
new/web_cache-1.1.0/.gitignore
--- old/web_cache-1.0.2/.gitignore      1970-01-01 01:00:00.000000000 +0100
+++ new/web_cache-1.1.0/.gitignore      2018-11-04 01:16:06.000000000 +0100
@@ -0,0 +1,4 @@
+__pycache__
+/*.egg*
+/build
+/dist
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/.travis.yml 
new/web_cache-1.1.0/.travis.yml
--- old/web_cache-1.0.2/.travis.yml     1970-01-01 01:00:00.000000000 +0100
+++ new/web_cache-1.1.0/.travis.yml     2018-11-04 01:16:06.000000000 +0100
@@ -0,0 +1,47 @@
+dist: trusty
+sudo: false
+language: python
+cache:
+  directories:
+    - $HOME/.cache/pypy
+matrix:
+  include:
+    - python: "3.4"
+    - python: "3.5"
+    - python: "3.5-dev"
+    - python: "3.6"
+    - python: "3.6-dev"
+    - python: "3.7-dev"
+    - python: "pypy3"
+      env: PYPY3_PYTHON_VERSION=3.5 PYPY3_VERSION=5.10.0 
PYPY3_SHA256=d03f81f26e5e67d808569c5c69d56ceb007df78f7e36ab1c50da4d9096cebde0
+  allow_failures:
+    - python: "3.5-dev"
+    - python: "3.6-dev"
+    - python: "3.7-dev"
+    - python: "pypy3"
+  fast_finish: true
+before_install:
+  - "if [ $TRAVIS_PYTHON_VERSION != 'pypy3' ]; then pip install -U pip; fi"
+  - "if [ $TRAVIS_PYTHON_VERSION = 'pypy3' ]; then mkdir -p ~/.cache/pypy && 
echo $PYPY3_SHA256  
~/.cache/pypy/pypy$PYPY3_PYTHON_VERSION-$PYPY3_VERSION-linux_x86_64-portable.tar.bz2
 | sha256sum --status -c || curl -f -L 
https://bitbucket.org/squeaky/portable-pypy/downloads/pypy$PYPY3_PYTHON_VERSION-$PYPY3_VERSION-linux_x86_64-portable.tar.bz2
 > 
~/.cache/pypy/pypy$PYPY3_PYTHON_VERSION-$PYPY3_VERSION-linux_x86_64-portable.tar.bz2;
 fi"
+  - "if [ $TRAVIS_PYTHON_VERSION = 'pypy3' ]; then echo $PYPY3_SHA256  
~/.cache/pypy/pypy$PYPY3_PYTHON_VERSION-$PYPY3_VERSION-linux_x86_64-portable.tar.bz2
 | sha256sum --status -c && tar -xjf 
~/.cache/pypy/pypy$PYPY3_PYTHON_VERSION-$PYPY3_VERSION-linux_x86_64-portable.tar.bz2
 -C ~ && deactivate && 
~/pypy$PYPY3_PYTHON_VERSION-$PYPY3_VERSION-linux_x86_64-portable/bin/virtualenv-pypy
 ~/pypy3-venv && . ~/pypy3-venv/bin/activate; fi"
+install:
+  - "pip install coveralls"
+script:
+  - "coverage run --source=web_cache setup.py test"
+after_success:
+  - "coveralls"
+deploy:
+  provider: pypi
+  user: desbma
+  password:
+    secure: 
"Qv6FcQ08dWVt2o9HPc+GLx4GOwXOqGiAs6Qswv/DnppPgBvrLZhOg6zv/rW+zUau1FKGGK43ggBpsmOlpkaFNvuGCkIwIUMEtBEX4iER4VyZSvs2iaCd5GaomsG+xBuFwKcANWI0IfEwyaXbnD2g3+Buyyt1WSkrc3gVE7wKKQcRLS+Gsd0fflGzmTa+qGaSoeHsuASqlBzWiavsWiHkWzqr0Des6BOQtuP9JbVWvqjSGZg1d6wVNwUBmuscTmBJZIq/hlcErNPB1W0s5UVk2GfspMt8etdkcxFl76MlV5q2IVDRZYTifk5BKP8afKCg+Vbv3JTLCICfYi2+nwBFmmQTPI/+7mZD+j0wruQH/6BzqR/nGXjYiPsq/RdpAtyGtU75sWuOYnyeTcTuODtw92VYq2ZruUMjb55i9p4NdQCSuSnK+pViFczuNzh3jViOt/EFbGsNeANrE4B6xuNRzJ7Dy1SSwYtGfXIFjA0UzoZiKxtsY7K5shqYtgd6dFs/QH01ucYZdGWEk8TbgvNv52d3Tc0HElT72SC/Pjg9zp9P1gdjRPAufpjYH0z0/LK6JAKWH9xnxDHUdjgRW9AkTcNrIGIW8obx1+wx0640CuHUWvbbp146P+SnbEYixkBwMtnCcywIiLGs37PV44ZUNet2opccsPYVrgAkzidYEDE="
+  on:
+    tags: true
+    branch: master
+    python: "3.6"
+notifications:
+  email:
+    recipients:
+      - secure: 
"RVij2G5EC2R/zb91Ex45QHBBVogp+7bTZyrcZKdKWLzvNoRGuAhE4j8dhPnQyGme61k9LOXrE++clQthKZ3qViS56x3x11KGCq/ZKq8/PlE871hIRNZh2E9898lysmn19QyAOGCHI61+KPCzO8kQ62KKF5VWHcTYfPJY4EkyDBAp2VTlOfnOYRkJbmCFkOkm1xIaNnT9uxbJEbDmOy5ML/6yNU1mS4qYl5wU6nnqX/ggmqhiymBIQmGcVHcZ3A2+EHHN2HAoWEzde8uk9smQS+IuX/3HyBa9WD/VYWwLtu9LDLZhhF+kkiCUJwfC5ns1YquA7oWplqN54lTLomSxf3XXidwMs/ULd5+/P47HOgwQUW2t7U5MQghiA+Lxa3wU/AEEkXISpezHQioBSfOKChpNYiRUvEXwGk0E8B6NQ+wLSkbU+XMVb4Z/k/WQxuiB+x67La2ZDXxjtrQ1afFZ5a2yW/kVSycbdOlQHliSCp6BbtmRRhYNwV+GnNMSlSA9AU6wuadegyOhVhy/cBqqydFvI6AHta8tAbqkerY2iBJWuZDORwD+gPe8q7p4x0aYw02ku9pfjCfNUlQ8d7QLn2cgKL9xwNe2s5pqGJ8hD/dMmB7rV57ga+ICXIe11UA/lAytmCRPtStCEDp4b995k+uB8CwqX1WIpOaaHuQhots="
+    on_success: always
+    on_failure: always
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/PKG-INFO new/web_cache-1.1.0/PKG-INFO
--- old/web_cache-1.0.2/PKG-INFO        2017-04-23 03:06:29.000000000 +0200
+++ new/web_cache-1.1.0/PKG-INFO        1970-01-01 01:00:00.000000000 +0100
@@ -1,72 +0,0 @@
-Metadata-Version: 1.1
-Name: web_cache
-Version: 1.0.2
-Summary: Simple persistent cache storage, with different cache eviction 
strategies, and optional compression
-Home-page: https://github.com/desbma/web_cache
-Author: desbma
-Author-email: UNKNOWN
-License: UNKNOWN
-Download-URL: https://github.com/desbma/web_cache/archive/1.0.2.tar.gz
-Description: Web cache
-        =========
-        
-        |Latest version| |Tests status| |Coverage| |Supported Python versions|
-        |License|
-        
-        Python module for simple key-value storage backed up by sqlite3
-        database. The typical use case is a URL to HTTP data cache, but it can
-        also be used fo non web ressources.
-        
-        Features
-        --------
-        
-        -  Simple ``dict`` interface allows natural usage (``if key in cache``,
-           ``value = cache[key]``, etc.)
-        -  Optional Zlib, BZIP2 or LZMA compression
-        -  FIFO or LRU cache eviction strategies
-        -  Optional thread safe interface to work around Python Sqlite3 'same
-           thread' limitation
-        -  Provides cache hit rate statistics
-        
-        Installation (from PyPI, with PIP)
-        ----------------------------------
-        
-        web\_cache requires `Python <https://www.python.org/downloads/>`__ >=
-        3.3.
-        
-        1. If you don't already have it, `install
-           pip <http://www.pip-installer.org/en/latest/installing.html>`__ for
-           Python 3 (not needed if you are using Python >= 3.4)
-        2. Install web\_cache: ``pip3 install web_cache``
-        
-        License
-        -------
-        
-        `LGPLv2.1 <https://www.gnu.org/licenses/old-licenses/lgpl-2.1.html>`__
-        
-        .. |Latest version| image:: 
https://img.shields.io/pypi/v/web_cache.svg?style=flat
-           :target: https://pypi.python.org/pypi/web_cache/
-        .. |Tests status| image:: 
https://img.shields.io/travis/desbma/web_cache/master.svg?label=tests&style=flat
-           :target: https://travis-ci.org/desbma/web_cache
-        .. |Coverage| image:: 
https://img.shields.io/coveralls/desbma/web_cache/master.svg?style=flat
-           :target: https://coveralls.io/github/desbma/web_cache?branch=master
-        .. |Supported Python versions| image:: 
https://img.shields.io/pypi/pyversions/web_cache.svg?style=flat
-           :target: https://pypi.python.org/pypi/web_cache/
-        .. |License| image:: 
https://img.shields.io/github/license/desbma/web_cache.svg?style=flat
-           :target: https://pypi.python.org/pypi/web_cache/
-        
-Keywords: cache,sqlite3,key-value,persistent
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: GNU Lesser General Public License v2 or 
later (LGPLv2+)
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Topic :: Database
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/README.md 
new/web_cache-1.1.0/README.md
--- old/web_cache-1.0.2/README.md       2017-04-23 03:04:24.000000000 +0200
+++ new/web_cache-1.1.0/README.md       2018-11-04 01:16:06.000000000 +0100
@@ -5,7 +5,7 @@
 [![Tests 
status](https://img.shields.io/travis/desbma/web_cache/master.svg?label=tests&style=flat)](https://travis-ci.org/desbma/web_cache)
 
[![Coverage](https://img.shields.io/coveralls/desbma/web_cache/master.svg?style=flat)](https://coveralls.io/github/desbma/web_cache?branch=master)
 [![Supported Python 
versions](https://img.shields.io/pypi/pyversions/web_cache.svg?style=flat)](https://pypi.python.org/pypi/web_cache/)
-[![License](https://img.shields.io/github/license/desbma/web_cache.svg?style=flat)](https://pypi.python.org/pypi/web_cache/)
+[![License](https://img.shields.io/github/license/desbma/web_cache.svg?style=flat)](https://github.com/desbma/web_cache/blob/master/LICENSE)
 
 Python module for simple key-value storage backed up by sqlite3 database.
 The typical use case is a URL to HTTP data cache, but it can also be used fo 
non web ressources.
@@ -22,12 +22,12 @@
 
 ## Installation (from PyPI, with PIP)
 
-web_cache requires [Python](https://www.python.org/downloads/) >= 3.3.
+web_cache requires [Python](https://www.python.org/downloads/) >= 3.4.
 
-1. If you don't already have it, [install 
pip](http://www.pip-installer.org/en/latest/installing.html) for Python 3 (not 
needed if you are using Python >= 3.4)
+1. If you don't already have it, [install 
pip](https://pip.pypa.io/en/stable/installing/) for Python 3
 2. Install web_cache: `pip3 install web_cache`
 
 
 ## License
 
-[LGPLv2.1](https://www.gnu.org/licenses/old-licenses/lgpl-2.1.html)
+[LGPLv2](https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/setup.cfg 
new/web_cache-1.1.0/setup.cfg
--- old/web_cache-1.0.2/setup.cfg       2017-04-23 03:06:29.000000000 +0200
+++ new/web_cache-1.1.0/setup.cfg       1970-01-01 01:00:00.000000000 +0100
@@ -1,5 +0,0 @@
-[egg_info]
-tag_svn_revision = 0
-tag_date = 0
-tag_build = 
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/setup.py new/web_cache-1.1.0/setup.py
--- old/web_cache-1.0.2/setup.py        2017-04-23 03:04:24.000000000 +0200
+++ new/web_cache-1.1.0/setup.py        2018-11-04 01:16:06.000000000 +0100
@@ -7,35 +7,24 @@
 from setuptools import find_packages, setup
 
 
-if sys.hexversion < 0x3030000:
-  print("Python version %s is unsupported, >= 3.3.0 is needed" % 
(".".join(map(str, sys.version_info[:3]))))
+if sys.hexversion < 0x3040000:
+  print("Python version %s is unsupported, >= 3.4.0 is needed" % 
(".".join(map(str, sys.version_info[:3]))))
   exit(1)
 
 with open(os.path.join("web_cache", "__init__.py"), "rt") as f:
   version = re.search("__version__ = \"([^\"]+)\"", f.read()).group(1)
 
-requirements = []
-# require enum34 if enum module is missing (Python 3.3)
-try:
-  import enum
-except ImportError:
-  requirements.append("enum34")
-
-try:
-  import pypandoc
-  readme = pypandoc.convert("README.md", "rst")
-except ImportError:
-  with open("README.md", "rt") as f:
-    readme = f.read()
+with open("README.md", "rt") as f:
+  readme = f.read()
 
 setup(name="web_cache",
       version=version,
       author="desbma",
       packages=find_packages(exclude=("tests",)),
       test_suite="tests",
-      install_requires=requirements,
       description="Simple persistent cache storage, with different cache 
eviction strategies, and optional compression",
       long_description=readme,
+      long_description_content_type="text/markdown",
      url="https://github.com/desbma/web_cache",
      download_url="https://github.com/desbma/web_cache/archive/%s.tar.gz" % 
(version),
       keywords=["cache", "sqlite3", "key-value", "persistent"],
@@ -46,7 +35,6 @@
                    "Programming Language :: Python",
                    "Programming Language :: Python :: 3",
                    "Programming Language :: Python :: 3 :: Only",
-                   "Programming Language :: Python :: 3.3",
                    "Programming Language :: Python :: 3.4",
                    "Programming Language :: Python :: 3.5",
                    "Programming Language :: Python :: 3.6",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/tests/__init__.py 
new/web_cache-1.1.0/tests/__init__.py
--- old/web_cache-1.0.2/tests/__init__.py       1970-01-01 01:00:00.000000000 
+0100
+++ new/web_cache-1.1.0/tests/__init__.py       2018-11-04 01:16:06.000000000 
+0100
@@ -0,0 +1,213 @@
+#!/usr/bin/env python3
+
+import collections
+import gc
+import logging
+import os
+import pickle
+import random
+import string
+import sys
+import tempfile
+import time
+import unittest
+
+import web_cache
+
+
+web_cache.DISABLE_PERSISTENT_CACHING = True
+
+INFINITY = sys.maxsize
+
+
+def get_random_string(length, chars=string.ascii_letters + string.digits):
+  return "".join(random.choice(chars) for _ in range(length))
+
+
+class TestWebCache(unittest.TestCase):
+
+  def test_getSetDelete(self):
+    """ Get/set/delete cache items using all cache parameter combinations. """
+    for cache_class in (web_cache.WebCache, web_cache.ThreadedWebCache):
+      for compression in web_cache.Compression:
+        for compression_level in range(1, 9):
+          for auto_compression_threshold in (0.05, 1):
+            for compressible_data in (True, False):
+              for caching_strategy in web_cache.CachingStrategy:
+                for expiration in (None, 0, INFINITY):
+                  for sql_crash_safe in (True, False):
+                    table_name = get_random_string(16, string.ascii_letters)
+                    with tempfile.TemporaryDirectory(suffix=".sqlite") as 
tmp_dir:
+                      # init cache
+                      cache_filepath = os.path.join(tmp_dir, "db.sqlite")
+                      cache = cache_class(cache_filepath,
+                                          table_name,
+                                          caching_strategy=caching_strategy,
+                                          expiration=expiration,
+                                          compression=compression,
+                                          compression_level=compression_level,
+                                          
auto_compression_threshold=auto_compression_threshold,
+                                          safe_mode=sql_crash_safe)
+                      already_used_keys = set()
+                      item_count = 0
+
+                      for req_type in ("get", "post"):
+                        for item_count in range(item_count + 1, item_count + 
4):
+                          while True:
+                            # generate cache key
+                            key = get_random_string(16)
+                            if req_type == "post":
+                              key = key, collections.OrderedDict(((k, v) for 
k, v in zip((get_random_string(8) for _ in range(4)),
+                                                                               
          (get_random_string(16) for _ in range(4)))))
+
+                            # ensure key is unique for this cache
+                            bin_key = pickle.dumps(key)
+                            if bin_key not in already_used_keys:
+                              already_used_keys.add(bin_key)
+                              break
+
+                          # generate cache data
+                          if compressible_data:
+                            data = b"a" * (2 ** 13)
+                          else:
+                            data = os.urandom(2 ** 13)
+
+                          # check cache size
+                          self.assertEqual(len(cache), item_count - 1)
+
+                          # check key is not in cache
+                          self.assertNotIn(key, cache)
+                          with self.assertRaises(KeyError):
+                            cache[key]
+                          with self.assertRaises(KeyError):
+                            del cache[key]
+
+                          # add data to cache
+                          cache[key] = data
+
+                          # check key is in cache
+                          self.assertIn(key, cache)
+                          self.assertEqual(cache[key], data)
+
+                          # check cache size
+                          self.assertEqual(len(cache), item_count)
+
+                          # delete cache item
+                          del cache[key]
+
+                          # check it is not in cache anymore
+                          self.assertNotIn(key, cache)
+                          with self.assertRaises(KeyError):
+                            cache[key]
+                          with self.assertRaises(KeyError):
+                            del cache[key]
+
+                          # check cache size
+                          self.assertEqual(len(cache), item_count - 1)
+
+                          # check other keys are still here
+                          for old_key in map(pickle.loads, already_used_keys):
+                            if old_key != key:
+                              self.assertIn(old_key, cache)
+
+                          # add cache item again
+                          cache[key] = data
+
+                # fix huge memory usage with pypy
+                gc.collect()
+
+  def test_getCacheHitStats(self):
+    """ Get cache stats using all cache parameter combinations. """
+    for cache_class in (web_cache.WebCache, web_cache.ThreadedWebCache):
+      for compression in web_cache.Compression:
+        for compression_level in range(1, 9):
+          for caching_strategy in web_cache.CachingStrategy:
+            for expiration in (None, 0, INFINITY):
+              for sql_crash_safe in (True, False):
+                table_name = get_random_string(16, string.ascii_letters)
+                with tempfile.TemporaryDirectory(suffix=".sqlite") as tmp_dir:
+                  # init cache
+                  cache_filepath = os.path.join(tmp_dir, "db.sqlite")
+                  cache = cache_class(cache_filepath,
+                                      table_name,
+                                      caching_strategy=caching_strategy,
+                                      expiration=expiration,
+                                      compression=compression,
+                                      compression_level=compression_level,
+                                      safe_mode=sql_crash_safe)
+
+                  i = 0
+                  for req_type in ("get", "post"):
+                    for i in range(i + 1, 5):
+                      # generate item
+                      key = "%s_%u" % (req_type, i)
+                      if req_type == "post":
+                        key = key, collections.OrderedDict(((k, v) for k, v in 
zip((get_random_string(4) for _ in range(2)),
+                                                                               
    (get_random_string(8) for _ in range(2)))))
+                      data = os.urandom(2 ** 13)
+
+                      # add item
+                      cache[key] = data
+
+                      # check cache hit stats
+                      self.assertEqual(cache.getCacheHitStats(), (i - 1, i - 
1))
+                      self.assertIn(key, cache)
+                      self.assertEqual(cache.getCacheHitStats(), (i, i - 1))
+                      self.assertNotIn("(o_o)", cache)
+                      self.assertEqual(cache.getCacheHitStats(), (i, i))
+
+                # fix huge memory usage with pypy
+                gc.collect()
+
+  def test_purge(self):
+    """ Purge obsolete cache entries. """
+    for cache_class in (web_cache.WebCache, web_cache.ThreadedWebCache):
+      for caching_strategy in web_cache.CachingStrategy:
+        for expiration in (None, 2, INFINITY):
+          table_name = get_random_string(16, string.ascii_letters)
+          with tempfile.TemporaryDirectory(suffix=".sqlite") as tmp_dir:
+            # init cache
+            cache_filepath = os.path.join(tmp_dir, "db.sqlite")
+            cache = cache_class(cache_filepath,
+                                table_name,
+                                caching_strategy=caching_strategy,
+                                expiration=expiration)
+
+            # add items
+            for req_type in ("get", "post"):
+              for i in range(5):
+                key = "%s_%u" % (req_type, i)
+                if req_type == "post":
+                  key = key, collections.OrderedDict(((k, v) for k, v in 
zip((get_random_string(4) for _ in range(2)),
+                                                                             
(get_random_string(8) for _ in range(2)))))
+                data = os.urandom(2 ** 13)
+                cache[key] = data
+
+            # purge
+            purged_count = cache.purge()
+            if expiration and (expiration != INFINITY):
+              # before expiration, nothing should have been purged
+              time.sleep(1)
+              self.assertEqual(purged_count, 0)
+              self.assertEqual(len(cache), 10)
+              # wait for expiration
+              time.sleep(expiration)
+              # after expiration, all should have been purged
+              purged_count = cache.purge()
+              self.assertEqual(purged_count, 10)
+              self.assertEqual(len(cache), 0)
+            else:
+              # nothing should have been purged
+              self.assertEqual(purged_count, 0)
+              self.assertEqual(len(cache), 10)
+
+          # fix huge memory usage with pypy
+          gc.collect()
+
+
+if __name__ == "__main__":
+  # disable logging
+  logging.basicConfig(level=logging.CRITICAL + 1)
+
+  # run tests
+  unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/update-version 
new/web_cache-1.1.0/update-version
--- old/web_cache-1.0.2/update-version  1970-01-01 01:00:00.000000000 +0100
+++ new/web_cache-1.1.0/update-version  2018-11-04 01:16:06.000000000 +0100
@@ -0,0 +1,16 @@
+#!/bin/bash -eu
+
+set -o pipefail
+
+readonly VERSION=${1:?}
+
+
+cd "$(git rev-parse --show-toplevel)"
+
+sed -i "s/^\(__version__ = \"\).*\(\"\)/\1$VERSION\2/w /dev/stdout" 
web_cache/__init__.py
+
+git add web_cache/__init__.py
+
+git commit -m "Version ${VERSION}"
+
+git tag -m "Version ${VERSION}" ${VERSION}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/web_cache/__init__.py 
new/web_cache-1.1.0/web_cache/__init__.py
--- old/web_cache-1.0.2/web_cache/__init__.py   2017-04-23 03:04:24.000000000 
+0200
+++ new/web_cache-1.1.0/web_cache/__init__.py   2018-11-04 01:16:06.000000000 
+0100
@@ -1,6 +1,6 @@
 """ Persistent cache storage for web ressources, with different cache eviction 
strategies, and optional compression. """
 
-__version__ = "1.0.2"
+__version__ = "1.1.0"
 __author__ = "desbma"
 __license__ = "LGPLv2"
 
@@ -18,17 +18,25 @@
 import zlib
 
 
+DB_FORMAT_VERSION = 2  # incremented at each incompatible database/pickle 
format change
+PICKLE_PROTOCOL_VERSION = 4
 DISABLE_PERSISTENT_CACHING = False  # useful for tests
 
 
-Compression = enum.Enum("Compression", ("NONE", "DEFLATE", "BZIP2", "LZMA"))
+class Compression(enum.IntEnum):
+  NONE = 0
+  DEFLATE = 1
+  BZIP2 = 2
+  LZMA = 3
+
+
 CachingStrategy = enum.Enum("CachingStrategy", ("FIFO", "LRU"))
 
 
 class WebCache:
 
   def __init__(self, db_filepath, table_name, *, caching_strategy, 
expiration=None, compression=Compression.NONE,
-               compression_level=9, safe_mode=False):
+               compression_level=9, auto_compression_threshold=1, 
safe_mode=False):
     """
     Args:
       db_filepath: Database filepath
@@ -38,6 +46,7 @@
         never expire
       compression: Algorithm used to compress cache items
       compression_level: Compression level (0-9)
+      auto_compression_threshold: Don't compress if compression ratio is above 
this value
       safe_mode: If False, will enable some optimizations that increase cache 
write speed, but may compromise cache
         integrity in case of Python crash or power loss
     """
@@ -49,6 +58,8 @@
     assert(compression in Compression)
     self.__compression = compression
     self.__compression_level = compression_level
+    assert(0 < auto_compression_threshold <= 1)
+    self.__auto_compression_threshold = auto_compression_threshold
 
     # connection
     if DISABLE_PERSISTENT_CACHING:
@@ -68,22 +79,28 @@
                                      url TEXT PRIMARY KEY,
                                      added_timestamp INTEGER NOT NULL,
                                      last_accessed_timestamp INTEGER NOT NULL,
+                                     compression INTEGER NOT NULL,
                                      data BLOB NOT NULL
-                                   );""" % (self.__table_name))
-      self.__connection.execute("""CREATE TABLE IF NOT EXISTS %s_post
+                                   );""" % (self.getDbTableName()))
+      self.__connection.execute("""CREATE TABLE IF NOT EXISTS %s
                                    (
                                      url TEXT NOT NULL,
                                      post_data BLOB NOT NULL,
                                      added_timestamp INTEGER NOT NULL,
                                      last_accessed_timestamp INTEGER NOT NULL,
+                                     compression INTEGER NOT NULL,
                                      data BLOB NOT NULL
-                                   );""" % (self.__table_name))
-      self.__connection.execute("CREATE INDEX IF NOT EXISTS idx ON 
%s_post(url, post_data);" % (self.__table_name))
+                                   );""" % (self.getDbTableName(post=True)))
+      self.__connection.execute("CREATE INDEX IF NOT EXISTS idx ON %s(url, 
post_data);" % (self.getDbTableName(post=True)))
 
     # stats
     self.__hit_count = 0
     self.__miss_count = 0
 
+  def getDbTableName(self, *, post=False):
+    """ Get sqlite table name. """
+    return "%s%s_f%u" % (self.__table_name, "_post" if post else "", 
DB_FORMAT_VERSION)
+
   def getDatabaseFileSize(self):
     """ Return the file size of the database as a pretty string. """
     if DISABLE_PERSISTENT_CACHING:
@@ -104,9 +121,10 @@
 
   def __len__(self):
     """ Return the number of items in the cache. """
+    row_count = 0
     with self.__connection:
-      row_count = self.__connection.execute("SELECT COUNT(*) FROM %s;" % 
(self.__table_name)).fetchall()[0][0]
-      row_count += self.__connection.execute("SELECT COUNT(*) FROM %s_post;" % 
(self.__table_name)).fetchall()[0][0]
+      for post in (False, True):
+        row_count += self.__connection.execute("SELECT COUNT(*) FROM %s;" % 
(self.getDbTableName(post=post))).fetchone()[0]
     return row_count
 
   def __del__(self):
@@ -125,28 +143,28 @@
 
     with self.__connection:
       if post_data is not None:
-        post_bin_data = sqlite3.Binary(pickle.dumps(post_data, protocol=3))
-        data = self.__connection.execute("""SELECT data
-                                            FROM %s_post
-                                            WHERE url = ? AND
-                                                  post_data = ?;""" % 
(self.__table_name),
-                                         (url, post_bin_data)).fetchone()
+        post_bin_data = sqlite3.Binary(pickle.dumps(post_data, 
protocol=PICKLE_PROTOCOL_VERSION))
+        r = self.__connection.execute("""SELECT data, compression
+                                         FROM %s
+                                         WHERE url = ? AND
+                                               post_data = ?;""" % 
(self.getDbTableName(post=True)),
+                                      (url, post_bin_data)).fetchone()
       else:
-        data = self.__connection.execute("""SELECT data
-                                            FROM %s
-                                            WHERE url = ?;""" % 
(self.__table_name),
-                                         (url,)).fetchone()
-    if not data:
+        r = self.__connection.execute("""SELECT data, compression
+                                         FROM %s
+                                         WHERE url = ?;""" % 
(self.getDbTableName()),
+                                      (url,)).fetchone()
+    if not r:
       raise KeyError(url_data)
-    data = data[0]
+    data, compression = r
 
-    if self.__compression is Compression.DEFLATE:
+    if compression == Compression.DEFLATE:
       buffer = memoryview(data)
       data = zlib.decompress(buffer)
-    elif self.__compression is Compression.BZIP2:
+    elif compression == Compression.BZIP2:
       buffer = memoryview(data)
       data = bz2.decompress(buffer)
-    elif self.__compression is Compression.LZMA:
+    elif compression == Compression.LZMA:
       buffer = memoryview(data)
       data = lzma.decompress(buffer)
 
@@ -155,13 +173,13 @@
       with self.__connection:
         if post_data is not None:
           self.__connection.execute("UPDATE " +
-                                    self.__table_name +
-                                    "_post SET last_accessed_timestamp = 
strftime('%s', 'now') WHERE url = ? AND post_data = ?;",
+                                    self.getDbTableName(post=True) + " " +
+                                    "SET last_accessed_timestamp = 
strftime('%s', 'now') WHERE url = ? AND post_data = ?;",
                                     (url, post_bin_data))
         else:
           self.__connection.execute("UPDATE " +
-                                    self.__table_name +
-                                    " SET last_accessed_timestamp = 
strftime('%s', 'now') WHERE url = ?;",
+                                    self.getDbTableName() + " " +
+                                    "SET last_accessed_timestamp = 
strftime('%s', 'now') WHERE url = ?;",
                                     (url,))
     return data
 
@@ -175,26 +193,43 @@
 
     if self.__compression is Compression.DEFLATE:
       buffer = memoryview(data)
-      data = zlib.compress(buffer, self.__compression_level)
+      compressed_data = zlib.compress(buffer, self.__compression_level)
     elif self.__compression is Compression.BZIP2:
       buffer = memoryview(data)
-      data = bz2.compress(buffer, compresslevel=self.__compression_level)
+      compressed_data = bz2.compress(buffer, 
compresslevel=self.__compression_level)
     elif self.__compression is Compression.LZMA:
       buffer = memoryview(data)
-      data = lzma.compress(buffer, format=lzma.FORMAT_ALONE, 
preset=self.__compression_level)
+      compressed_data = lzma.compress(buffer, format=lzma.FORMAT_ALONE, 
preset=self.__compression_level)
+
+    if (self.__compression is Compression.NONE) or (len(compressed_data) > 
len(data) * self.__auto_compression_threshold):
+      data_to_store = data
+      compression = Compression.NONE
+    else:
+      data_to_store = compressed_data
+      compression = self.__compression
+
+    # if self.__compression is not Compression.NONE:
+    #   print("%s compression: "
+    #         "original size = %u b, "
+    #         "compressed size = %u b, "
+    #         "compression threshold (%.1f%%) = %u b" % ("Disabling" if 
(compression is Compression.NONE) else "Enabling",
+    #                                                    len(data),
+    #                                                    len(compressed_data),
+    #                                                    
self.__auto_compression_threshold * 100,
+    #                                                    
self.__auto_compression_threshold * len(data)))
 
     with self.__connection:
       if post_data is not None:
-        post_bin_data = sqlite3.Binary(pickle.dumps(post_data, protocol=3))
+        post_bin_data = sqlite3.Binary(pickle.dumps(post_data, 
protocol=PICKLE_PROTOCOL_VERSION))
         self.__connection.execute("INSERT OR REPLACE INTO " +
-                                  self.__table_name +
-                                  "_post (url, post_data, added_timestamp, 
last_accessed_timestamp,data) VALUES (?, ?, strftime('%s','now'), 
strftime('%s','now'), ?);",
-                                  (url, post_bin_data, sqlite3.Binary(data)))
+                                  self.getDbTableName(post=True) +
+                                  " (url, post_data, added_timestamp, 
last_accessed_timestamp, compression, data) VALUES (?, ?, strftime('%s','now'), 
strftime('%s','now'), ?, ?);",
+                                  (url, post_bin_data, compression, 
sqlite3.Binary(data_to_store)))
       else:
         self.__connection.execute("INSERT OR REPLACE INTO " +
-                                  self.__table_name +
-                                  " (url, added_timestamp, 
last_accessed_timestamp,data) VALUES (?, strftime('%s','now'), 
strftime('%s','now'), ?);",
-                                  (url, sqlite3.Binary(data)))
+                                  self.getDbTableName() +
+                                  " (url, added_timestamp, 
last_accessed_timestamp, compression, data) VALUES (?, strftime('%s','now'), 
strftime('%s','now'), ?, ?);",
+                                  (url, compression, 
sqlite3.Binary(data_to_store)))
 
   def __delitem__(self, url_data):
     """ Remove an item from cache. """
@@ -206,12 +241,12 @@
 
     with self.__connection:
       if post_data is not None:
-        post_bin_data = sqlite3.Binary(pickle.dumps(post_data, protocol=3))
-        deleted_count = self.__connection.execute("DELETE FROM " + 
self.__table_name + "_post " +
+        post_bin_data = sqlite3.Binary(pickle.dumps(post_data, 
protocol=PICKLE_PROTOCOL_VERSION))
+        deleted_count = self.__connection.execute("DELETE FROM " + 
self.getDbTableName(post=True) + " " +
                                                   "WHERE url = ? AND post_data 
= ?;",
                                                   (url, 
post_bin_data)).rowcount
       else:
-        deleted_count = self.__connection.execute("DELETE FROM " + 
self.__table_name + " WHERE url = ?;",
+        deleted_count = self.__connection.execute("DELETE FROM " + 
self.getDbTableName() + " WHERE url = ?;",
                                                   (url,)).rowcount
     if deleted_count == 0:
       raise KeyError(url_data)
@@ -223,20 +258,18 @@
       with self.__connection:
         if self.__caching_strategy is CachingStrategy.FIFO:
           # dump least recently added rows
-          for table_suffix in ("", "_post"):
+          for post in (False, True):
             purged_count += self.__connection.execute("DELETE FROM " +
-                                                      self.__table_name +
-                                                      "%s " % (table_suffix) +
+                                                      
self.getDbTableName(post=post) + " "
                                                       "WHERE (strftime('%s', 
'now') - added_timestamp) > ?;",
                                                       
(self.__expiration,)).rowcount
         elif self.__caching_strategy is CachingStrategy.LRU:
           # dump least recently accessed rows
-          for table_suffix in ("", "_post"):
+          for post in (False, True):
             purged_count += self.__connection.execute("DELETE FROM " +
-                                                       self.__table_name +
-                                                       "%s " % (table_suffix) +
-                                                       "WHERE (strftime('%s', 
'now') - last_accessed_timestamp) > ?;",
-                                                       
(self.__expiration,)).rowcount
+                                                      
self.getDbTableName(post=post) + " "
+                                                      "WHERE (strftime('%s', 
'now') - last_accessed_timestamp) > ?;",
+                                                      
(self.__expiration,)).rowcount
     return purged_count
 
   def __contains__(self, url_data):
@@ -249,17 +282,17 @@
 
     with self.__connection:
       if post_data is not None:
-        post_bin_data = sqlite3.Binary(pickle.dumps(post_data, protocol=3))
+        post_bin_data = sqlite3.Binary(pickle.dumps(post_data, 
protocol=PICKLE_PROTOCOL_VERSION))
         hit = (self.__connection.execute("""SELECT COUNT(*)
-                                            FROM %s_post
+                                            FROM %s
                                             WHERE url = ? AND
-                                                  post_data = ?;""" % 
(self.__table_name),
-                                         (url, 
post_bin_data)).fetchall()[0][0] > 0)
+                                                  post_data = ?;""" % 
(self.getDbTableName(post=True)),
+                                         (url, post_bin_data)).fetchone()[0] > 
0)
       else:
         hit = (self.__connection.execute("""SELECT COUNT(*)
                                             FROM %s
-                                            WHERE url = ?;""" % 
(self.__table_name),
-                                         (url,)).fetchall()[0][0] > 0)
+                                            WHERE url = ?;""" % 
(self.getDbTableName()),
+                                         (url,)).fetchone()[0] > 0)
     if hit:
       self.__hit_count += 1
     else:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/web_cache.egg-info/PKG-INFO 
new/web_cache-1.1.0/web_cache.egg-info/PKG-INFO
--- old/web_cache-1.0.2/web_cache.egg-info/PKG-INFO     2017-04-23 
03:06:29.000000000 +0200
+++ new/web_cache-1.1.0/web_cache.egg-info/PKG-INFO     1970-01-01 
01:00:00.000000000 +0100
@@ -1,72 +0,0 @@
-Metadata-Version: 1.1
-Name: web-cache
-Version: 1.0.2
-Summary: Simple persistent cache storage, with different cache eviction 
strategies, and optional compression
-Home-page: https://github.com/desbma/web_cache
-Author: desbma
-Author-email: UNKNOWN
-License: UNKNOWN
-Download-URL: https://github.com/desbma/web_cache/archive/1.0.2.tar.gz
-Description: Web cache
-        =========
-        
-        |Latest version| |Tests status| |Coverage| |Supported Python versions|
-        |License|
-        
-        Python module for simple key-value storage backed up by sqlite3
-        database. The typical use case is a URL to HTTP data cache, but it can
-        also be used fo non web ressources.
-        
-        Features
-        --------
-        
-        -  Simple ``dict`` interface allows natural usage (``if key in cache``,
-           ``value = cache[key]``, etc.)
-        -  Optional Zlib, BZIP2 or LZMA compression
-        -  FIFO or LRU cache eviction strategies
-        -  Optional thread safe interface to work around Python Sqlite3 'same
-           thread' limitation
-        -  Provides cache hit rate statistics
-        
-        Installation (from PyPI, with PIP)
-        ----------------------------------
-        
-        web\_cache requires `Python <https://www.python.org/downloads/>`__ >=
-        3.3.
-        
-        1. If you don't already have it, `install
-           pip <http://www.pip-installer.org/en/latest/installing.html>`__ for
-           Python 3 (not needed if you are using Python >= 3.4)
-        2. Install web\_cache: ``pip3 install web_cache``
-        
-        License
-        -------
-        
-        `LGPLv2.1 <https://www.gnu.org/licenses/old-licenses/lgpl-2.1.html>`__
-        
-        .. |Latest version| image:: 
https://img.shields.io/pypi/v/web_cache.svg?style=flat
-           :target: https://pypi.python.org/pypi/web_cache/
-        .. |Tests status| image:: 
https://img.shields.io/travis/desbma/web_cache/master.svg?label=tests&style=flat
-           :target: https://travis-ci.org/desbma/web_cache
-        .. |Coverage| image:: 
https://img.shields.io/coveralls/desbma/web_cache/master.svg?style=flat
-           :target: https://coveralls.io/github/desbma/web_cache?branch=master
-        .. |Supported Python versions| image:: 
https://img.shields.io/pypi/pyversions/web_cache.svg?style=flat
-           :target: https://pypi.python.org/pypi/web_cache/
-        .. |License| image:: 
https://img.shields.io/github/license/desbma/web_cache.svg?style=flat
-           :target: https://pypi.python.org/pypi/web_cache/
-        
-Keywords: cache,sqlite3,key-value,persistent
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: GNU Lesser General Public License v2 or 
later (LGPLv2+)
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Topic :: Database
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/web_cache.egg-info/SOURCES.txt 
new/web_cache-1.1.0/web_cache.egg-info/SOURCES.txt
--- old/web_cache-1.0.2/web_cache.egg-info/SOURCES.txt  2017-04-23 
03:06:29.000000000 +0200
+++ new/web_cache-1.1.0/web_cache.egg-info/SOURCES.txt  1970-01-01 
01:00:00.000000000 +0100
@@ -1,10 +0,0 @@
-LICENSE
-MANIFEST.in
-README.md
-setup.py
-web_cache/__init__.py
-web_cache.egg-info/PKG-INFO
-web_cache.egg-info/SOURCES.txt
-web_cache.egg-info/dependency_links.txt
-web_cache.egg-info/requires.txt
-web_cache.egg-info/top_level.txt
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/web_cache-1.0.2/web_cache.egg-info/dependency_links.txt 
new/web_cache-1.1.0/web_cache.egg-info/dependency_links.txt
--- old/web_cache-1.0.2/web_cache.egg-info/dependency_links.txt 2017-04-23 
03:06:29.000000000 +0200
+++ new/web_cache-1.1.0/web_cache.egg-info/dependency_links.txt 1970-01-01 
01:00:00.000000000 +0100
@@ -1 +0,0 @@
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/web_cache.egg-info/requires.txt 
new/web_cache-1.1.0/web_cache.egg-info/requires.txt
--- old/web_cache-1.0.2/web_cache.egg-info/requires.txt 2017-04-23 
03:06:29.000000000 +0200
+++ new/web_cache-1.1.0/web_cache.egg-info/requires.txt 1970-01-01 
01:00:00.000000000 +0100
@@ -1 +0,0 @@
-enum34
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/web_cache-1.0.2/web_cache.egg-info/top_level.txt 
new/web_cache-1.1.0/web_cache.egg-info/top_level.txt
--- old/web_cache-1.0.2/web_cache.egg-info/top_level.txt        2017-04-23 
03:06:29.000000000 +0200
+++ new/web_cache-1.1.0/web_cache.egg-info/top_level.txt        1970-01-01 
01:00:00.000000000 +0100
@@ -1 +0,0 @@
-web_cache


Reply via email to