Hello community,

here is the log from the commit of package python-CacheControl for 
openSUSE:Factory checked in at 2019-03-01 16:48:07
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-CacheControl (Old)
 and      /work/SRC/openSUSE:Factory/.python-CacheControl.new.28833 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-CacheControl"

Fri Mar  1 16:48:07 2019 rev:3 rq:680057 version:0.12.5

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-CacheControl/python-CacheControl.changes  
2018-12-13 19:40:51.313280026 +0100
+++ 
/work/SRC/openSUSE:Factory/.python-CacheControl.new.28833/python-CacheControl.changes
       2019-03-01 16:48:11.777781579 +0100
@@ -1,0 +2,7 @@
+Thu Feb 28 09:10:36 UTC 2019 - Tomáš Chvátal <[email protected]>
+
+- Update to 0.12.5:
+  * various test fixes
+  * few crasher fixes
+
+-------------------------------------------------------------------

Old:
----
  CacheControl-0.12.4.tar.gz

New:
----
  CacheControl-0.12.5.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-CacheControl.spec ++++++
--- /var/tmp/diff_new_pack.YIO2FP/_old  2019-03-01 16:48:12.381781351 +0100
+++ /var/tmp/diff_new_pack.YIO2FP/_new  2019-03-01 16:48:12.385781349 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package python-CacheControl
 #
-# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -18,7 +18,7 @@
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-CacheControl
-Version:        0.12.4
+Version:        0.12.5
 Release:        0
 Summary:        httplib2 caching for requests
 License:        Apache-2.0

++++++ CacheControl-0.12.4.tar.gz -> CacheControl-0.12.5.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/CacheControl.egg-info/PKG-INFO 
new/CacheControl-0.12.5/CacheControl.egg-info/PKG-INFO
--- old/CacheControl-0.12.4/CacheControl.egg-info/PKG-INFO      2018-01-27 
07:24:39.000000000 +0100
+++ new/CacheControl-0.12.5/CacheControl.egg-info/PKG-INFO      2018-06-07 
16:53:12.000000000 +0200
@@ -1,6 +1,6 @@
-Metadata-Version: 1.1
+Metadata-Version: 1.2
 Name: CacheControl
-Version: 0.12.4
+Version: 0.12.5
 Summary: httplib2 caching for requests
 Home-page: https://github.com/ionrock/cachecontrol
 Author: Eric Larson
@@ -56,10 +56,11 @@
 Classifier: Environment :: Web Environment
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.2
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
 Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/CacheControl-0.12.4/CacheControl.egg-info/requires.txt 
new/CacheControl-0.12.5/CacheControl.egg-info/requires.txt
--- old/CacheControl-0.12.4/CacheControl.egg-info/requires.txt  2018-01-27 
07:24:39.000000000 +0100
+++ new/CacheControl-0.12.5/CacheControl.egg-info/requires.txt  2018-06-07 
16:53:12.000000000 +0200
@@ -1,5 +1,5 @@
 requests
-msgpack-python
+msgpack
 
 [filecache]
 lockfile>=0.9
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/PKG-INFO 
new/CacheControl-0.12.5/PKG-INFO
--- old/CacheControl-0.12.4/PKG-INFO    2018-01-27 07:24:39.000000000 +0100
+++ new/CacheControl-0.12.5/PKG-INFO    2018-06-07 16:53:12.000000000 +0200
@@ -1,6 +1,6 @@
-Metadata-Version: 1.1
+Metadata-Version: 1.2
 Name: CacheControl
-Version: 0.12.4
+Version: 0.12.5
 Summary: httplib2 caching for requests
 Home-page: https://github.com/ionrock/cachecontrol
 Author: Eric Larson
@@ -56,10 +56,11 @@
 Classifier: Environment :: Web Environment
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.2
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
 Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/cachecontrol/__init__.py 
new/CacheControl-0.12.5/cachecontrol/__init__.py
--- old/CacheControl-0.12.4/cachecontrol/__init__.py    2018-01-27 
01:28:19.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/__init__.py    2018-06-07 
16:53:04.000000000 +0200
@@ -2,9 +2,9 @@
 
 Make it easy to import from cachecontrol without long namespaces.
 """
-__author__ = 'Eric Larson'
-__email__ = '[email protected]'
-__version__ = '0.12.4'
+__author__ = "Eric Larson"
+__email__ = "[email protected]"
+__version__ = "0.12.5"
 
 from .wrapper import CacheControl
 from .adapter import CacheControlAdapter
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/cachecontrol/_cmd.py 
new/CacheControl-0.12.5/cachecontrol/_cmd.py
--- old/CacheControl-0.12.4/cachecontrol/_cmd.py        2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/_cmd.py        2018-06-07 
16:52:42.000000000 +0200
@@ -17,14 +17,11 @@
 
 def get_session():
     adapter = CacheControlAdapter(
-        DictCache(),
-        cache_etags=True,
-        serializer=None,
-        heuristic=None,
+        DictCache(), cache_etags=True, serializer=None, heuristic=None
     )
     sess = requests.Session()
-    sess.mount('http://', adapter)
-    sess.mount('https://', adapter)
+    sess.mount("http://", adapter)
+    sess.mount("https://", adapter)
 
     sess.cache_controller = adapter.controller
     return sess
@@ -32,7 +29,7 @@
 
 def get_args():
     parser = ArgumentParser()
-    parser.add_argument('url', help='The URL to try and cache')
+    parser.add_argument("url", help="The URL to try and cache")
     return parser.parse_args()
 
 
@@ -51,10 +48,10 @@
 
     # Now try to get it
     if sess.cache_controller.cached_request(resp.request):
-        print('Cached!')
+        print("Cached!")
     else:
-        print('Not cached :(')
+        print("Not cached :(")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/cachecontrol/adapter.py 
new/CacheControl-0.12.5/cachecontrol/adapter.py
--- old/CacheControl-0.12.4/cachecontrol/adapter.py     2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/adapter.py     2018-06-07 
16:52:42.000000000 +0200
@@ -10,25 +10,27 @@
 
 
 class CacheControlAdapter(HTTPAdapter):
-    invalidating_methods = set(['PUT', 'DELETE'])
+    invalidating_methods = {"PUT", "DELETE"}
 
-    def __init__(self, cache=None,
-                 cache_etags=True,
-                 controller_class=None,
-                 serializer=None,
-                 heuristic=None,
-                 cacheable_methods=None,
-                 *args, **kw):
+    def __init__(
+        self,
+        cache=None,
+        cache_etags=True,
+        controller_class=None,
+        serializer=None,
+        heuristic=None,
+        cacheable_methods=None,
+        *args,
+        **kw
+    ):
         super(CacheControlAdapter, self).__init__(*args, **kw)
         self.cache = cache or DictCache()
         self.heuristic = heuristic
-        self.cacheable_methods = cacheable_methods or ('GET',)
+        self.cacheable_methods = cacheable_methods or ("GET",)
 
         controller_factory = controller_class or CacheController
         self.controller = controller_factory(
-            self.cache,
-            cache_etags=cache_etags,
-            serializer=serializer,
+            self.cache, cache_etags=cache_etags, serializer=serializer
         )
 
     def send(self, request, cacheable_methods=None, **kw):
@@ -43,20 +45,18 @@
             except zlib.error:
                 cached_response = None
             if cached_response:
-                return self.build_response(request, cached_response,
-                                           from_cache=True)
+                return self.build_response(request, cached_response, 
from_cache=True)
 
             # check for etags and add headers if appropriate
-            request.headers.update(
-                self.controller.conditional_headers(request)
-            )
+            
request.headers.update(self.controller.conditional_headers(request))
 
         resp = super(CacheControlAdapter, self).send(request, **kw)
 
         return resp
 
-    def build_response(self, request, response, from_cache=False,
-                       cacheable_methods=None):
+    def build_response(
+        self, request, response, from_cache=False, cacheable_methods=None
+    ):
         """
         Build a response by making a request or using the cache.
 
@@ -101,10 +101,8 @@
                 response._fp = CallbackFileWrapper(
                     response._fp,
                     functools.partial(
-                        self.controller.cache_response,
-                        request,
-                        response,
-                    )
+                        self.controller.cache_response, request, response
+                    ),
                 )
                 if response.chunked:
                     super_update_chunk_length = response._update_chunk_length
@@ -113,11 +111,12 @@
                         super_update_chunk_length()
                         if self.chunk_left == 0:
                             self._fp._close()
-                    response._update_chunk_length = 
types.MethodType(_update_chunk_length, response)
 
-        resp = super(CacheControlAdapter, self).build_response(
-            request, response
-        )
+                    response._update_chunk_length = types.MethodType(
+                        _update_chunk_length, response
+                    )
+
+        resp = super(CacheControlAdapter, self).build_response(request, 
response)
 
         # See if we should invalidate the cache.
         if request.method in self.invalidating_methods and resp.ok:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/cachecontrol/cache.py 
new/CacheControl-0.12.5/cachecontrol/cache.py
--- old/CacheControl-0.12.4/cachecontrol/cache.py       2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/cache.py       2018-06-07 
16:32:16.000000000 +0200
@@ -8,13 +8,13 @@
 class BaseCache(object):
 
     def get(self, key):
-        raise NotImplemented()
+        raise NotImplementedError()
 
     def set(self, key, value):
-        raise NotImplemented()
+        raise NotImplementedError()
 
     def delete(self, key):
-        raise NotImplemented()
+        raise NotImplementedError()
 
     def close(self):
         pass
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/CacheControl-0.12.4/cachecontrol/caches/file_cache.py 
new/CacheControl-0.12.5/cachecontrol/caches/file_cache.py
--- old/CacheControl-0.12.4/cachecontrol/caches/file_cache.py   2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/caches/file_cache.py   2018-06-07 
16:52:42.000000000 +0200
@@ -9,7 +9,7 @@
     FileNotFoundError
 except NameError:
     # py2.X
-    FileNotFoundError = OSError
+    FileNotFoundError = (IOError, OSError)
 
 
 def _secure_open_write(filename, fmode):
@@ -46,6 +46,7 @@
     fd = os.open(filename, flags, fmode)
     try:
         return os.fdopen(fd, "wb")
+
     except:
         # An error occurred wrapping our FD in a file object
         os.close(fd)
@@ -53,8 +54,16 @@
 
 
 class FileCache(BaseCache):
-    def __init__(self, directory, forever=False, filemode=0o0600,
-                 dirmode=0o0700, use_dir_lock=None, lock_class=None):
+
+    def __init__(
+        self,
+        directory,
+        forever=False,
+        filemode=0o0600,
+        dirmode=0o0700,
+        use_dir_lock=None,
+        lock_class=None,
+    ):
 
         if use_dir_lock is not None and lock_class is not None:
             raise ValueError("Cannot use use_dir_lock and lock_class together")
@@ -63,12 +72,15 @@
             from lockfile import LockFile
             from lockfile.mkdirlockfile import MkdirLockFile
         except ImportError:
-            notice = dedent("""
+            notice = dedent(
+                """
             NOTE: In order to use the FileCache you must have
             lockfile installed. You can install it via pip:
               pip install lockfile
-            """)
+            """
+            )
             raise ImportError(notice)
+
         else:
             if use_dir_lock:
                 lock_class = MkdirLockFile
@@ -95,11 +107,12 @@
 
     def get(self, key):
         name = self._fn(key)
-        if not os.path.exists(name):
-            return None
+        try:
+            with open(name, "rb") as fh:
+                return fh.read()
 
-        with open(name, 'rb') as fh:
-            return fh.read()
+        except FileNotFoundError:
+            return None
 
     def set(self, key, value):
         name = self._fn(key)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/CacheControl-0.12.4/cachecontrol/caches/redis_cache.py 
new/CacheControl-0.12.5/cachecontrol/caches/redis_cache.py
--- old/CacheControl-0.12.4/cachecontrol/caches/redis_cache.py  2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/caches/redis_cache.py  2018-06-07 
16:33:44.000000000 +0200
@@ -4,16 +4,6 @@
 from cachecontrol.cache import BaseCache
 
 
-def total_seconds(td):
-    """Python 2.6 compatability"""
-    if hasattr(td, 'total_seconds'):
-        return int(td.total_seconds())
-
-    ms = td.microseconds
-    secs = (td.seconds + td.days * 24 * 3600)
-    return int((ms + secs * 10**6) / 10**6)
-
-
 class RedisCache(BaseCache):
 
     def __init__(self, conn):
@@ -27,7 +17,7 @@
             self.conn.set(key, value)
         else:
             expires = expires - datetime.utcnow()
-            self.conn.setex(key, total_seconds(expires), value)
+            self.conn.setex(key, int(expires.total_seconds()), value)
 
     def delete(self, key):
         self.conn.delete(key)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/cachecontrol/controller.py 
new/CacheControl-0.12.5/cachecontrol/controller.py
--- old/CacheControl-0.12.4/cachecontrol/controller.py  2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/controller.py  2018-06-07 
16:52:42.000000000 +0200
@@ -30,8 +30,10 @@
 class CacheController(object):
     """An interface to see if request should cached or not.
     """
-    def __init__(self, cache=None, cache_etags=True, serializer=None,
-                 status_codes=None):
+
+    def __init__(
+        self, cache=None, cache_etags=True, serializer=None, status_codes=None
+    ):
         self.cache = cache or DictCache()
         self.cache_etags = cache_etags
         self.serializer = serializer or Serializer()
@@ -64,34 +66,35 @@
     def parse_cache_control(self, headers):
         known_directives = {
             # https://tools.ietf.org/html/rfc7234#section-5.2
-            'max-age': (int, True,),
-            'max-stale': (int, False,),
-            'min-fresh': (int, True,),
-            'no-cache': (None, False,),
-            'no-store': (None, False,),
-            'no-transform': (None, False,),
-            'only-if-cached' : (None, False,),
-            'must-revalidate': (None, False,),
-            'public': (None, False,),
-            'private': (None, False,),
-            'proxy-revalidate': (None, False,),
-            's-maxage': (int, True,)
+            "max-age": (int, True),
+            "max-stale": (int, False),
+            "min-fresh": (int, True),
+            "no-cache": (None, False),
+            "no-store": (None, False),
+            "no-transform": (None, False),
+            "only-if-cached": (None, False),
+            "must-revalidate": (None, False),
+            "public": (None, False),
+            "private": (None, False),
+            "proxy-revalidate": (None, False),
+            "s-maxage": (int, True),
         }
 
-        cc_headers = headers.get('cache-control',
-                                 headers.get('Cache-Control', ''))
+        cc_headers = headers.get("cache-control", headers.get("Cache-Control", 
""))
 
         retval = {}
 
-        for cc_directive in cc_headers.split(','):
-            parts = cc_directive.split('=', 1)
+        for cc_directive in cc_headers.split(","):
+            if not cc_directive.strip():
+                continue
+
+            parts = cc_directive.split("=", 1)
             directive = parts[0].strip()
 
             try:
                 typ, required = known_directives[directive]
             except KeyError:
-                logger.debug('Ignoring unknown cache-control directive: %s',
-                             directive)
+                logger.debug("Ignoring unknown cache-control directive: %s", 
directive)
                 continue
 
             if not typ or not required:
@@ -101,11 +104,16 @@
                     retval[directive] = typ(parts[1].strip())
                 except IndexError:
                     if required:
-                        logger.debug('Missing value for cache-control '
-                                     'directive: %s', directive)
+                        logger.debug(
+                            "Missing value for cache-control " "directive: %s",
+                            directive,
+                        )
                 except ValueError:
-                    logger.debug('Invalid value for cache-control directive '
-                                 '%s, must be %s', directive, typ.__name__)
+                    logger.debug(
+                        "Invalid value for cache-control directive " "%s, must 
be %s",
+                        directive,
+                        typ.__name__,
+                    )
 
         return retval
 
@@ -119,24 +127,24 @@
         cc = self.parse_cache_control(request.headers)
 
         # Bail out if the request insists on fresh data
-        if 'no-cache' in cc:
+        if "no-cache" in cc:
             logger.debug('Request header has "no-cache", cache bypassed')
             return False
 
-        if 'max-age' in cc and cc['max-age'] == 0:
+        if "max-age" in cc and cc["max-age"] == 0:
             logger.debug('Request header has "max_age" as 0, cache bypassed')
             return False
 
         # Request allows serving from the cache, let's see if we find something
         cache_data = self.cache.get(cache_url)
         if cache_data is None:
-            logger.debug('No cache entry available')
+            logger.debug("No cache entry available")
             return False
 
         # Check whether it can be deserialized
         resp = self.serializer.loads(request, cache_data)
         if not resp:
-            logger.warning('Cache entry deserialization failed, entry ignored')
+            logger.warning("Cache entry deserialization failed, entry ignored")
             return False
 
         # If we have a cached 301, return it immediately. We don't
@@ -148,27 +156,27 @@
         # Client can try to refresh the value by repeating the request
         # with cache busting headers as usual (ie no-cache).
         if resp.status == 301:
-            msg = ('Returning cached "301 Moved Permanently" response '
-                   '(ignoring date and etag information)')
+            msg = (
+                'Returning cached "301 Moved Permanently" response '
+                "(ignoring date and etag information)"
+            )
             logger.debug(msg)
             return resp
 
         headers = CaseInsensitiveDict(resp.headers)
-        if not headers or 'date' not in headers:
-            if 'etag' not in headers:
+        if not headers or "date" not in headers:
+            if "etag" not in headers:
                 # Without date or etag, the cached response can never be used
                 # and should be deleted.
-                logger.debug('Purging cached response: no date or etag')
+                logger.debug("Purging cached response: no date or etag")
                 self.cache.delete(cache_url)
-            logger.debug('Ignoring cached response: no date')
+            logger.debug("Ignoring cached response: no date")
             return False
 
         now = time.time()
-        date = calendar.timegm(
-            parsedate_tz(headers['date'])
-        )
+        date = calendar.timegm(parsedate_tz(headers["date"]))
         current_age = max(0, now - date)
-        logger.debug('Current age based on date: %i', current_age)
+        logger.debug("Current age based on date: %i", current_age)
 
         # TODO: There is an assumption that the result will be a
         #       urllib3 response object. This may not be best since we
@@ -180,45 +188,41 @@
         freshness_lifetime = 0
 
         # Check the max-age pragma in the cache control header
-        if 'max-age' in resp_cc:
-            freshness_lifetime = resp_cc['max-age']
-            logger.debug('Freshness lifetime from max-age: %i',
-                         freshness_lifetime)
+        if "max-age" in resp_cc:
+            freshness_lifetime = resp_cc["max-age"]
+            logger.debug("Freshness lifetime from max-age: %i", 
freshness_lifetime)
 
         # If there isn't a max-age, check for an expires header
-        elif 'expires' in headers:
-            expires = parsedate_tz(headers['expires'])
+        elif "expires" in headers:
+            expires = parsedate_tz(headers["expires"])
             if expires is not None:
                 expire_time = calendar.timegm(expires) - date
                 freshness_lifetime = max(0, expire_time)
-                logger.debug("Freshness lifetime from expires: %i",
-                             freshness_lifetime)
+                logger.debug("Freshness lifetime from expires: %i", 
freshness_lifetime)
 
         # Determine if we are setting freshness limit in the
         # request. Note, this overrides what was in the response.
-        if 'max-age' in cc:
-            freshness_lifetime = cc['max-age']
-            logger.debug('Freshness lifetime from request max-age: %i',
-                         freshness_lifetime)
+        if "max-age" in cc:
+            freshness_lifetime = cc["max-age"]
+            logger.debug(
+                "Freshness lifetime from request max-age: %i", 
freshness_lifetime
+            )
 
-        if 'min-fresh' in cc:
-            min_fresh = cc['min-fresh']
+        if "min-fresh" in cc:
+            min_fresh = cc["min-fresh"]
             # adjust our current age by our min fresh
             current_age += min_fresh
-            logger.debug('Adjusted current age from min-fresh: %i',
-                         current_age)
+            logger.debug("Adjusted current age from min-fresh: %i", 
current_age)
 
         # Return entry if it is fresh enough
         if freshness_lifetime > current_age:
             logger.debug('The response is "fresh", returning cached response')
-            logger.debug('%i > %i', freshness_lifetime, current_age)
+            logger.debug("%i > %i", freshness_lifetime, current_age)
             return resp
 
         # we're not fresh. If we don't have an Etag, clear it out
-        if 'etag' not in headers:
-            logger.debug(
-                'The cached response is "stale" with no etag, purging'
-            )
+        if "etag" not in headers:
+            logger.debug('The cached response is "stale" with no etag, 
purging')
             self.cache.delete(cache_url)
 
         # return the original handler
@@ -232,16 +236,15 @@
         if resp:
             headers = CaseInsensitiveDict(resp.headers)
 
-            if 'etag' in headers:
-                new_headers['If-None-Match'] = headers['ETag']
+            if "etag" in headers:
+                new_headers["If-None-Match"] = headers["ETag"]
 
-            if 'last-modified' in headers:
-                new_headers['If-Modified-Since'] = headers['Last-Modified']
+            if "last-modified" in headers:
+                new_headers["If-Modified-Since"] = headers["Last-Modified"]
 
         return new_headers
 
-    def cache_response(self, request, response, body=None,
-                       status_codes=None):
+    def cache_response(self, request, response, body=None, status_codes=None):
         """
         Algorithm for caching requests.
 
@@ -252,9 +255,7 @@
         cacheable_status_codes = status_codes or self.cacheable_status_codes
         if response.status not in cacheable_status_codes:
             logger.debug(
-                'Status code %s not in %s',
-                response.status,
-                cacheable_status_codes
+                "Status code %s not in %s", response.status, 
cacheable_status_codes
             )
             return
 
@@ -264,10 +265,12 @@
         # Content-Length is valid then we can check to see if the body we've
         # been given matches the expected size, and if it doesn't we'll just
         # skip trying to cache it.
-        if (body is not None and
-                "content-length" in response_headers and
-                response_headers["content-length"].isdigit() and
-                int(response_headers["content-length"]) != len(body)):
+        if (
+            body is not None
+            and "content-length" in response_headers
+            and response_headers["content-length"].isdigit()
+            and int(response_headers["content-length"]) != len(body)
+        ):
             return
 
         cc_req = self.parse_cache_control(request.headers)
@@ -278,53 +281,49 @@
 
         # Delete it from the cache if we happen to have it stored there
         no_store = False
-        if 'no-store' in cc:
+        if "no-store" in cc:
             no_store = True
             logger.debug('Response header has "no-store"')
-        if 'no-store' in cc_req:
+        if "no-store" in cc_req:
             no_store = True
             logger.debug('Request header has "no-store"')
         if no_store and self.cache.get(cache_url):
             logger.debug('Purging existing cache entry to honor "no-store"')
             self.cache.delete(cache_url)
+        if no_store:
+            return
 
         # If we've been given an etag, then keep the response
-        if self.cache_etags and 'etag' in response_headers:
-            logger.debug('Caching due to etag')
+        if self.cache_etags and "etag" in response_headers:
+            logger.debug("Caching due to etag")
             self.cache.set(
-                cache_url,
-                self.serializer.dumps(request, response, body=body),
+                cache_url, self.serializer.dumps(request, response, body=body)
             )
 
         # Add to the cache any 301s. We do this before looking that
         # the Date headers.
         elif response.status == 301:
-            logger.debug('Caching permanant redirect')
-            self.cache.set(
-                cache_url,
-                self.serializer.dumps(request, response)
-            )
+            logger.debug("Caching permanant redirect")
+            self.cache.set(cache_url, self.serializer.dumps(request, response))
 
         # Add to the cache if the response headers demand it. If there
         # is no date header then we can't do anything about expiring
         # the cache.
-        elif 'date' in response_headers:
+        elif "date" in response_headers:
             # cache when there is a max-age > 0
-            if 'max-age' in cc and cc['max-age'] > 0:
-                logger.debug('Caching b/c date exists and max-age > 0')
+            if "max-age" in cc and cc["max-age"] > 0:
+                logger.debug("Caching b/c date exists and max-age > 0")
                 self.cache.set(
-                    cache_url,
-                    self.serializer.dumps(request, response, body=body),
+                    cache_url, self.serializer.dumps(request, response, 
body=body)
                 )
 
             # If the request can expire, it means we should cache it
             # in the meantime.
-            elif 'expires' in response_headers:
-                if response_headers['expires']:
-                    logger.debug('Caching b/c of expires header')
+            elif "expires" in response_headers:
+                if response_headers["expires"]:
+                    logger.debug("Caching b/c of expires header")
                     self.cache.set(
-                        cache_url,
-                        self.serializer.dumps(request, response, body=body),
+                        cache_url, self.serializer.dumps(request, response, 
body=body)
                     )
 
     def update_cached_response(self, request, response):
@@ -336,10 +335,7 @@
         """
         cache_url = self.cache_url(request.url)
 
-        cached_response = self.serializer.loads(
-            request,
-            self.cache.get(cache_url)
-        )
+        cached_response = self.serializer.loads(request, 
self.cache.get(cache_url))
 
         if not cached_response:
             # we didn't have a cached response
@@ -352,22 +348,20 @@
         # the cached body invalid. But... just in case, we'll be sure
         # to strip out ones we know that might be problmatic due to
         # typical assumptions.
-        excluded_headers = [
-            "content-length",
-        ]
+        excluded_headers = ["content-length"]
 
         cached_response.headers.update(
-            dict((k, v) for k, v in response.headers.items()
-                 if k.lower() not in excluded_headers)
+            dict(
+                (k, v)
+                for k, v in response.headers.items()
+                if k.lower() not in excluded_headers
+            )
         )
 
         # we want a 200 b/c we have content via the cache
         cached_response.status = 200
 
         # update our cache
-        self.cache.set(
-            cache_url,
-            self.serializer.dumps(request, cached_response),
-        )
+        self.cache.set(cache_url, self.serializer.dumps(request, 
cached_response))
 
         return cached_response
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/cachecontrol/filewrapper.py 
new/CacheControl-0.12.5/cachecontrol/filewrapper.py
--- old/CacheControl-0.12.4/cachecontrol/filewrapper.py 2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/filewrapper.py 2018-06-07 
16:52:42.000000000 +0200
@@ -27,17 +27,19 @@
         # self.__fp hasn't been set.
         #
         # [0] 
https://docs.python.org/2/reference/expressions.html#atom-identifiers
-        fp = self.__getattribute__('_CallbackFileWrapper__fp')
+        fp = self.__getattribute__("_CallbackFileWrapper__fp")
         return getattr(fp, name)
 
     def __is_fp_closed(self):
         try:
             return self.__fp.fp is None
+
         except AttributeError:
             pass
 
         try:
             return self.__fp.closed
+
         except AttributeError:
             pass
 
@@ -66,7 +68,7 @@
 
     def _safe_read(self, amt):
         data = self.__fp._safe_read(amt)
-        if amt == 2 and data == b'\r\n':
+        if amt == 2 and data == b"\r\n":
             # urllib executes this read to toss the CRLF at the end
             # of the chunk.
             return data
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/cachecontrol/heuristics.py 
new/CacheControl-0.12.5/cachecontrol/heuristics.py
--- old/CacheControl-0.12.4/cachecontrol/heuristics.py  2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/heuristics.py  2018-06-07 
16:52:42.000000000 +0200
@@ -46,7 +46,7 @@
             response.headers.update(updated_headers)
             warning_header_value = self.warning(response)
             if warning_header_value is not None:
-                response.headers.update({'Warning': warning_header_value})
+                response.headers.update({"Warning": warning_header_value})
 
         return response
 
@@ -56,15 +56,15 @@
     Cache the response by providing an expires 1 day in the
     future.
     """
+
     def update_headers(self, response):
         headers = {}
 
-        if 'expires' not in response.headers:
-            date = parsedate(response.headers['date'])
-            expires = expire_after(timedelta(days=1),
-                                   date=datetime(*date[:6]))
-            headers['expires'] = datetime_to_header(expires)
-            headers['cache-control'] = 'public'
+        if "expires" not in response.headers:
+            date = parsedate(response.headers["date"])
+            expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
+            headers["expires"] = datetime_to_header(expires)
+            headers["cache-control"] = "public"
         return headers
 
 
@@ -78,13 +78,10 @@
 
     def update_headers(self, response):
         expires = expire_after(self.delta)
-        return {
-            'expires': datetime_to_header(expires),
-            'cache-control': 'public',
-        }
+        return {"expires": datetime_to_header(expires), "cache-control": "public"}
 
     def warning(self, response):
-        tmpl = '110 - Automatically cached for %s. Response might be stale'
+        tmpl = "110 - Automatically cached for %s. Response might be stale"
         return tmpl % self.delta
 
 
@@ -100,27 +97,27 @@
     
http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
     Unlike mozilla we limit this to 24-hr.
     """
-    cacheable_by_default_statuses = set([
+    cacheable_by_default_statuses = {
         200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
-    ])
+    }
 
     def update_headers(self, resp):
         headers = resp.headers
 
-        if 'expires' in headers:
+        if "expires" in headers:
             return {}
 
-        if 'cache-control' in headers and headers['cache-control'] != 'public':
+        if "cache-control" in headers and headers["cache-control"] != "public":
             return {}
 
         if resp.status not in self.cacheable_by_default_statuses:
             return {}
 
-        if 'date' not in headers or 'last-modified' not in headers:
+        if "date" not in headers or "last-modified" not in headers:
             return {}
 
-        date = calendar.timegm(parsedate_tz(headers['date']))
-        last_modified = parsedate(headers['last-modified'])
+        date = calendar.timegm(parsedate_tz(headers["date"]))
+        last_modified = parsedate(headers["last-modified"])
         if date is None or last_modified is None:
             return {}
 
@@ -132,7 +129,7 @@
             return {}
 
         expires = date + freshness_lifetime
-        return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}
+        return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
 
     def warning(self, resp):
         return None
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/cachecontrol/serialize.py 
new/CacheControl-0.12.5/cachecontrol/serialize.py
--- old/CacheControl-0.12.4/cachecontrol/serialize.py   2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/serialize.py   2018-06-07 
16:52:42.000000000 +0200
@@ -48,23 +48,22 @@
             u"response": {
                 u"body": body,
                 u"headers": dict(
-                    (text_type(k), text_type(v))
-                    for k, v in response.headers.items()
+                    (text_type(k), text_type(v)) for k, v in response.headers.items()
                 ),
                 u"status": response.status,
                 u"version": response.version,
                 u"reason": text_type(response.reason),
                 u"strict": response.strict,
                 u"decode_content": response.decode_content,
-            },
+            }
         }
 
         # Construct our vary headers
         data[u"vary"] = {}
         if u"vary" in response_headers:
-            varied_headers = response_headers[u'vary'].split(',')
+            varied_headers = response_headers[u"vary"].split(",")
             for header in varied_headers:
-                header = header.strip()
+                header = text_type(header).strip()
                 header_value = request.headers.get(header, None)
                 if header_value is not None:
                     header_value = text_type(header_value)
@@ -95,7 +94,8 @@
 
         # Dispatch to the actual load method for the given version
         try:
-            return getattr(self, "_loads_v{0}".format(ver))(request, data)
+            return getattr(self, "_loads_v{}".format(ver))(request, data)
+
         except AttributeError:
             # This is a version we don't have a loads function for, so we'll
             # just treat it as a miss and return None
@@ -118,11 +118,11 @@
 
         body_raw = cached["response"].pop("body")
 
-        headers = CaseInsensitiveDict(data=cached['response']['headers'])
-        if headers.get('transfer-encoding', '') == 'chunked':
-            headers.pop('transfer-encoding')
+        headers = CaseInsensitiveDict(data=cached["response"]["headers"])
+        if headers.get("transfer-encoding", "") == "chunked":
+            headers.pop("transfer-encoding")
 
-        cached['response']['headers'] = headers
+        cached["response"]["headers"] = headers
 
         try:
             body = io.BytesIO(body_raw)
@@ -133,13 +133,9 @@
             # fail with:
             #
             #     TypeError: 'str' does not support the buffer interface
-            body = io.BytesIO(body_raw.encode('utf8'))
+            body = io.BytesIO(body_raw.encode("utf8"))
 
-        return HTTPResponse(
-            body=body,
-            preload_content=False,
-            **cached["response"]
-        )
+        return HTTPResponse(body=body, preload_content=False, **cached["response"])
 
     def _loads_v0(self, request, data):
         # The original legacy cache data. This doesn't contain enough
@@ -162,16 +158,12 @@
             return
 
         # We need to decode the items that we've base64 encoded
-        cached["response"]["body"] = _b64_decode_bytes(
-            cached["response"]["body"]
-        )
+        cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
         cached["response"]["headers"] = dict(
             (_b64_decode_str(k), _b64_decode_str(v))
             for k, v in cached["response"]["headers"].items()
         )
-        cached["response"]["reason"] = _b64_decode_str(
-            cached["response"]["reason"],
-        )
+        cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
         cached["vary"] = dict(
             (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
             for k, v in cached["vary"].items()
@@ -187,7 +179,7 @@
 
     def _loads_v4(self, request, data):
         try:
-            cached = msgpack.loads(data, encoding='utf-8')
+            cached = msgpack.loads(data, encoding="utf-8")
         except ValueError:
             return
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/cachecontrol/wrapper.py 
new/CacheControl-0.12.5/cachecontrol/wrapper.py
--- old/CacheControl-0.12.4/cachecontrol/wrapper.py     2018-01-27 
01:03:49.000000000 +0100
+++ new/CacheControl-0.12.5/cachecontrol/wrapper.py     2018-06-07 
16:52:42.000000000 +0200
@@ -2,14 +2,16 @@
 from .cache import DictCache
 
 
-def CacheControl(sess,
-                 cache=None,
-                 cache_etags=True,
-                 serializer=None,
-                 heuristic=None,
-                 controller_class=None,
-                 adapter_class=None,
-                 cacheable_methods=None):
+def CacheControl(
+    sess,
+    cache=None,
+    cache_etags=True,
+    serializer=None,
+    heuristic=None,
+    controller_class=None,
+    adapter_class=None,
+    cacheable_methods=None,
+):
 
     cache = cache or DictCache()
     adapter_class = adapter_class or CacheControlAdapter
@@ -19,9 +21,9 @@
         serializer=serializer,
         heuristic=heuristic,
         controller_class=controller_class,
-        cacheable_methods=cacheable_methods
+        cacheable_methods=cacheable_methods,
     )
-    sess.mount('http://', adapter)
-    sess.mount('https://', adapter)
+    sess.mount("http://", adapter)
+    sess.mount("https://", adapter)
 
     return sess
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CacheControl-0.12.4/setup.py 
new/CacheControl-0.12.5/setup.py
--- old/CacheControl-0.12.4/setup.py    2018-01-27 01:28:19.000000000 +0100
+++ new/CacheControl-0.12.5/setup.py    2018-06-07 16:53:04.000000000 +0200
@@ -1,50 +1,41 @@
 import setuptools
 
-long_description = open('README.rst').read()
+long_description = open("README.rst").read()
 
-VERSION = '0.12.4'
+VERSION = "0.12.5"
 
 setup_params = dict(
-    name='CacheControl',
+    name="CacheControl",
     version=VERSION,
-    author='Eric Larson',
-    author_email='[email protected]',
-    url='https://github.com/ionrock/cachecontrol',
-    keywords='requests http caching web',
+    author="Eric Larson",
+    author_email="[email protected]",
+    url="https://github.com/ionrock/cachecontrol",
+    keywords="requests http caching web",
     packages=setuptools.find_packages(),
-    package_data={'': ['LICENSE.txt']},
-    package_dir={'cachecontrol': 'cachecontrol'},
+    package_data={"": ["LICENSE.txt"]},
+    package_dir={"cachecontrol": "cachecontrol"},
     include_package_data=True,
-    description='httplib2 caching for requests',
+    description="httplib2 caching for requests",
     long_description=long_description,
-    install_requires=[
-        'requests',
-        'msgpack-python',
-    ],
-    extras_require={
-        'filecache': ['lockfile>=0.9'],
-        'redis': ['redis>=2.10.5'],
-    },
-    entry_points={
-        'console_scripts': [
-            'doesitcache = cachecontrol._cmd:main',
-        ]
-    },
+    install_requires=["requests", "msgpack"],
+    extras_require={"filecache": ["lockfile>=0.9"], "redis": ["redis>=2.10.5"]},
+    entry_points={"console_scripts": ["doesitcache = cachecontrol._cmd:main"]},
+    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
     classifiers=[
-        'Development Status :: 4 - Beta',
-        'Environment :: Web Environment',
-        'License :: OSI Approved :: Apache Software License',
-        'Operating System :: OS Independent',
-        'Programming Language :: Python :: 2.6',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.2',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
-        'Topic :: Internet :: WWW/HTTP',
+        "Development Status :: 4 - Beta",
+        "Environment :: Web Environment",
+        "License :: OSI Approved :: Apache Software License",
+        "Operating System :: OS Independent",
+        "Programming Language :: Python :: 2",
+        "Programming Language :: Python :: 2.7",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.4",
+        "Programming Language :: Python :: 3.5",
+        "Programming Language :: Python :: 3.6",
+        "Topic :: Internet :: WWW/HTTP",
     ],
 )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     setuptools.setup(**setup_params)


Reply via email to