Hello community,

Here is the log from the commit of package python-requests-cache for
openSUSE:Factory, checked in at 2019-05-07 23:20:09
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-requests-cache (Old)
 and      /work/SRC/openSUSE:Factory/.python-requests-cache.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-requests-cache"

Tue May  7 23:20:09 2019 rev:3 rq:701310 version:0.5.0

Changes:
--------
--- 
/work/SRC/openSUSE:Factory/python-requests-cache/python-requests-cache.changes  
    2018-12-24 11:43:11.297356566 +0100
+++ 
/work/SRC/openSUSE:Factory/.python-requests-cache.new.5148/python-requests-cache.changes
    2019-05-07 23:20:11.069082128 +0200
@@ -1,0 +2,11 @@
+Tue May  7 12:27:11 UTC 2019 - [email protected]
+
+- version update to 0.5.0
+  * Add gridfs support, thanks to @chengguangnan
+  * Add dynamodb support, thanks to @ar90n
+  * Add response filter #104, thanks to @christopher-dG
+  * Fix bulk_commit #78
+  * Fix remove_expired_responses missed in __init__.py #93
+  * Fix deprecation warnings #122, thanks to mbarkhau
+
+-------------------------------------------------------------------

Old:
----
  requests-cache-0.4.13.tar.gz

New:
----
  requests-cache-0.5.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-requests-cache.spec ++++++
--- /var/tmp/diff_new_pack.0GyJRj/_old  2019-05-07 23:20:11.769083726 +0200
+++ /var/tmp/diff_new_pack.0GyJRj/_new  2019-05-07 23:20:11.769083726 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-requests-cache
 #
-# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -18,7 +18,7 @@
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-requests-cache
-Version:        0.4.13
+Version:        0.5.0
 Release:        0
 Summary:        Persistent cache for requests library
 License:        BSD-2-Clause

++++++ requests-cache-0.4.13.tar.gz -> requests-cache-0.5.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/HISTORY.rst 
new/requests-cache-0.5.0/HISTORY.rst
--- old/requests-cache-0.4.13/HISTORY.rst       2016-12-23 10:35:51.000000000 
+0100
+++ new/requests-cache-0.5.0/HISTORY.rst        2019-04-18 20:04:09.000000000 
+0200
@@ -3,6 +3,18 @@
 History
 -------
 
+0.5.0 (2019-04-18)
+++++++++++++++++++
+Project is now added to https://www.codeshelter.co/
+
+* Add gridfs support, thanks to @chengguangnan 
+* Add dynamodb support, thanks to @ar90n
+* Add response filter #104, thanks to @christopher-dG
+* Fix bulk_commit #78
+* Fix remove_expired_responses missed in __init__.py #93
+* Fix deprecation warnings #122, thanks to mbarkhau 
+
+
 0.4.13 (2016-12-23)
 +++++++++++++++++++
 * Support PyMongo3, thanks to @craigls #72
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/PKG-INFO 
new/requests-cache-0.5.0/PKG-INFO
--- old/requests-cache-0.4.13/PKG-INFO  2016-12-23 10:36:39.000000000 +0100
+++ new/requests-cache-0.5.0/PKG-INFO   2019-04-18 20:06:34.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: requests-cache
-Version: 0.4.13
+Version: 0.5.0
 Summary: Persistent cache for requests library
 Home-page: https://github.com/reclosedev/requests-cache
 Author: Roman Haritonov
@@ -20,6 +20,7 @@
             :target: 
https://coveralls.io/github/reclosedev/requests-cache?branch=master
         
         
+        
         Usage example
         -------------
         
@@ -70,6 +71,18 @@
         History
         -------
         
+        0.5.0 (2019-04-18)
+        ++++++++++++++++++
+        Project is now added to https://www.codeshelter.co/
+        
+        * Add gridfs support, thanks to @chengguangnan 
+        * Add dynamodb support, thanks to @ar90n
+        * Add response filter #104, thanks to @christopher-dG
+        * Fix bulk_commit #78
+        * Fix remove_expired_responses missed in __init__.py #93
+        * Fix deprecation warnings #122, thanks to mbarkhau 
+        
+        
         0.4.13 (2016-12-23)
         +++++++++++++++++++
         * Support PyMongo3, thanks to @craigls #72
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/README.rst 
new/requests-cache-0.5.0/README.rst
--- old/requests-cache-0.4.13/README.rst        2016-12-23 10:28:59.000000000 
+0100
+++ new/requests-cache-0.5.0/README.rst 2019-04-13 11:28:41.000000000 +0200
@@ -12,6 +12,7 @@
     :target: 
https://coveralls.io/github/reclosedev/requests-cache?branch=master
 
 
+
 Usage example
 -------------
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/docs/user_guide.rst 
new/requests-cache-0.5.0/docs/user_guide.rst
--- old/requests-cache-0.4.13/docs/user_guide.rst       2016-12-23 
10:28:59.000000000 +0100
+++ new/requests-cache-0.5.0/docs/user_guide.rst        2019-04-13 
10:14:31.000000000 +0200
@@ -152,7 +152,7 @@
     expire_after = timedelta(hours=1)
     requests_cache.install_cache(expire_after=expire_after)
     ...
-    requests_cache.remove_expired_responses()
+    requests_cache.core.remove_expired_responses()
     # or
     remove_old_entries.get_cache().remove_old_entries(datetime.utcnow() - 
expire_after)
     # when used as session
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/requests_cache/__init__.py 
new/requests-cache-0.5.0/requests_cache/__init__.py
--- old/requests-cache-0.4.13/requests_cache/__init__.py        2016-12-23 
10:35:51.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache/__init__.py 2019-04-18 
20:04:55.000000000 +0200
@@ -23,9 +23,9 @@
     :license: BSD, see LICENSE for more details.
 """
 __docformat__ = 'restructuredtext'
-__version__ = '0.4.13'
+__version__ = '0.5.0'
 
 from .core import(
     CachedSession, install_cache, uninstall_cache,
-    disabled, enabled, get_cache, clear, configure
+    disabled, enabled, get_cache, clear, configure, remove_expired_responses
 )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/requests-cache-0.4.13/requests_cache/backends/__init__.py 
new/requests-cache-0.5.0/requests_cache/backends/__init__.py
--- old/requests-cache-0.4.13/requests_cache/backends/__init__.py       
2016-12-23 10:28:59.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache/backends/__init__.py        
2019-04-13 10:14:31.000000000 +0200
@@ -17,7 +17,8 @@
 _backend_dependencies = {
     'sqlite': 'sqlite3',
     'mongo': 'pymongo',
-    'redis': 'redis'
+    'redis': 'redis',
+    'dynamodb': 'dynamodb'
 }
 
 try:
@@ -33,12 +34,24 @@
 except ImportError:
     MongoCache = None
 
+
+try:
+    from .gridfs import GridFSCache
+    registry['gridfs'] = GridFSCache
+except ImportError:
+    GridFSCache = None
+
 try:
     from .redis import RedisCache
     registry['redis'] = RedisCache
 except ImportError:
     RedisCache = None
 
+try:
+    from .dynamodb import DynamoDbCache
+    registry['dynamodb'] = DynamoDbCache
+except ImportError:
+    DynamoDbCache = None
 
 def create_backend(backend_name, cache_name, options):
     if isinstance(backend_name, BaseCache):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/requests-cache-0.4.13/requests_cache/backends/dynamodb.py 
new/requests-cache-0.5.0/requests_cache/backends/dynamodb.py
--- old/requests-cache-0.4.13/requests_cache/backends/dynamodb.py       
1970-01-01 01:00:00.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache/backends/dynamodb.py        
2019-04-13 10:14:31.000000000 +0200
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+    requests_cache.backends.dynamodb
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    ``dynamodb`` cache backend
+"""
+from .base import BaseCache
+from .storage.dynamodbdict import DynamoDbDict
+
+
+class DynamoDbCache(BaseCache):
+    """ ``dynamodb`` cache backend.
+    """
+    def __init__(self, table_name='requests-cache', **options):
+        """
+        :param namespace: dynamodb table name (default: ``'requests-cache'``)
+        :param connection: (optional) ``boto3.resource('dynamodb')``
+        """
+        super(DynamoDbCache, self).__init__(**options)
+        self.responses = DynamoDbDict(table_name, 'responses',
+                                      options.get('connection'),
+                                      options.get('endpont_url'),
+                                      options.get('region_name'),
+                                      options.get('read_capacity_units'),
+                                      options.get('write_capacity_units'))
+        self.keys_map = DynamoDbDict(table_name,
+                                     'urls',
+                                     self.responses.connection)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/requests-cache-0.4.13/requests_cache/backends/gridfs.py 
new/requests-cache-0.5.0/requests_cache/backends/gridfs.py
--- old/requests-cache-0.4.13/requests_cache/backends/gridfs.py 1970-01-01 
01:00:00.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache/backends/gridfs.py  2019-04-13 
10:14:31.000000000 +0200
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+    requests_cache.backends.gridfs
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    ``gridfs`` cache backend
+    
+    Use MongoDB GridFS to support documents greater than 16MB.
+    
+    Usage:
+        requests_cache.install_cache(backend='gridfs')
+    
+    Or:
+        from pymongo import MongoClient
+        requests_cache.install_cache(backend='gridfs', 
connection=MongoClient('another-host.local'))
+"""
+from .base import BaseCache
+from .storage.mongodict import MongoDict
+from .storage.gridfspickledict import GridFSPickleDict
+
+
+class GridFSCache(BaseCache):
+    """ ``gridfs`` cache backend.
+    """
+    def __init__(self, db_name, **options):
+        """
+        :param db_name: database name
+        :param connection: (optional) ``pymongo.Connection``
+        """
+        super(GridFSCache, self).__init__(**options)
+        self.responses = GridFSPickleDict(db_name, options.get('connection'))
+        self.keys_map = MongoDict(db_name, 'http_redirects', 
self.responses.connection)
+
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/requests-cache-0.4.13/requests_cache/backends/storage/dbdict.py 
new/requests-cache-0.5.0/requests_cache/backends/storage/dbdict.py
--- old/requests-cache-0.4.13/requests_cache/backends/storage/dbdict.py 
2016-12-23 10:28:59.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache/backends/storage/dbdict.py  
2019-04-13 11:15:37.000000000 +0200
@@ -6,7 +6,11 @@
 
     Dictionary-like objects for saving large data sets to `sqlite` database
 """
-from collections import MutableMapping
+try:
+    from collections.abc import MutableMapping
+except ImportError:
+    from collections import MutableMapping
+
 import sqlite3 as sqlite
 from contextlib import contextmanager
 try:
@@ -48,7 +52,7 @@
         self.table_name = table_name
         self.fast_save = fast_save
         
-        #: Transactions can be commited if this property is set to `True`
+        #: Transactions can be committed if this property is set to `True`
         self.can_commit = True
 
         
@@ -108,8 +112,9 @@
         finally:
             self._bulk_commit = False
             self.can_commit = True
-            self._pending_connection.close()
-            self._pending_connection = None
+            if self._pending_connection is not None:
+                self._pending_connection.close()
+                self._pending_connection = None
 
     def __getitem__(self, key):
         with self.connection() as con:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/requests-cache-0.4.13/requests_cache/backends/storage/dynamodbdict.py 
new/requests-cache-0.5.0/requests_cache/backends/storage/dynamodbdict.py
--- old/requests-cache-0.4.13/requests_cache/backends/storage/dynamodbdict.py   
1970-01-01 01:00:00.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache/backends/storage/dynamodbdict.py    
2019-04-13 10:14:31.000000000 +0200
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+    requests_cache.backends.dynamodbdict
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Dictionary-like objects for saving large data sets to ``dynamodb`` 
key-store
+"""
+from collections import MutableMapping
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+import boto3
+from boto3.dynamodb.conditions import Key, Attr
+from botocore.exceptions import ClientError
+
+
+class DynamoDbDict(MutableMapping):
+    """ DynamoDbDict - a dictionary-like interface for ``dynamodb`` key-stores
+    """
+
+    def __init__(self, table_name, namespace='dynamodb_dict_data',
+                 connection=None,
+                 endpoint_url=None,
+                 region_name='us-east-1',
+                 read_capacity_units=1,
+                 write_capacity_units=1):
+
+        """
+        The actual key name on the dynamodb server will be
+        ``namespace``:``namespace_name``
+
+        In order to deal with how dynamodb stores data/keys,
+        everything, i.e. keys and data, must be pickled.
+
+        :param table_name: table name to use
+        :param namespace_name: name of the hash map stored in dynamodb
+                                (default: dynamodb_dict_data)
+        :param connection: ``boto3.resource('dynamodb')`` instance.
+                           If it's ``None`` (default), a new connection with
+                           default options will be created
+        :param endpoint_url: url of dynamodb server.
+
+        """
+        self._self_key = namespace
+        if connection is not None:
+            self.connection = connection
+        else:
+            self.connection = boto3.resource('dynamodb',
+                                             endpoint_url=endpoint_url,
+                                             region_name=region_name)
+        try:
+            self.connection.create_table(
+                AttributeDefinitions=[
+                    {
+                        'AttributeName': 'namespace',
+                        'AttributeType': 'S',
+                    },
+                    {
+                        'AttributeName': 'key',
+                        'AttributeType': 'S',
+                    }
+                ],
+                TableName=table_name,
+                KeySchema=[
+                    {
+                        'AttributeName': 'namespace',
+                        'KeyType': 'HASH'
+                    },
+                    {
+                        'AttributeName': 'key',
+                        'KeyType': 'RANGE'
+                    }
+                ],
+                ProvisionedThroughput={
+                    'ReadCapacityUnits': read_capacity_units,
+                    'WriteCapacityUnits': write_capacity_units
+                }
+            )
+        except ClientError:
+            pass
+        self._table = self.connection.Table(table_name)
+        self._table.wait_until_exists()
+
+    def __getitem__(self, key):
+        composite_key = {'namespace': self._self_key, 'key': str(key)}
+        result = self._table.get_item(Key=composite_key)
+        if not 'Item' in result:
+            raise KeyError
+        return pickle.loads(result['Item']['value'].value)
+
+    def __setitem__(self, key, item):
+        item = {'namespace': self._self_key,
+                'key': str(key),
+                'value': pickle.dumps(item)}
+        self._table.put_item(Item=item)
+
+    def __delitem__(self, key):
+        composite_key = {'namespace': self._self_key, 'key': str(key)}
+        response = self._table.delete_item(Key=composite_key,
+                                           ReturnValues='ALL_OLD')
+        if not 'Attributes' in response:
+            raise KeyError
+
+    def __len__(self):
+        return self.__count_table()
+
+    def __iter__(self):
+        response = self.__scan_table()
+        for v in response['Items']:
+            yield pickle.loads(v['value'].value)
+
+    def clear(self):
+        response = self.__scan_table()
+        for v in response['Items']:
+            composite_key = {'namespace': v['namespace'], 'key': v['key']}
+            self._table.delete_item(Key=composite_key)
+
+    def __str__(self):
+        return str(dict(self.items()))
+
+    def __scan_table(self):
+        expression_attribute_values = {':Namespace': self._self_key}
+        expression_attribute_names = {'#N': 'namespace'}
+        key_condition_expression = '#N = :Namespace'
+        return 
self._table.query(ExpressionAttributeValues=expression_attribute_values,
+                                 
ExpressionAttributeNames=expression_attribute_names,
+                                 
KeyConditionExpression=key_condition_expression)
+    def __count_table(self):
+        expression_attribute_values = {':Namespace': self._self_key}
+        expression_attribute_names = {'#N': 'namespace'}
+        key_condition_expression = '#N = :Namespace'
+        return self._table.query(Select='COUNT',
+                                 
ExpressionAttributeValues=expression_attribute_values,
+                                 
ExpressionAttributeNames=expression_attribute_names,
+                                 
KeyConditionExpression=key_condition_expression)['Count']
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/requests-cache-0.4.13/requests_cache/backends/storage/gridfspickledict.py 
new/requests-cache-0.5.0/requests_cache/backends/storage/gridfspickledict.py
--- 
old/requests-cache-0.4.13/requests_cache/backends/storage/gridfspickledict.py   
    1970-01-01 01:00:00.000000000 +0100
+++ 
new/requests-cache-0.5.0/requests_cache/backends/storage/gridfspickledict.py    
    2019-04-13 10:14:31.000000000 +0200
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+    requests_cache.backends.mongodict
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Dictionary-like objects for saving large data sets to ``mongodb`` database
+"""
+
+from collections import MutableMapping
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+# Use PyMongo 3 if present
+try:
+    from pymongo import MongoClient
+except ImportError:
+    from pymongo import Connection as MongoClient
+
+from gridfs import GridFS
+
+class GridFSPickleDict(MutableMapping):
+    """ MongoDict - a dictionary-like interface for ``mongo`` database
+    """
+    def __init__(self, db_name, connection=None):
+        """
+        :param db_name: database name (be careful with production databases)
+        :param connection: ``pymongo.Connection`` instance. If it's ``None``
+                           (default) new connection with default options will
+                           be created
+        """
+        if connection is not None:
+            self.connection = connection
+        else:
+            self.connection = MongoClient()
+
+        self.db = self.connection[db_name]
+        self.fs = GridFS(self.db)
+
+    def __getitem__(self, key):
+        result = self.fs.find_one({'_id': key})
+        if result is None:
+            raise KeyError
+        return pickle.loads(bytes(result.read()))
+
+    def __setitem__(self, key, item):
+        self.__delitem__(key)
+        self.fs.put(pickle.dumps(item), **{'_id': key})
+
+    def __delitem__(self, key):
+        res = self.fs.find_one({'_id': key})
+        if res is not None:
+            self.fs.delete(res._id)
+
+    def __len__(self):
+        return self.db['fs.files'].count()
+
+    def __iter__(self):
+        for d in self.fs.find():
+            yield d._id
+
+    def clear(self):
+        self.db['fs.files'].drop()
+        self.db['fs.chunks'].drop()
+
+    def __str__(self):
+        return str(dict(self.items()))
+
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/requests_cache/core.py 
new/requests-cache-0.5.0/requests_cache/core.py
--- old/requests-cache-0.4.13/requests_cache/core.py    2016-12-23 
10:28:59.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache/core.py     2019-04-13 
11:15:37.000000000 +0200
@@ -34,13 +34,14 @@
 
     def __init__(self, cache_name='cache', backend=None, expire_after=None,
                  allowable_codes=(200,), allowable_methods=('GET',),
-                 old_data_on_error=False, **backend_options):
+                 filter_fn=lambda r: True, old_data_on_error=False,
+                 **backend_options):
         """
         :param cache_name: for ``sqlite`` backend: cache file will start with 
this prefix,
                            e.g ``cache.sqlite``
 
                            for ``mongodb``: it's used as database name
-                           
+
                            for ``redis``: it's used as the namespace. This 
means all keys
                            are prefixed with ``'cache_name:'``
         :param backend: cache backend name e.g ``'sqlite'``, ``'mongodb'``, 
``'redis'``, ``'memory'``.
@@ -54,8 +55,12 @@
         :type allowable_codes: tuple
         :param allowable_methods: cache only requests of this methods 
(default: 'GET')
         :type allowable_methods: tuple
+        :param filter_fn: function to apply to each response; the response is 
only cached if
+                          this returns `True`. Note that this function does 
not not modify
+                          the cached response in any way.
+        :type filter_fn: function
         :kwarg backend_options: options for chosen backend. See corresponding
-                                :ref:`sqlite <backends_sqlite>`, :ref:`mongo 
<backends_mongo>` 
+                                :ref:`sqlite <backends_sqlite>`, :ref:`mongo 
<backends_mongo>`
                                 and :ref:`redis <backends_redis>` backends API 
documentation
         :param include_get_headers: If `True` headers will be part of cache 
key.
                                     E.g. after get('some_link', 
headers={'Accept':'application/json'})
@@ -74,6 +79,7 @@
 
         self._cache_allowable_codes = allowable_codes
         self._cache_allowable_methods = allowable_methods
+        self._filter_fn = filter_fn
         self._return_old_data_on_error = old_data_on_error
         self._is_cache_disabled = False
         super(CachedSession, self).__init__()
@@ -129,6 +135,14 @@
             return response
 
         main_key = self.cache.create_key(response.request)
+
+        # If self._return_old_data_on_error is set,
+        # responses won't always have the from_cache attribute.
+        if (hasattr(response, "from_cache") and not response.from_cache
+            and self._filter_fn(response) is not True):
+            self.cache.delete(main_key)
+            return response
+
         for r in response.history:
             self.cache.add_key_mapping(
                 self.cache.create_key(r.request), main_key
@@ -170,7 +184,8 @@
 
 def install_cache(cache_name='cache', backend=None, expire_after=None,
                   allowable_codes=(200,), allowable_methods=('GET',),
-                  session_factory=CachedSession, **backend_options):
+                  filter_fn=lambda r: True, session_factory=CachedSession,
+                  **backend_options):
     """
     Installs cache for all ``Requests`` requests by monkey-patching ``Session``
 
@@ -189,6 +204,7 @@
                 expire_after=expire_after,
                 allowable_codes=allowable_codes,
                 allowable_methods=allowable_methods,
+                filter_fn=filter_fn,
                 **backend_options
             )
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/requests-cache-0.4.13/requests_cache.egg-info/PKG-INFO 
new/requests-cache-0.5.0/requests_cache.egg-info/PKG-INFO
--- old/requests-cache-0.4.13/requests_cache.egg-info/PKG-INFO  2016-12-23 
10:36:39.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache.egg-info/PKG-INFO   2019-04-18 
20:06:34.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: requests-cache
-Version: 0.4.13
+Version: 0.5.0
 Summary: Persistent cache for requests library
 Home-page: https://github.com/reclosedev/requests-cache
 Author: Roman Haritonov
@@ -20,6 +20,7 @@
             :target: 
https://coveralls.io/github/reclosedev/requests-cache?branch=master
         
         
+        
         Usage example
         -------------
         
@@ -70,6 +71,18 @@
         History
         -------
         
+        0.5.0 (2019-04-18)
+        ++++++++++++++++++
+        Project is now added to https://www.codeshelter.co/
+        
+        * Add gridfs support, thanks to @chengguangnan 
+        * Add dynamodb support, thanks to @ar90n
+        * Add response filter #104, thanks to @christopher-dG
+        * Fix bulk_commit #78
+        * Fix remove_expired_responses missed in __init__.py #93
+        * Fix deprecation warnings #122, thanks to mbarkhau 
+        
+        
         0.4.13 (2016-12-23)
         +++++++++++++++++++
         * Support PyMongo3, thanks to @craigls #72
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/requests-cache-0.4.13/requests_cache.egg-info/SOURCES.txt 
new/requests-cache-0.5.0/requests_cache.egg-info/SOURCES.txt
--- old/requests-cache-0.4.13/requests_cache.egg-info/SOURCES.txt       
2016-12-23 10:36:39.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache.egg-info/SOURCES.txt        
2019-04-18 20:06:34.000000000 +0200
@@ -25,17 +25,23 @@
 requests_cache.egg-info/top_level.txt
 requests_cache/backends/__init__.py
 requests_cache/backends/base.py
+requests_cache/backends/dynamodb.py
+requests_cache/backends/gridfs.py
 requests_cache/backends/mongo.py
 requests_cache/backends/redis.py
 requests_cache/backends/sqlite.py
 requests_cache/backends/storage/__init__.py
 requests_cache/backends/storage/dbdict.py
+requests_cache/backends/storage/dynamodbdict.py
+requests_cache/backends/storage/gridfspickledict.py
 requests_cache/backends/storage/mongodict.py
 requests_cache/backends/storage/redisdict.py
 tests/__init__.py
 tests/test_cache.py
 tests/test_custom_dict.py
 tests/test_dbdict.py
+tests/test_dynamodbdict.py
+tests/test_gridfsdict.py
 tests/test_mongodict.py
 tests/test_monkey_patch.py
 tests/test_redisdict.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/requests-cache-0.4.13/requests_cache.egg-info/requires.txt 
new/requests-cache-0.5.0/requests_cache.egg-info/requires.txt
--- old/requests-cache-0.4.13/requests_cache.egg-info/requires.txt      
2016-12-23 10:36:39.000000000 +0100
+++ new/requests-cache-0.5.0/requests_cache.egg-info/requires.txt       
2019-04-18 20:06:34.000000000 +0200
@@ -1 +1 @@
-requests>=1.1.0
\ No newline at end of file
+requests>=1.1.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/setup.cfg 
new/requests-cache-0.5.0/setup.cfg
--- old/requests-cache-0.4.13/setup.cfg 2016-12-23 10:36:39.000000000 +0100
+++ new/requests-cache-0.5.0/setup.cfg  2019-04-18 20:06:34.000000000 +0200
@@ -4,5 +4,4 @@
 [egg_info]
 tag_build = 
 tag_date = 0
-tag_svn_revision = 0
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/setup.py 
new/requests-cache-0.5.0/setup.py
--- old/requests-cache-0.4.13/setup.py  2016-12-23 10:35:51.000000000 +0100
+++ new/requests-cache-0.5.0/setup.py   2019-04-18 20:04:34.000000000 +0200
@@ -20,7 +20,7 @@
     packages=['requests_cache',
               'requests_cache.backends',
               'requests_cache.backends.storage'],
-    version='0.4.13',
+    version='0.5.0',
     description='Persistent cache for requests library',
     author='Roman Haritonov',
     author_email='[email protected]',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/tests/test_cache.py 
new/requests-cache-0.5.0/tests/test_cache.py
--- old/requests-cache-0.4.13/tests/test_cache.py       2016-12-23 
10:28:59.000000000 +0100
+++ new/requests-cache-0.5.0/tests/test_cache.py        2019-04-13 
10:14:31.000000000 +0200
@@ -230,6 +230,7 @@
             r = self.s.get(httpbin('get'), params=p)
             self.assert_(self.s.cache.has_url( httpbin('get?arg1=value1')))
 
+    @unittest.skipIf(sys.version_info < (2, 7), "No https in 2.6")
     def test_https_support(self):
         n = 10
         delay = 1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/tests/test_dbdict.py 
new/requests-cache-0.5.0/tests/test_dbdict.py
--- old/requests-cache-0.4.13/tests/test_dbdict.py      2016-12-23 
10:28:59.000000000 +0100
+++ new/requests-cache-0.5.0/tests/test_dbdict.py       2019-04-13 
10:14:31.000000000 +0200
@@ -18,6 +18,8 @@
 
     def test_bulk_commit(self):
         d = DbDict(self.NAMESPACE, self.TABLES[0])
+        with d.bulk_commit():
+            pass
         d.clear()
         n = 1000
         with d.bulk_commit():
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/tests/test_dynamodbdict.py 
new/requests-cache-0.5.0/tests/test_dynamodbdict.py
--- old/requests-cache-0.4.13/tests/test_dynamodbdict.py        1970-01-01 
01:00:00.000000000 +0100
+++ new/requests-cache-0.5.0/tests/test_dynamodbdict.py 2019-04-13 
10:14:31.000000000 +0200
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Path hack
+import os, sys
+sys.path.insert(0, os.path.abspath('..'))
+
+try:
+    import unittest2 as unittest
+except ImportError:
+    import unittest
+
+from tests.test_custom_dict import BaseCustomDictTestCase
+try:
+    from requests_cache.backends.storage.dynamodbdict import DynamoDbDict
+except ImportError:
+    print("DynamoDb not installed")
+else:
+
+    class WrapDynamoDbDict(DynamoDbDict):
+        def __init__(self, namespace, collection_name='dynamodb_dict_data', 
**options):
+            options['endpoint_url'] = os.environ['DYNAMODB_ENDPOINT_URL'] if 
'DYNAMODB_ENDPOINT_URL' in os.environ else None
+            super(WrapDynamoDbDict,self).__init__( namespace, collection_name, 
**options)
+
+    class DynamoDbDictTestCase(BaseCustomDictTestCase, unittest.TestCase):
+        dict_class = WrapDynamoDbDict
+        pickled_dict_class = WrapDynamoDbDict
+
+    if __name__ == '__main__':
+        unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/requests-cache-0.4.13/tests/test_gridfsdict.py 
new/requests-cache-0.5.0/tests/test_gridfsdict.py
--- old/requests-cache-0.4.13/tests/test_gridfsdict.py  1970-01-01 
01:00:00.000000000 +0100
+++ new/requests-cache-0.5.0/tests/test_gridfsdict.py   2019-04-13 
10:14:31.000000000 +0200
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Path hack
+import os, sys
+sys.path.insert(0, os.path.abspath('..'))
+
+try:
+    import unittest2 as unittest
+except ImportError:
+    import unittest
+
+from tests.test_custom_dict import BaseCustomDictTestCase
+try:
+    from requests_cache.backends.storage.mongodict import MongoDict
+    from requests_cache.backends.storage.gridfspickledict import 
GridFSPickleDict
+
+except ImportError:
+    print("pymongo not installed")
+else:
+    class MongoDictTestCase(BaseCustomDictTestCase, unittest.TestCase):
+        dict_class = MongoDict
+        pickled_dict_class = GridFSPickleDict
+
+    if __name__ == '__main__':
+        unittest.main()


Reply via email to