Hello community,
here is the log from the commit of package python-requests-futures for
openSUSE:Factory checked in at 2019-05-17 23:42:26
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-requests-futures (Old)
and /work/SRC/openSUSE:Factory/.python-requests-futures.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-requests-futures"
Fri May 17 23:42:26 2019 rev:3 rq:701329 version:0.9.9
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-requests-futures/python-requests-futures.changes 2018-01-06 18:48:21.773548366 +0100
+++ /work/SRC/openSUSE:Factory/.python-requests-futures.new.5148/python-requests-futures.changes 2019-05-17 23:42:27.689935942 +0200
@@ -1,0 +2,6 @@
+Tue May 7 12:54:06 UTC 2019 - [email protected]
+
+- version update to 0.9.9
+ * no upstream changelog
+
+-------------------------------------------------------------------
Old:
----
requests-futures-0.9.7.tar.gz
New:
----
requests-futures-0.9.9.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-requests-futures.spec ++++++
--- /var/tmp/diff_new_pack.4gDrlX/_old 2019-05-17 23:42:28.837935312 +0200
+++ /var/tmp/diff_new_pack.4gDrlX/_new 2019-05-17 23:42:28.877935290 +0200
@@ -1,7 +1,7 @@
#
# spec file for package python-requests-futures
#
-# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -12,31 +12,28 @@
# license that conforms to the Open Source Definition (Version 1.9)
# published by the Open Source Initiative.
-# Please submit bugfixes or comments via http://bugs.opensuse.org/
+# Please submit bugfixes or comments via https://bugs.opensuse.org/
#
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
-
%define short_name requests-futures
Name: python-%{short_name}
-Version: 0.9.7
+Version: 0.9.9
Release: 0
Summary: Asynchronous Python HTTP Requests for Humans using Futures
License: Apache-2.0
Group: Development/Languages/Python
-Url: https://github.com/ross/%{short_name}
-Source: https://files.pythonhosted.org/packages/2c/f0/d9a6d4472286405956dd5ac6279fe932a86151df9816bc35afe601495819/%{short_name}-%{version}.tar.gz
+URL: https://github.com/ross/%{short_name}
+Source: https://files.pythonhosted.org/packages/source/r/%{short_name}/%{short_name}-%{version}.tar.gz
BuildRequires: %{python_module setuptools}
BuildRequires: fdupes
BuildRequires: python-rpm-macros
-BuildRoot: %{_tmppath}/%{name}-%{version}-build
+Requires: python-requests >= 1.2.0
BuildArch: noarch
%ifpython2
Requires: python-futures >= 2.1.3
%endif
-Requires: python-requests >= 1.2.0
-
%python_subpackages
%description
@@ -53,9 +50,13 @@
%python_install
%python_expand %fdupes -s %{buildroot}%{$python_sitelib}
-%files %python_files
-%defattr(-,root,root,-)
-%doc LICENSE README.rst
+%check
+# online tests
+# %%python_exec -m unittest test_requests_futures
+
+%files %{python_files}
+%license LICENSE
+%doc README.rst
%{python_sitelib}/*
%changelog
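Note on the new %check section above: the upstream test suite is only listed as a comment because the tests talk to httpbin.org over the network and cannot run inside the build environment. As an illustration only (not part of the package), the suite honours the HTTPBIN_URL environment variable, so a packager could run it locally against a self-hosted httpbin roughly like this; the localhost URL is a hypothetical example:

.. code-block:: python

    # Illustrative only: run the upstream tests outside the RPM build.
    # test_requests_futures.py falls back to the public http://httpbin.org/
    # when HTTPBIN_URL is unset.
    import os
    import unittest

    os.environ.setdefault('HTTPBIN_URL', 'http://localhost:8080/')

    # loads test_requests_futures.py from the unpacked source tree
    suite = unittest.defaultTestLoader.loadTestsFromName('test_requests_futures')
    unittest.TextTestRunner(verbosity=2).run(suite)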
++++++ requests-futures-0.9.7.tar.gz -> requests-futures-0.9.9.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requests-futures-0.9.7/PKG-INFO new/requests-futures-0.9.9/PKG-INFO
--- old/requests-futures-0.9.7/PKG-INFO 2016-02-12 04:06:30.000000000 +0100
+++ new/requests-futures-0.9.9/PKG-INFO 2018-12-02 23:50:49.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: requests-futures
-Version: 0.9.7
+Version: 0.9.9
Summary: Asynchronous Python HTTP for Humans.
Home-page: https://github.com/ross/requests-futures
Author: Ross McFarland
@@ -45,7 +45,7 @@
session = FuturesSession()
# first request is started in background
future_one = session.get('http://httpbin.org/get')
- # second requests is started immediately
+ # second requests is started immediately
future_two = session.get('http://httpbin.org/get?foo=bar')
# wait for the first request to complete, if it hasn't already
response_one = future_one.result()
@@ -90,13 +90,31 @@
are shifted (thrown) to the future.result() call so try/except blocks should be
moved there.
+ Canceling queued requests (a.k.a cleaning up after yourself)
+ =========================
+
+ If you know that you won't be needing any additional responses from futures that
+ haven't yet resolved, it's a good idea to cancel those requests. You can do this
+ by using the session as a context manager:
+
+ .. code-block:: python
+
+ from requests_futures.sessions import FuturesSession
+ with FuturesSession(max_workers=1) as session:
+ future = session.get('https://httpbin.org/get')
+ future2 = session.get('https://httpbin.org/delay/10')
+ future3 = session.get('https://httpbin.org/delay/10')
+ response = future.result()
+
+ In this example, the second or third request will be skipped, saving time and
+ resources that would otherwise be wasted.
+
Working in the Background
=========================
- There is one additional parameter to the various request functions,
- background_callback, which allows you to work with the Response objects in the
- background thread. This can be useful for shifting work out of the foreground,
- for a simple example take json parsing.
+ Additional processing can be done in the background using requests's hooks_
+ functionality. This can be useful for shifting work out of the foreground, for
+ a simple example take json parsing.
.. code-block:: python
@@ -105,17 +123,136 @@
session = FuturesSession()
- def bg_cb(sess, resp):
+ def response_hook(resp, *args, **kwargs):
# parse the json storing the result on the response object
resp.data = resp.json()
- future = session.get('http://httpbin.org/get', background_callback=bg_cb)
+ future = session.get('http://httpbin.org/get', hooks={
+ 'response': response_hook,
+ })
# do some other stuff, send some more requests while this one works
response = future.result()
print('response status {0}'.format(response.status_code))
# data will have been attached to the response object in the background
pprint(response.data)
+ Hooks can also be applied to the session.
+
+ .. code-block:: python
+
+ from pprint import pprint
+ from requests_futures.sessions import FuturesSession
+
+ def response_hook(resp, *args, **kwargs):
+ # parse the json storing the result on the response object
+ resp.data = resp.json()
+
+ session = FuturesSession()
+ session.hooks['response'] = response_hook
+
+ future = session.get('http://httpbin.org/get')
+ # do some other stuff, send some more requests while this one works
+ response = future.result()
+ print('response status {0}'.format(response.status_code))
+ # data will have been attached to the response object in the background
+ pprint(response.data)
+
+ A more advanced example that adds an `elapsed` property to all requests.
+
+ .. code-block:: python
+
+ from pprint import pprint
+ from requests_futures.sessions import FuturesSession
+ from time import time
+
+
+ class ElapsedFuturesSession(FuturesSession):
+
+ def request(self, method, url, hooks={}, *args, **kwargs):
+ start = time()
+
+ def timing(r, *args, **kwargs):
+ r.elapsed = time() - start
+
+ try:
+ if isinstance(hooks['response'], (list, tuple)):
+ # needs to be first so we don't time other hooks execution
+ hooks['response'].prepend(timing)
+ else:
+ hooks['response'] = [timing, hooks['response']]
+ except KeyError:
+ hooks['response'] = timing
+
+ return super(ElapsedFuturesSession, self) \
+ .request(method, url, hooks=hooks, *args, **kwargs)
+
+
+
+ session = ElapsedFuturesSession()
+ future = session.get('http://httpbin.org/get')
+ # do some other stuff, send some more requests while this one works
+ response = future.result()
+ print('response status {0}'.format(response.status_code))
+ print('response elapsed {0}'.format(response.elapsed))
+
+ Using ProcessPoolExecutor
+ =========================
+
+ Similarly to `ThreadPoolExecutor`, it is possible to use an instance of
+ `ProcessPoolExecutor`. As the name suggest, the requests will be executed
+ concurrently in separate processes rather than threads.
+
+ .. code-block:: python
+
+ from concurrent.futures import ProcessPoolExecutor
+ from requests_futures.sessions import FuturesSession
+
+ session = FuturesSession(executor=ProcessPoolExecutor(max_workers=10))
+ # ... use as before
+
+ .. HINT::
+ Using the `ProcessPoolExecutor` is useful, in cases where memory
+ usage per request is very high (large response) and cycling the interpretor
+ is required to release memory back to OS.
+
+ A base requirement of using `ProcessPoolExecutor` is that the `Session.request`,
+ `FutureSession` all be pickle-able.
+
+ This means that only Python 3.5 is fully supported, while Python versions
+ 3.4 and above REQUIRE an existing `requests.Session` instance to be passed
+ when initializing `FutureSession`. Python 2.X and < 3.4 are currently not
+ supported.
+
+ .. code-block:: python
+
+ # Using python 3.4
+ from concurrent.futures import ProcessPoolExecutor
+ from requests import Session
+ from requests_futures.sessions import FuturesSession
+
+ session = FuturesSession(executor=ProcessPoolExecutor(max_workers=10),
+ session=Session())
+ # ... use as before
+
+ In case pickling fails, an exception is raised pointing to this documentation.
+
+ .. code-block:: python
+
+ # Using python 2.7
+ from concurrent.futures import ProcessPoolExecutor
+ from requests import Session
+ from requests_futures.sessions import FuturesSession
+
+ session = FuturesSession(executor=ProcessPoolExecutor(max_workers=10),
+ session=Session())
+ Traceback (most recent call last):
+ ...
+ RuntimeError: Cannot pickle function. Refer to documentation: https://github.com/ross/requests-futures/#using-processpoolexecutor
+
+ .. IMPORTANT::
+ * Python >= 3.4 required
+ * A session instance is required when using Python < 3.5
+ * If sub-classing `FuturesSession` it must be importable (module global)
Installation
============
@@ -125,6 +262,7 @@
.. _`requests`: https://github.com/kennethreitz/requests
.. _`concurrent.futures`: http://docs.python.org/dev/library/concurrent.futures.html
.. _backport: https://pypi.python.org/pypi/futures
+ .. _hooks: http://docs.python-requests.org/en/master/user/advanced/#event-hooks
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
@@ -134,7 +272,6 @@
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.2
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requests-futures-0.9.7/README.rst new/requests-futures-0.9.9/README.rst
--- old/requests-futures-0.9.7/README.rst 2016-02-12 04:04:57.000000000 +0100
+++ new/requests-futures-0.9.9/README.rst 2018-12-02 23:50:06.000000000 +0100
@@ -37,7 +37,7 @@
session = FuturesSession()
# first request is started in background
future_one = session.get('http://httpbin.org/get')
- # second requests is started immediately
+ # second requests is started immediately
future_two = session.get('http://httpbin.org/get?foo=bar')
# wait for the first request to complete, if it hasn't already
response_one = future_one.result()
@@ -82,13 +82,31 @@
are shifted (thrown) to the future.result() call so try/except blocks should be
moved there.
+Canceling queued requests (a.k.a cleaning up after yourself)
+=========================
+
+If you know that you won't be needing any additional responses from futures that
+haven't yet resolved, it's a good idea to cancel those requests. You can do this
+by using the session as a context manager:
+
+.. code-block:: python
+
+ from requests_futures.sessions import FuturesSession
+ with FuturesSession(max_workers=1) as session:
+ future = session.get('https://httpbin.org/get')
+ future2 = session.get('https://httpbin.org/delay/10')
+ future3 = session.get('https://httpbin.org/delay/10')
+ response = future.result()
+
+In this example, the second or third request will be skipped, saving time and
+resources that would otherwise be wasted.
+
Working in the Background
=========================
-There is one additional parameter to the various request functions,
-background_callback, which allows you to work with the Response objects in the
-background thread. This can be useful for shifting work out of the foreground,
-for a simple example take json parsing.
+Additional processing can be done in the background using requests's hooks_
+functionality. This can be useful for shifting work out of the foreground, for
+a simple example take json parsing.
.. code-block:: python
@@ -97,17 +115,136 @@
session = FuturesSession()
- def bg_cb(sess, resp):
+ def response_hook(resp, *args, **kwargs):
# parse the json storing the result on the response object
resp.data = resp.json()
- future = session.get('http://httpbin.org/get', background_callback=bg_cb)
+ future = session.get('http://httpbin.org/get', hooks={
+ 'response': response_hook,
+ })
# do some other stuff, send some more requests while this one works
response = future.result()
print('response status {0}'.format(response.status_code))
# data will have been attached to the response object in the background
pprint(response.data)
+Hooks can also be applied to the session.
+
+.. code-block:: python
+
+ from pprint import pprint
+ from requests_futures.sessions import FuturesSession
+
+ def response_hook(resp, *args, **kwargs):
+ # parse the json storing the result on the response object
+ resp.data = resp.json()
+
+ session = FuturesSession()
+ session.hooks['response'] = response_hook
+
+ future = session.get('http://httpbin.org/get')
+ # do some other stuff, send some more requests while this one works
+ response = future.result()
+ print('response status {0}'.format(response.status_code))
+ # data will have been attached to the response object in the background
+ pprint(response.data)
+
+A more advanced example that adds an `elapsed` property to all requests.
+
+.. code-block:: python
+
+ from pprint import pprint
+ from requests_futures.sessions import FuturesSession
+ from time import time
+
+
+ class ElapsedFuturesSession(FuturesSession):
+
+ def request(self, method, url, hooks={}, *args, **kwargs):
+ start = time()
+
+ def timing(r, *args, **kwargs):
+ r.elapsed = time() - start
+
+ try:
+ if isinstance(hooks['response'], (list, tuple)):
+ # needs to be first so we don't time other hooks execution
+ hooks['response'].prepend(timing)
+ else:
+ hooks['response'] = [timing, hooks['response']]
+ except KeyError:
+ hooks['response'] = timing
+
+ return super(ElapsedFuturesSession, self) \
+ .request(method, url, hooks=hooks, *args, **kwargs)
+
+
+
+ session = ElapsedFuturesSession()
+ future = session.get('http://httpbin.org/get')
+ # do some other stuff, send some more requests while this one works
+ response = future.result()
+ print('response status {0}'.format(response.status_code))
+ print('response elapsed {0}'.format(response.elapsed))
+
+Using ProcessPoolExecutor
+=========================
+
+Similarly to `ThreadPoolExecutor`, it is possible to use an instance of
+`ProcessPoolExecutor`. As the name suggest, the requests will be executed
+concurrently in separate processes rather than threads.
+
+.. code-block:: python
+
+ from concurrent.futures import ProcessPoolExecutor
+ from requests_futures.sessions import FuturesSession
+
+ session = FuturesSession(executor=ProcessPoolExecutor(max_workers=10))
+ # ... use as before
+
+.. HINT::
+ Using the `ProcessPoolExecutor` is useful, in cases where memory
+ usage per request is very high (large response) and cycling the interpretor
+ is required to release memory back to OS.
+
+A base requirement of using `ProcessPoolExecutor` is that the `Session.request`,
+`FutureSession` all be pickle-able.
+
+This means that only Python 3.5 is fully supported, while Python versions
+3.4 and above REQUIRE an existing `requests.Session` instance to be passed
+when initializing `FutureSession`. Python 2.X and < 3.4 are currently not
+supported.
+
+.. code-block:: python
+
+ # Using python 3.4
+ from concurrent.futures import ProcessPoolExecutor
+ from requests import Session
+ from requests_futures.sessions import FuturesSession
+
+ session = FuturesSession(executor=ProcessPoolExecutor(max_workers=10),
+ session=Session())
+ # ... use as before
+
+In case pickling fails, an exception is raised pointing to this documentation.
+
+.. code-block:: python
+
+ # Using python 2.7
+ from concurrent.futures import ProcessPoolExecutor
+ from requests import Session
+ from requests_futures.sessions import FuturesSession
+
+ session = FuturesSession(executor=ProcessPoolExecutor(max_workers=10),
+ session=Session())
+ Traceback (most recent call last):
+ ...
+ RuntimeError: Cannot pickle function. Refer to documentation: https://github.com/ross/requests-futures/#using-processpoolexecutor
+
+.. IMPORTANT::
+ * Python >= 3.4 required
+ * A session instance is required when using Python < 3.5
+ * If sub-classing `FuturesSession` it must be importable (module global)
Installation
============
@@ -117,3 +254,4 @@
.. _`requests`: https://github.com/kennethreitz/requests
.. _`concurrent.futures`: http://docs.python.org/dev/library/concurrent.futures.html
.. _backport: https://pypi.python.org/pypi/futures
+.. _hooks: http://docs.python-requests.org/en/master/user/advanced/#event-hooks
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requests-futures-0.9.7/requests_futures/__init__.py new/requests-futures-0.9.9/requests_futures/__init__.py
--- old/requests-futures-0.9.7/requests_futures/__init__.py 2016-02-12 04:05:47.000000000 +0100
+++ new/requests-futures-0.9.9/requests_futures/__init__.py 2018-12-02 23:50:24.000000000 +0100
@@ -12,7 +12,7 @@
import logging
__title__ = 'requests-futures'
-__version__ = '0.9.7'
+__version__ = '0.9.9'
__build__ = 0x000000
__author__ = 'Ross McFarland'
__license__ = 'Apache 2.0'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requests-futures-0.9.7/requests_futures/sessions.py new/requests-futures-0.9.9/requests_futures/sessions.py
--- old/requests-futures-0.9.7/requests_futures/sessions.py 2016-02-12 04:04:57.000000000 +0100
+++ new/requests-futures-0.9.9/requests_futures/sessions.py 2018-12-02 23:49:48.000000000 +0100
@@ -7,7 +7,7 @@
of python 3.3's concurrent.futures or the futures backport for previous
releases of python.
- from requests_futures import FuturesSession
+ from requests_futures.sessions import FuturesSession
session = FuturesSession()
# request is run in the background
@@ -19,36 +19,54 @@
print(response.content)
"""
+from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
+from functools import partial
+from logging import getLogger
+from pickle import dumps, PickleError
-from concurrent.futures import ThreadPoolExecutor
from requests import Session
from requests.adapters import DEFAULT_POOLSIZE, HTTPAdapter
+def wrap(self, sup, background_callback, *args_, **kwargs_):
+ """ A global top-level is required for ProcessPoolExecutor """
+ resp = sup(*args_, **kwargs_)
+ return background_callback(self, resp) or resp
+
+
+PICKLE_ERROR = ('Cannot pickle function. Refer to documentation: https://'
+ 'github.com/ross/requests-futures/#using-processpoolexecutor')
+
+
class FuturesSession(Session):
- def __init__(self, executor=None, max_workers=2, session=None, *args,
- **kwargs):
+ def __init__(self, executor=None, max_workers=8, session=None,
+ adapter_kwargs=None, *args, **kwargs):
"""Creates a FuturesSession
Notes
~~~~~
-
- * ProcessPoolExecutor is not supported b/c Response objects are
- not picklable.
+ * `ProcessPoolExecutor` may be used with Python > 3.4;
+ see README for more information.
* If you provide both `executor` and `max_workers`, the latter is
ignored and provided executor is used as is.
"""
+ _adapter_kwargs = {}
super(FuturesSession, self).__init__(*args, **kwargs)
+ self._owned_executor = executor is None
if executor is None:
executor = ThreadPoolExecutor(max_workers=max_workers)
# set connection pool size equal to max_workers if needed
if max_workers > DEFAULT_POOLSIZE:
- adapter_kwargs = dict(pool_connections=max_workers,
- pool_maxsize=max_workers)
- self.mount('https://', HTTPAdapter(**adapter_kwargs))
- self.mount('http://', HTTPAdapter(**adapter_kwargs))
+ _adapter_kwargs.update({'pool_connections': max_workers,
+ 'pool_maxsize': max_workers})
+
+ _adapter_kwargs.update(adapter_kwargs or {})
+
+ if _adapter_kwargs:
+ self.mount('https://', HTTPAdapter(**_adapter_kwargs))
+ self.mount('http://', HTTPAdapter(**_adapter_kwargs))
self.executor = executor
self.session = session
@@ -61,25 +79,33 @@
The background_callback param allows you to do some processing on the
response in the background, e.g. call resp.json() so that json parsing
happens in the background thread.
+
+ :rtype : concurrent.futures.Future
"""
if self.session:
- func = sup = self.session.request
+ func = self.session.request
else:
- func = sup = super(FuturesSession, self).request
+ # avoid calling super to not break pickled method
+ func = partial(Session.request, self)
background_callback = kwargs.pop('background_callback', None)
if background_callback:
- def wrap(*args_, **kwargs_):
- resp = sup(*args_, **kwargs_)
- background_callback(self, resp)
- return resp
-
- func = wrap
+ logger = getLogger(self.__class__.__name__)
+ logger.warn('`background_callback` is deprecated and will be '
+ 'removed in 1.0, use `hooks` instead')
+ func = partial(wrap, self, func, background_callback)
+
+ if isinstance(self.executor, ProcessPoolExecutor):
+ # verify function can be pickled
+ try:
+ dumps(func)
+ except (TypeError, PickleError):
+ raise RuntimeError(PICKLE_ERROR)
return self.executor.submit(func, *args, **kwargs)
- def __enter__(self):
- return self
+ def close(self):
+ super(FuturesSession, self).close()
+ if self._owned_executor:
+ self.executor.shutdown()
- def __exit__(self, type, value, traceback):
- self.executor.shutdown()
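The sessions.py change above deprecates the `background_callback` keyword (passing it now logs a warning) in favour of the standard requests `response` hooks. A minimal migration sketch, my own illustration rather than part of the diff, assuming network access to httpbin.org:

.. code-block:: python

    from requests_futures.sessions import FuturesSession

    def parse_json(resp, *args, **kwargs):
        # runs on the worker thread, so JSON parsing happens in the background
        resp.data = resp.json()

    session = FuturesSession()
    # 0.9.7 style: session.get(url, background_callback=...) still works but warns
    # 0.9.9 style: pass a requests 'response' hook instead
    future = session.get('http://httpbin.org/get', hooks={'response': parse_json})
    print(future.result().data)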
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requests-futures-0.9.7/requests_futures.egg-info/PKG-INFO new/requests-futures-0.9.9/requests_futures.egg-info/PKG-INFO
--- old/requests-futures-0.9.7/requests_futures.egg-info/PKG-INFO 2016-02-12 04:06:30.000000000 +0100
+++ new/requests-futures-0.9.9/requests_futures.egg-info/PKG-INFO 2018-12-02 23:50:47.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: requests-futures
-Version: 0.9.7
+Version: 0.9.9
Summary: Asynchronous Python HTTP for Humans.
Home-page: https://github.com/ross/requests-futures
Author: Ross McFarland
@@ -45,7 +45,7 @@
session = FuturesSession()
# first request is started in background
future_one = session.get('http://httpbin.org/get')
- # second requests is started immediately
+ # second requests is started immediately
future_two = session.get('http://httpbin.org/get?foo=bar')
# wait for the first request to complete, if it hasn't already
response_one = future_one.result()
@@ -90,13 +90,31 @@
are shifted (thrown) to the future.result() call so try/except blocks should be
moved there.
+ Canceling queued requests (a.k.a cleaning up after yourself)
+ =========================
+
+ If you know that you won't be needing any additional responses from futures that
+ haven't yet resolved, it's a good idea to cancel those requests. You can do this
+ by using the session as a context manager:
+
+ .. code-block:: python
+
+ from requests_futures.sessions import FuturesSession
+ with FuturesSession(max_workers=1) as session:
+ future = session.get('https://httpbin.org/get')
+ future2 = session.get('https://httpbin.org/delay/10')
+ future3 = session.get('https://httpbin.org/delay/10')
+ response = future.result()
+
+ In this example, the second or third request will be skipped, saving time and
+ resources that would otherwise be wasted.
+
Working in the Background
=========================
- There is one additional parameter to the various request functions,
- background_callback, which allows you to work with the Response objects in the
- background thread. This can be useful for shifting work out of the foreground,
- for a simple example take json parsing.
+ Additional processing can be done in the background using requests's hooks_
+ functionality. This can be useful for shifting work out of the foreground, for
+ a simple example take json parsing.
.. code-block:: python
@@ -105,17 +123,136 @@
session = FuturesSession()
- def bg_cb(sess, resp):
+ def response_hook(resp, *args, **kwargs):
# parse the json storing the result on the response object
resp.data = resp.json()
- future = session.get('http://httpbin.org/get', background_callback=bg_cb)
+ future = session.get('http://httpbin.org/get', hooks={
+ 'response': response_hook,
+ })
# do some other stuff, send some more requests while this one works
response = future.result()
print('response status {0}'.format(response.status_code))
# data will have been attached to the response object in the background
pprint(response.data)
+ Hooks can also be applied to the session.
+
+ .. code-block:: python
+
+ from pprint import pprint
+ from requests_futures.sessions import FuturesSession
+
+ def response_hook(resp, *args, **kwargs):
+ # parse the json storing the result on the response object
+ resp.data = resp.json()
+
+ session = FuturesSession()
+ session.hooks['response'] = response_hook
+
+ future = session.get('http://httpbin.org/get')
+ # do some other stuff, send some more requests while this one works
+ response = future.result()
+ print('response status {0}'.format(response.status_code))
+ # data will have been attached to the response object in the background
+ pprint(response.data)
+
+ A more advanced example that adds an `elapsed` property to all requests.
+
+ .. code-block:: python
+
+ from pprint import pprint
+ from requests_futures.sessions import FuturesSession
+ from time import time
+
+
+ class ElapsedFuturesSession(FuturesSession):
+
+ def request(self, method, url, hooks={}, *args, **kwargs):
+ start = time()
+
+ def timing(r, *args, **kwargs):
+ r.elapsed = time() - start
+
+ try:
+ if isinstance(hooks['response'], (list, tuple)):
+ # needs to be first so we don't time other hooks execution
+ hooks['response'].prepend(timing)
+ else:
+ hooks['response'] = [timing, hooks['response']]
+ except KeyError:
+ hooks['response'] = timing
+
+ return super(ElapsedFuturesSession, self) \
+ .request(method, url, hooks=hooks, *args, **kwargs)
+
+
+
+ session = ElapsedFuturesSession()
+ future = session.get('http://httpbin.org/get')
+ # do some other stuff, send some more requests while this one works
+ response = future.result()
+ print('response status {0}'.format(response.status_code))
+ print('response elapsed {0}'.format(response.elapsed))
+
+ Using ProcessPoolExecutor
+ =========================
+
+ Similarly to `ThreadPoolExecutor`, it is possible to use an instance of
+ `ProcessPoolExecutor`. As the name suggest, the requests will be executed
+ concurrently in separate processes rather than threads.
+
+ .. code-block:: python
+
+ from concurrent.futures import ProcessPoolExecutor
+ from requests_futures.sessions import FuturesSession
+
+ session = FuturesSession(executor=ProcessPoolExecutor(max_workers=10))
+ # ... use as before
+
+ .. HINT::
+ Using the `ProcessPoolExecutor` is useful, in cases where memory
+ usage per request is very high (large response) and cycling the interpretor
+ is required to release memory back to OS.
+
+ A base requirement of using `ProcessPoolExecutor` is that the `Session.request`,
+ `FutureSession` all be pickle-able.
+
+ This means that only Python 3.5 is fully supported, while Python versions
+ 3.4 and above REQUIRE an existing `requests.Session` instance to be passed
+ when initializing `FutureSession`. Python 2.X and < 3.4 are currently not
+ supported.
+
+ .. code-block:: python
+
+ # Using python 3.4
+ from concurrent.futures import ProcessPoolExecutor
+ from requests import Session
+ from requests_futures.sessions import FuturesSession
+
+ session = FuturesSession(executor=ProcessPoolExecutor(max_workers=10),
+ session=Session())
+ # ... use as before
+
+ In case pickling fails, an exception is raised pointing to this documentation.
+
+ .. code-block:: python
+
+ # Using python 2.7
+ from concurrent.futures import ProcessPoolExecutor
+ from requests import Session
+ from requests_futures.sessions import FuturesSession
+
+ session = FuturesSession(executor=ProcessPoolExecutor(max_workers=10),
+ session=Session())
+ Traceback (most recent call last):
+ ...
+ RuntimeError: Cannot pickle function. Refer to documentation: https://github.com/ross/requests-futures/#using-processpoolexecutor
+
+ .. IMPORTANT::
+ * Python >= 3.4 required
+ * A session instance is required when using Python < 3.5
+ * If sub-classing `FuturesSession` it must be importable (module global)
Installation
============
@@ -125,6 +262,7 @@
.. _`requests`: https://github.com/kennethreitz/requests
.. _`concurrent.futures`: http://docs.python.org/dev/library/concurrent.futures.html
.. _backport: https://pypi.python.org/pypi/futures
+ .. _hooks: http://docs.python-requests.org/en/master/user/advanced/#event-hooks
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
@@ -134,7 +272,6 @@
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.2
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requests-futures-0.9.7/setup.py new/requests-futures-0.9.9/setup.py
--- old/requests-futures-0.9.7/setup.py 2016-02-12 04:04:57.000000000 +0100
+++ new/requests-futures-0.9.9/setup.py 2017-11-03 14:09:48.000000000 +0100
@@ -48,7 +48,6 @@
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requests-futures-0.9.7/test_requests_futures.py new/requests-futures-0.9.9/test_requests_futures.py
--- old/requests-futures-0.9.7/test_requests_futures.py 2016-02-12 04:04:57.000000000 +0100
+++ new/requests-futures-0.9.9/test_requests_futures.py 2018-12-02 23:49:48.000000000 +0100
@@ -3,13 +3,24 @@
"""Tests for Requests."""
-from concurrent.futures import Future
-from requests import Response, session
+from concurrent.futures import Future, ProcessPoolExecutor
from os import environ
+from sys import version_info
+try:
+ from sys import pypy_version_info
+except ImportError:
+ pypy_version_info = None
+from unittest import TestCase, main, skipIf
+import logging
+
+from requests import Response, session
+from requests.adapters import DEFAULT_POOLSIZE
from requests_futures.sessions import FuturesSession
-from unittest import TestCase, main
HTTPBIN = environ.get('HTTPBIN_URL', 'http://httpbin.org/')
+logging.basicConfig(level=logging.DEBUG)
+logging.getLogger('urllib3.connectionpool').level = logging.WARN
+logging.getLogger('FuturesSession').level = logging.ERROR
def httpbin(*suffix):
@@ -70,7 +81,7 @@
""" Tests the `max_workers` shortcut. """
from concurrent.futures import ThreadPoolExecutor
session = FuturesSession()
- self.assertEqual(session.executor._max_workers, 2)
+ self.assertEqual(session.executor._max_workers, 8)
session = FuturesSession(max_workers=5)
self.assertEqual(session.executor._max_workers, 5)
session = FuturesSession(executor=ThreadPoolExecutor(max_workers=10))
@@ -79,6 +90,21 @@
max_workers=5)
self.assertEqual(session.executor._max_workers, 10)
+ def test_adapter_kwargs(self):
+ """ Tests the `adapter_kwargs` shortcut. """
+ from concurrent.futures import ThreadPoolExecutor
+ session = FuturesSession()
+ self.assertFalse(session.get_adapter('http://')._pool_block)
+ session = FuturesSession(max_workers=DEFAULT_POOLSIZE + 1,
+ adapter_kwargs={'pool_block': True})
+ adapter = session.get_adapter('http://')
+ self.assertTrue(adapter._pool_block)
+ self.assertEqual(adapter._pool_connections, DEFAULT_POOLSIZE + 1)
+ self.assertEqual(adapter._pool_maxsize, DEFAULT_POOLSIZE + 1)
+ session = FuturesSession(executor=ThreadPoolExecutor(max_workers=10),
+ adapter_kwargs={'pool_connections': 20})
+ self.assertEqual(session.get_adapter('http://')._pool_connections, 20)
+
def test_redirect(self):
""" Tests for the ability to cleanly handle redirects. """
sess = FuturesSession()
@@ -117,5 +143,148 @@
self.assertTrue(passout._exit_called)
+# << test process pool executor >>
+# see discussion https://github.com/ross/requests-futures/issues/11
+def global_cb_modify_response(s, r):
+ """ add the parsed json data to the response """
+ assert s, FuturesSession
+ assert r, Response
+ r.data = r.json()
+ r.__attrs__.append('data') # required for pickling new attribute
+
+
+def global_cb_return_result(s, r):
+ """ simply return parsed json data """
+ assert s, FuturesSession
+ assert r, Response
+ return r.json()
+
+
+def global_rasing_cb(s, r):
+ raise Exception('boom')
+
+
+# pickling instance method supported only from here
+unsupported_platform = version_info < (3, 4) and not pypy_version_info
+session_required = version_info < (3, 5,) and not pypy_version_info
+
+
+@skipIf(unsupported_platform, 'not supported in python < 3.4')
+class RequestsProcessPoolTestCase(TestCase):
+
+ def setUp(self):
+ self.proc_executor = ProcessPoolExecutor(max_workers=2)
+ self.session = session()
+
+ @skipIf(session_required, 'not supported in python < 3.5')
+ def test_futures_session(self):
+ self._assert_futures_session()
+
+ @skipIf(not session_required, 'fully supported on python >= 3.5')
+ def test_exception_raised(self):
+ with self.assertRaises(RuntimeError):
+ self._assert_futures_session()
+
+ def test_futures_existing_session(self):
+ self.session.headers['Foo'] = 'bar'
+ self._assert_futures_session(session=self.session)
+
+ def _assert_futures_session(self, session=None):
+ # basic futures get
+ if session:
+ sess = FuturesSession(executor=self.proc_executor, session=session)
+ else:
+ sess = FuturesSession(executor=self.proc_executor)
+
+ future = sess.get(httpbin('get'))
+ self.assertIsInstance(future, Future)
+ resp = future.result()
+ self.assertIsInstance(resp, Response)
+ self.assertEqual(200, resp.status_code)
+
+ # non-200, 404
+ future = sess.get(httpbin('status/404'))
+ resp = future.result()
+ self.assertEqual(404, resp.status_code)
+
+ future = sess.get(httpbin('get'),
+ background_callback=global_cb_modify_response)
+ # this should block until complete
+ resp = future.result()
+ if session:
+ self.assertEqual(resp.json()['headers']['Foo'], 'bar')
+ self.assertEqual(200, resp.status_code)
+ # make sure the callback was invoked
+ self.assertTrue(hasattr(resp, 'data'))
+
+ future = sess.get(httpbin('get'),
+ background_callback=global_cb_return_result)
+ # this should block until complete
+ resp = future.result()
+ # make sure the callback was invoked
+ self.assertIsInstance(resp, dict)
+
+ future = sess.get(httpbin('get'), background_callback=global_rasing_cb)
+ with self.assertRaises(Exception) as cm:
+ resp = future.result()
+ self.assertEqual('boom', cm.exception.args[0])
+
+ # Tests for the ability to cleanly handle redirects
+ future = sess.get(httpbin('redirect-to?url=get'))
+ self.assertIsInstance(future, Future)
+ resp = future.result()
+ self.assertIsInstance(resp, Response)
+ self.assertEqual(200, resp.status_code)
+
+ future = sess.get(httpbin('redirect-to?url=status/404'))
+ resp = future.result()
+ self.assertEqual(404, resp.status_code)
+
+ @skipIf(session_required, 'not supported in python < 3.5')
+ def test_context(self):
+ self._assert_context()
+
+ def test_context_with_session(self):
+ self._assert_context(session=self.session)
+
+ def _assert_context(self, session=None):
+ if session:
+ helper_instance = TopLevelContextHelper(executor=self.proc_executor,
+ session=self.session)
+ else:
+ helper_instance = TopLevelContextHelper(executor=self.proc_executor)
+ passout = None
+ with helper_instance as sess:
+ passout = sess
+ future = sess.get(httpbin('get'))
+ self.assertIsInstance(future, Future)
+ resp = future.result()
+ self.assertIsInstance(resp, Response)
+ self.assertEqual(200, resp.status_code)
+
+ self.assertTrue(passout._exit_called)
+
+
+class TopLevelContextHelper(FuturesSession):
+ def __init__(self, *args, **kwargs):
+ super(TopLevelContextHelper, self).__init__(
+ *args, **kwargs)
+ self._exit_called = False
+
+ def __exit__(self, *args, **kwargs):
+ self._exit_called = True
+ return super(TopLevelContextHelper, self).__exit__(
+ *args, **kwargs)
+
+
+@skipIf(not unsupported_platform, 'Exception raised when unsupported')
+class ProcessPoolExceptionRaisedTestCase(TestCase):
+ def test_exception_raised(self):
+ executor = ProcessPoolExecutor(max_workers=2)
+ sess = FuturesSession(executor=executor, session=session())
+ with self.assertRaises(RuntimeError):
+ sess.get(httpbin('get'))
+
+
if __name__ == '__main__':
main()
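One behavioural consequence of the sessions.py diff worth noting: close() now shuts down the executor only when the session created it itself (the _owned_executor flag), so an externally supplied executor survives the context manager. A minimal sketch of that behaviour, my own illustration rather than part of the package, assuming network access to httpbin.org:

.. code-block:: python

    from concurrent.futures import ThreadPoolExecutor
    from requests_futures.sessions import FuturesSession

    shared = ThreadPoolExecutor(max_workers=4)

    # the session does not own this executor, so closing the session leaves it running
    with FuturesSession(executor=shared) as session:
        print(session.get('http://httpbin.org/get').result().status_code)

    # the shared executor is still usable after the session has been closed
    print(shared.submit(len, 'still alive').result())
    shared.shutdown()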