Hello community,
here is the log from the commit of package python3-jupyter_ipyparallel for
openSUSE:Factory checked in at 2015-11-18 22:34:22
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python3-jupyter_ipyparallel (Old)
and /work/SRC/openSUSE:Factory/.python3-jupyter_ipyparallel.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python3-jupyter_ipyparallel"
Changes:
--------
---
/work/SRC/openSUSE:Factory/python3-jupyter_ipyparallel/python3-jupyter_ipyparallel.changes
2015-09-30 05:50:22.000000000 +0200
+++
/work/SRC/openSUSE:Factory/.python3-jupyter_ipyparallel.new/python3-jupyter_ipyparallel.changes
2015-11-18 22:34:26.000000000 +0100
@@ -1,0 +2,18 @@
+Tue Oct 27 15:14:11 UTC 2015 - [email protected]
+
+- update to version 4.1.0:
+ * Add :meth:`.Client.wait_interactive`
+ * Improvements for specifying engines with SSH launcher.
+
+-------------------------------------------------------------------
+Wed Oct 21 13:28:07 UTC 2015 - [email protected]
+
+- Don't build with python3-buildservice-tweak.
+  It causes package tests to fail.
+
+-------------------------------------------------------------------
+Mon Oct 5 10:01:17 UTC 2015 - [email protected]
+
+- Build documentation
+
+-------------------------------------------------------------------
Old:
----
ipyparallel-4.0.2.tar.gz
New:
----
ipyparallel-4.1.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python3-jupyter_ipyparallel.spec ++++++
--- /var/tmp/diff_new_pack.BnZr6e/_old 2015-11-18 22:34:26.000000000 +0100
+++ /var/tmp/diff_new_pack.BnZr6e/_new 2015-11-18 22:34:26.000000000 +0100
@@ -1,7 +1,7 @@
#
-# spec file for package python3-ipyparallel
+# spec file for package python3-jupyter_ipyparallel
#
-# Copyright (c) 2015 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2015 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -13,24 +13,33 @@
# published by the Open Source Initiative.
# Please submit bugfixes or comments via http://bugs.opensuse.org/
+#
Name: python3-jupyter_ipyparallel
-Version: 4.0.2
+Version: 4.1.0
Release: 0
-License: BSD-3-Clause
Summary: Interactive Parallel Computing with IPython
-Url: http://ipython.org
+License: BSD-3-Clause
Group: Development/Languages/Python
+Url: http://ipython.org
Source:
https://pypi.python.org/packages/source/i/ipyparallel/ipyparallel-%{version}.tar.gz
-BuildRequires: python3-devel
-BuildRequires: python3-setuptools
BuildRequires: python3-decorator
+BuildRequires: python3-devel
BuildRequires: python3-ipython_genutils
BuildRequires: python3-jupyter_client
BuildRequires: python3-jupyter_ipykernel
BuildRequires: python3-jupyter_ipython >= 4
BuildRequires: python3-pyzmq >= 13
+BuildRequires: python3-setuptools
+# Test requirements
+BuildRequires: python3-jupyter_ipython-iptest
+BuildConflicts: python3-buildservice-tweak
+# Documentation requirements
+BuildRequires: python3-Sphinx
+%if 0%{?suse_version} && ( 0%{?suse_version} != 1315 && 0%{?suse_version} >
1110 )
+BuildRequires: python3-Sphinx-latex
+%endif
Requires: python3-decorator
Requires: python3-ipython_genutils
Requires: python3-jupyter_client
@@ -50,6 +59,22 @@
%description
Use multiple instances of IPython in parallel, interactively.
+%package doc-html
+Summary: HTML documentation for %{name}
+Group: Development/Languages/Python
+Recommends: %{name} = %{version}
+
+%description doc-html
+Documentation and help files for %{name} in HTML format
+
+%package doc-pdf
+Summary:        PDF documentation for %{name}
+Group: Development/Languages/Python
+Recommends: %{name} = %{version}
+
+%description doc-pdf
+Documentation and help files for %{name} in PDF format
+
%prep
%setup -q -n ipyparallel-%{version}
@@ -62,7 +87,8 @@
# Prepare for update-alternatives usage
mkdir -p %{buildroot}%{_sysconfdir}/alternatives
for p in ipcluster ipcontroller ipengine ; do
- mv %{buildroot}%{_bindir}/$p %{buildroot}%{_bindir}/$p-%{py3_ver}
+ mv %{buildroot}%{_bindir}/$p %{buildroot}%{_bindir}/${p}3
+ ln -s -f %{_bindir}/${p}3 %{buildroot}%{_bindir}/$p-%{py3_ver}
ln -s -f %{_sysconfdir}/alternatives/$p %{buildroot}%{_bindir}/$p
# create a dummy target for /etc/alternatives/$p
touch %{buildroot}%{_sysconfdir}/alternatives/$p
@@ -73,6 +99,16 @@
chmod a-x %{buildroot}%{python3_sitelib}/ipyparallel/apps/baseapp.py
chmod a+x %{buildroot}%{python3_sitelib}/ipyparallel/controller/heartmonitor.py
+# Build the documentation
+pushd docs
+# PDF documentation currently doesn't build
+# %if 0%{?suse_version} && ( 0%{?suse_version} != 1315 && 0%{?suse_version} >
1110 )
+# PYTHONPATH=%{buildroot}%{python3_sitelib} make latexpdf
+# %endif
+PYTHONPATH=%{buildroot}%{python3_sitelib} make html
+rm -rf build/html/.buildinfo
+popd
+
%post
%_sbindir/update-alternatives \
--install %{_bindir}/ipcluster ipcluster %{_bindir}/ipcluster-%{py3_ver} 30
\
@@ -84,19 +120,38 @@
%_sbindir/update-alternatives --remove ipcluster
%{_bindir}/ipcluster-%{py3_ver}
fi
+%check
+export LANG=en_US.UTF-8
+iptest
+
%files
%defattr(-,root,root,-)
%doc COPYING.md README.md
%doc examples/
%{_bindir}/ipcluster
-%{_bindir}/ipcontroller
-%{_bindir}/ipengine
+%{_bindir}/ipcluster3
%{_bindir}/ipcluster-%{py3_ver}
-%{_bindir}/ipcontroller-%{py3_ver}
-%{_bindir}/ipengine-%{py3_ver}
%ghost %{_sysconfdir}/alternatives/ipcluster
+%{_bindir}/ipcontroller
+%{_bindir}/ipcontroller3
+%{_bindir}/ipcontroller-%{py3_ver}
%ghost %{_sysconfdir}/alternatives/ipcontroller
+%{_bindir}/ipengine
+%{_bindir}/ipengine3
+%{_bindir}/ipengine-%{py3_ver}
%ghost %{_sysconfdir}/alternatives/ipengine
%{python3_sitelib}/*
+%files doc-html
+%defattr(-,root,root,-)
+%doc COPYING.md
+%doc docs/build/html/
+
+# %if 0%{?suse_version} && ( 0%{?suse_version} != 1315 && 0%{?suse_version} >
1110 )
+# %files doc-pdf
+# %defattr(-,root,root,-)
+# %doc COPYING.md
+# %doc docs/build/latex/*.pdf
+# %endif
+
%changelog
++++++ ipyparallel-4.0.2.tar.gz -> ipyparallel-4.1.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/PKG-INFO
new/ipyparallel-4.1.0/PKG-INFO
--- old/ipyparallel-4.0.2/PKG-INFO 2015-08-19 20:58:52.000000000 +0200
+++ new/ipyparallel-4.1.0/PKG-INFO 2015-10-27 14:25:09.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: ipyparallel
-Version: 4.0.2
+Version: 4.1.0
Summary: Interactive Parallel Computing with IPython
Home-page: http://ipython.org
Author: IPython Development Team
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/README.md
new/ipyparallel-4.1.0/README.md
--- old/ipyparallel-4.0.2/README.md 2015-08-19 20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/README.md 2015-09-28 15:06:23.000000000 +0200
@@ -14,6 +14,8 @@
c.NotebookApp.server_extensions.append('ipyparallel.nbextension')
```
+See the [documentation on configuring the notebook
server](https://jupyter-notebook.readthedocs.org/en/latest/public_server.html)
+to find your config or setup your initial `jupyter_notebook_config.py`.
## Run
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/docs/source/asyncresult.rst
new/ipyparallel-4.1.0/docs/source/asyncresult.rst
--- old/ipyparallel-4.0.2/docs/source/asyncresult.rst 2015-08-19
20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/docs/source/asyncresult.rst 2015-09-28
15:06:23.000000000 +0200
@@ -147,4 +147,4 @@
handling individual results as they arrive, but with metadata), you can
always
just split the original result's ``msg_ids`` attribute, and handle them as
you like.
- For an example of this, see :file:`examples/parallel/customresult.py`
+ For an example of this, see :file:`examples/customresult.py`
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/docs/source/changelog.rst
new/ipyparallel-4.1.0/docs/source/changelog.rst
--- old/ipyparallel-4.0.2/docs/source/changelog.rst 1970-01-01
01:00:00.000000000 +0100
+++ new/ipyparallel-4.1.0/docs/source/changelog.rst 2015-10-27
14:24:50.000000000 +0100
@@ -0,0 +1,19 @@
+.. _changelog:
+
+Changes in IPython Parallel
+===========================
+
+4.1
+---
+
+`4.1 on GitHub <https://github.com/ipython/ipyparallel/milestones/4.1>`__
+
+- Add :meth:`.Client.wait_interactive`
+- Improvements for specifying engines with SSH launcher.
+
+4.0
+---
+
+`4.0 on GitHub <https://github.com/ipython/ipyparallel/milestones/4.0>`__
+
+First release of ``ipyparallel`` as a standalone package.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/docs/source/conf.py
new/ipyparallel-4.1.0/docs/source/conf.py
--- old/ipyparallel-4.0.2/docs/source/conf.py 2015-08-19 20:58:34.000000000
+0200
+++ new/ipyparallel-4.1.0/docs/source/conf.py 2015-10-27 14:24:50.000000000
+0100
@@ -135,7 +135,7 @@
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
-html_style = 'default.css'
+# html_style = 'default.css'
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/docs/source/index.rst
new/ipyparallel-4.1.0/docs/source/index.rst
--- old/ipyparallel-4.0.2/docs/source/index.rst 2015-08-19 20:58:34.000000000
+0200
+++ new/ipyparallel-4.1.0/docs/source/index.rst 2015-10-27 14:24:50.000000000
+0100
@@ -8,12 +8,28 @@
Welcome to the official IPython parallel documentation.
+.. _install:
-Contents:
+Installing IPython Parallel
+---------------------------
+
+As of 4.0, IPython parallel is now a standalone package called
:mod:`ipyparallel`.
+You can install it with::
+
+ pip install ipyparallel
+
+or::
+
+ conda install ipyparallel
+
+
+Contents
+--------
.. toctree::
:maxdepth: 1
+ changelog
intro
process
multiengine
@@ -36,7 +52,7 @@
.. toctree::
:maxdepth: 2
-
+
api/ipyparallel.rst
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/docs/source/process.rst
new/ipyparallel-4.1.0/docs/source/process.rst
--- old/ipyparallel-4.0.2/docs/source/process.rst 2015-08-19
20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/docs/source/process.rst 2015-10-15
14:54:39.000000000 +0200
@@ -424,7 +424,7 @@
c.SSHEngineSetLauncher.engines = { 'host1.example.com' : 2,
'host2.example.com' : 5,
'host3.example.com' : (1,
['--profile-dir=/home/different/location']),
- 'host4.example.com' : {'n': 3, 'engine_args':
['--profile-dir=/away/location'], 'engine_cmd': '/home/venv/bin/python'},
+ 'host4.example.com' : {'n': 3, 'engine_args':
['--profile-dir=/away/location'], 'engine_cmd': ['/home/venv/bin/python', '-m',
'ipyparallel.engine']},
'host5.example.com' : 8 }
* The `engines` dict, where the keys are the host we want to run engines on and
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/docs/source/task.rst
new/ipyparallel-4.1.0/docs/source/task.rst
--- old/ipyparallel-4.0.2/docs/source/task.rst 2015-08-19 20:58:34.000000000
+0200
+++ new/ipyparallel-4.1.0/docs/source/task.rst 2015-10-27 14:24:50.000000000
+0100
@@ -162,6 +162,22 @@
Now, any time you apply :func:`myfunc`, the task will only run on a machine
that has
numpy and pyzmq available, and when :func:`myfunc` is called, numpy and zmq
will be imported.
+You can also require specific objects, not just module names:
+
+.. sourcecode:: python
+
+ def foo(a):
+ return a*a
+
+ @parallel.require(foo)
+ def bar(b):
+ return foo(b)
+
+ @parallel.require(bar)
+ def baz(c, d):
+ return bar(c) - bar(d)
+
+ view.apply_sync(baz, 4, 5)
@depend
*******
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/ipyparallel/_version.py
new/ipyparallel-4.1.0/ipyparallel/_version.py
--- old/ipyparallel-4.0.2/ipyparallel/_version.py 2015-08-19
20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/ipyparallel/_version.py 2015-10-27
14:24:41.000000000 +0100
@@ -1,2 +1,2 @@
-version_info = (4, 0, 2)
+version_info = (4, 1, 0)
__version__ = '.'.join(map(str, version_info))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore'
old/ipyparallel-4.0.2/ipyparallel/apps/ipcontrollerapp.py
new/ipyparallel-4.1.0/ipyparallel/apps/ipcontrollerapp.py
--- old/ipyparallel-4.0.2/ipyparallel/apps/ipcontrollerapp.py 2015-08-19
20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/ipyparallel/apps/ipcontrollerapp.py 2015-10-27
13:49:51.000000000 +0100
@@ -403,7 +403,7 @@
if 'TaskScheduler.scheme_name' in self.config:
scheme = self.config.TaskScheduler.scheme_name
else:
- scheme = TaskScheduler.scheme_name.get_default_value()
+ scheme = TaskScheduler.scheme_name.default_value
# Task Queue (in a Process)
if scheme == 'pure':
self.log.warn("task::using pure DEALER Task scheduler")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/ipyparallel/apps/launcher.py
new/ipyparallel-4.1.0/ipyparallel/apps/launcher.py
--- old/ipyparallel-4.0.2/ipyparallel/apps/launcher.py 2015-08-19
20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/ipyparallel/apps/launcher.py 2015-10-15
14:54:39.000000000 +0200
@@ -409,12 +409,13 @@
def _notice_engine_stopped(self, data):
pid = data['pid']
- for idx,el in iteritems(self.launchers):
+ for idx, el in iteritems(self.launchers):
if el.process.pid == pid:
break
- self.launchers.pop(idx)
- self.stop_data[idx] = data
- if not self.launchers:
+ if self.launchers:
+ self.launchers.pop(idx)
+ self.stop_data[idx] = data
+ else:
self.notify_stop(self.stop_data)
@@ -546,6 +547,8 @@
help="args to pass to ssh")
scp_cmd = List(['scp'], config=True,
help="command for sending files")
+ scp_args = List([], config=True,
+ help="args to pass to scp")
program = List(['date'],
help="Program to launch via ssh")
program_args = List([],
@@ -589,7 +592,7 @@
[self.location, 'mkdir', '-p', '--', remote_dir]
)
self.log.info("sending %s to %s", local, full_remote)
- check_output(self.scp_cmd + [local, full_remote])
+ check_output(self.scp_cmd + self.scp_args + [local, full_remote])
def send_files(self):
"""send our files (called before start)"""
@@ -622,11 +625,16 @@
for remote_file, local_file in self.to_fetch:
self._fetch_file(remote_file, local_file)
- def start(self, hostname=None, user=None):
+ def start(self, hostname=None, user=None, port=None):
if hostname is not None:
self.hostname = hostname
if user is not None:
self.user = user
+ if port is not None:
+ self.ssh_args.append('-p')
+ self.ssh_args.append(port)
+ self.scp_args.append('-P')
+ self.scp_args.append(port)
self.send_files()
super(SSHLauncher, self).start()
@@ -740,6 +748,8 @@
for n in itervalues(self.engines):
if isinstance(n, (tuple,list)):
n,args = n
+ if isinstance(n, dict):
+ n = n['n']
count += n
return count
@@ -772,6 +782,11 @@
user,host = host.split('@',1)
else:
user=None
+ if ':' in host:
+ host,port = host.split(':',1)
+ else:
+ port = None
+
for i in range(n):
if i > 0:
time.sleep(self.delay)
@@ -786,7 +801,7 @@
el.engine_cmd = cmd
el.engine_args = args
el.on_stop(self._notice_engine_stopped)
- d = el.start(user=user, hostname=host)
+ d = el.start(user=user, hostname=host, port=port)
self.launchers[ "%s/%i" % (host,i) ] = el
dlist.append(d)
self.notify_start(dlist)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/ipyparallel/client/client.py
new/ipyparallel-4.1.0/ipyparallel/client/client.py
--- old/ipyparallel-4.0.2/ipyparallel/client/client.py 2015-08-19
20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/ipyparallel/client/client.py 2015-10-27
13:49:51.000000000 +0100
@@ -10,6 +10,7 @@
import sys
from threading import Thread, Event
import time
+import types
import warnings
from datetime import datetime
from getpass import getpass
@@ -30,15 +31,17 @@
from IPython.paths import get_ipython_dir
from IPython.utils.path import compress_user
from ipython_genutils.py3compat import cast_bytes, string_types, xrange,
iteritems
-from traitlets import (HasTraits, Integer, Instance, Unicode,
- Dict, List, Bool, Set, Any)
+from traitlets import (
+ HasTraits, Integer, Instance, Unicode,
+ Dict, List, Bool, Set, Any, Float,
+)
from decorator import decorator
from ipyparallel import Reference
from ipyparallel import error
from ipyparallel import util
-from jupyter_client.session import Session, Message
+from jupyter_client.session import Session
from ipykernel import serialize
from .asyncresult import AsyncResult, AsyncHubResult
@@ -292,6 +295,12 @@
debug = Bool(False)
_spin_thread = Any()
_stop_spinning = Any()
+ _spin_interval = Float(0.2,
+ help="""
+ The maximum spin interval (seconds) when waiting for results.
+ Starts as 1ms with exponential growth to this value.
+ """
+ )
profile=Unicode()
def _profile_default(self):
@@ -1063,13 +1072,26 @@
if not theids.intersection(self.outstanding):
return True
self.spin()
+ interval = 1e-3
while theids.intersection(self.outstanding):
if timeout >= 0 and ( time.time()-tic ) > timeout:
break
- time.sleep(1e-3)
+ time.sleep(interval)
self.spin()
+ interval = min(1.1 * interval, self._spin_interval)
return len(theids.intersection(self.outstanding)) == 0
-
+
+ def wait_interactive(self, jobs=None, interval=1., timeout=-1.):
+ """Wait interactively for jobs
+
+ If no job is specified, will wait for all outstanding jobs to complete.
+ """
+ if jobs is None:
+ jobs = self.outstanding
+ msg_ids = self._msg_ids_from_jobs(jobs)
+ ar = AsyncResult(self, msg_ids=msg_ids, owner=False)
+ return ar.wait_interactive(interval=interval, timeout=timeout)
+
#--------------------------------------------------------------------------
# Control methods
#--------------------------------------------------------------------------
@@ -1346,7 +1368,7 @@
#--------------------------------------------------------------------------
# Query methods
#--------------------------------------------------------------------------
-
+
@spin_first
def get_result(self, indices_or_msg_ids=None, block=None, owner=True):
"""Retrieve a result by msg_id or history index, wrapped in an
AsyncResult object.
@@ -1369,7 +1391,8 @@
Parameters
----------
- indices_or_msg_ids : integer history index, str msg_id, or list of
either
+ indices_or_msg_ids : integer history index, str msg_id, AsyncResult,
+ or a list of same.
The indices or msg_ids of indices to be retrieved
block : bool
@@ -1394,18 +1417,8 @@
if indices_or_msg_ids is None:
indices_or_msg_ids = -1
- single_result = False
- if not isinstance(indices_or_msg_ids, (list,tuple)):
- indices_or_msg_ids = [indices_or_msg_ids]
- single_result = True
-
- theids = []
- for id in indices_or_msg_ids:
- if isinstance(id, int):
- id = self.history[id]
- if not isinstance(id, string_types):
- raise TypeError("indices must be str or int, not %r"%id)
- theids.append(id)
+ single_result = isinstance(indices_or_msg_ids, string_types + (int,))
+ theids = self._msg_ids_from_jobs(indices_or_msg_ids)
local_ids = [msg_id for msg_id in theids if (msg_id in
self.outstanding or msg_id in self.results)]
remote_ids = [msg_id for msg_id in theids if msg_id not in local_ids]
@@ -1451,17 +1464,7 @@
if indices_or_msg_ids is None:
indices_or_msg_ids = -1
- if not isinstance(indices_or_msg_ids, (list,tuple)):
- indices_or_msg_ids = [indices_or_msg_ids]
-
- theids = []
- for id in indices_or_msg_ids:
- if isinstance(id, int):
- id = self.history[id]
- if not isinstance(id, string_types):
- raise TypeError("indices must be str or int, not %r"%id)
- theids.append(id)
-
+ theids = self._msg_ids_from_jobs(indices_or_msg_ids)
content = dict(msg_ids = theids)
self.session.send(self._query_socket, 'resubmit_request', content)
@@ -1508,16 +1511,7 @@
be lists of msg_ids that are incomplete or complete. If
`status_only`
is False, then completed results will be keyed by their `msg_id`.
"""
- if not isinstance(msg_ids, (list,tuple)):
- msg_ids = [msg_ids]
-
- theids = []
- for msg_id in msg_ids:
- if isinstance(msg_id, int):
- msg_id = self.history[msg_id]
- if not isinstance(msg_id, string_types):
- raise TypeError("msg_ids must be str, not %r"%msg_id)
- theids.append(msg_id)
+ theids = self._msg_ids_from_jobs(msg_ids)
completed = []
local_results = {}
@@ -1628,30 +1622,36 @@
else:
return content
- def _build_msgids_from_target(self, targets=None):
+ def _msg_ids_from_target(self, targets=None):
"""Build a list of msg_ids from the list of engine targets"""
if not targets: # needed as _build_targets otherwise uses all engines
return []
target_ids = self._build_targets(targets)[0]
return [md_id for md_id in self.metadata if
self.metadata[md_id]["engine_uuid"] in target_ids]
- def _build_msgids_from_jobs(self, jobs=None):
- """Build a list of msg_ids from "jobs" """
- if not jobs:
- return []
- msg_ids = []
- if isinstance(jobs, string_types + (AsyncResult,)):
+ def _msg_ids_from_jobs(self, jobs=None):
+ """Given a 'jobs' argument, convert it to a list of msg_ids.
+
+ Can be either one or a list of:
+
+ - msg_id strings
+ - integer indices to this Client's history
+ - AsyncResult objects
+ """
+ if not isinstance(jobs, (list, tuple, set, types.GeneratorType)):
jobs = [jobs]
- bad_ids = [obj for obj in jobs if not isinstance(obj, string_types +
(AsyncResult,))]
- if bad_ids:
- raise TypeError("Invalid msg_id type %r, expected str or
AsyncResult"%bad_ids[0])
- for j in jobs:
- if isinstance(j, AsyncResult):
- msg_ids.extend(j.msg_ids)
+ msg_ids = []
+ for job in jobs:
+ if isinstance(job, int):
+ msg_ids.append(self.history[job])
+ elif isinstance(job, string_types):
+ msg_ids.append(job)
+ elif isinstance(job, AsyncResult):
+ msg_ids.extend(job.msg_ids)
else:
- msg_ids.append(j)
- return msg_ids
-
+ raise TypeError("Expected msg_id, int, or AsyncResult, got %r"
% job)
+ return msg_ids
+
def purge_local_results(self, jobs=[], targets=[]):
"""Clears the client caches of results and their metadata.
@@ -1692,8 +1692,8 @@
self.metadata.clear()
else:
msg_ids = set()
- msg_ids.update(self._build_msgids_from_target(targets))
- msg_ids.update(self._build_msgids_from_jobs(jobs))
+ msg_ids.update(self._msg_ids_from_target(targets))
+ msg_ids.update(self._msg_ids_from_jobs(jobs))
still_outstanding = self.outstanding.intersection(msg_ids)
if still_outstanding:
raise RuntimeError("Can't purge outstanding tasks: %s" %
still_outstanding)
@@ -1730,7 +1730,7 @@
if jobs == 'all':
msg_ids = jobs
else:
- msg_ids = self._build_msgids_from_jobs(jobs)
+ msg_ids = self._msg_ids_from_jobs(jobs)
content = dict(engine_ids=targets, msg_ids=msg_ids)
self.session.send(self._query_socket, "purge_request", content=content)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/ipyparallel/controller/hub.py
new/ipyparallel-4.1.0/ipyparallel/controller/hub.py
--- old/ipyparallel-4.0.2/ipyparallel/controller/hub.py 2015-08-19
20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/ipyparallel/controller/hub.py 2015-10-27
13:49:51.000000000 +0100
@@ -258,7 +258,7 @@
scheme = self.config.TaskScheduler.scheme_name
else:
from .scheduler import TaskScheduler
- scheme = TaskScheduler.scheme_name.get_default_value()
+ scheme = TaskScheduler.scheme_name.default_value
# build connection dicts
engine = self.engine_info = {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/ipyparallel/controller/mongodb.py
new/ipyparallel-4.1.0/ipyparallel/controller/mongodb.py
--- old/ipyparallel-4.0.2/ipyparallel/controller/mongodb.py 2015-08-19
20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/ipyparallel/controller/mongodb.py 2015-09-28
15:06:23.000000000 +0200
@@ -11,7 +11,10 @@
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
-from pymongo import Connection
+try:
+ from pymongo import MongoClient
+except ImportError:
+ from pymongo import Connection as MongoClient
# bson.Binary import moved
try:
@@ -31,11 +34,11 @@
"""MongoDB TaskRecord backend."""
connection_args = List(config=True,
- help="""Positional arguments to be passed to pymongo.Connection. Only
+ help="""Positional arguments to be passed to pymongo.MongoClient. Only
necessary if the default mongodb configuration does not point to your
mongod instance.""")
connection_kwargs = Dict(config=True,
- help="""Keyword arguments to be passed to pymongo.Connection. Only
+ help="""Keyword arguments to be passed to pymongo.MongoClient. Only
necessary if the default mongodb configuration does not point to your
mongod instance."""
)
@@ -45,12 +48,12 @@
in tasks from previous sessions being available via Clients' db_query
and
get_result methods.""")
- _connection = Instance(Connection, allow_none=True) # pymongo connection
+ _connection = Instance(MongoClient, allow_none=True) # pymongo connection
def __init__(self, **kwargs):
super(MongoDB, self).__init__(**kwargs)
if self._connection is None:
- self._connection = Connection(*self.connection_args,
**self.connection_kwargs)
+ self._connection = MongoClient(*self.connection_args,
**self.connection_kwargs)
if not self.database:
self.database = self.session
self._db = self._connection[self.database]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/ipyparallel/tests/test_client.py
new/ipyparallel-4.1.0/ipyparallel/tests/test_client.py
--- old/ipyparallel-4.0.2/ipyparallel/tests/test_client.py 2015-08-19
20:58:34.000000000 +0200
+++ new/ipyparallel-4.1.0/ipyparallel/tests/test_client.py 2015-10-27
13:49:51.000000000 +0100
@@ -149,6 +149,10 @@
ar2 = self.client.get_result(ar.msg_ids[0])
self.assertNotIsInstance(ar2, AsyncHubResult)
self.assertEqual(ahr.get(), ar2.get())
+ ar3 = self.client.get_result(ar2)
+ self.assertEqual(ar3.msg_ids, ar2.msg_ids)
+ ar3 = self.client.get_result([ar2])
+ self.assertEqual(ar3.msg_ids, ar2.msg_ids)
c.close()
def test_get_execute_result(self):
@@ -548,3 +552,8 @@
self.assertTrue('pxall' in magics['line'])
self.assertTrue('pxall' in magics['cell'])
self.assertEqual(v0.targets, 'all')
+
+ def test_wait_interactive(self):
+ ar = self.client[-1].apply_async(lambda : 1)
+ self.client.wait_interactive()
+ self.assertEqual(self.client.outstanding, set())
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/ipyparallel-4.0.2/ipyparallel/util.py
new/ipyparallel-4.1.0/ipyparallel/util.py
--- old/ipyparallel-4.0.2/ipyparallel/util.py 2015-08-19 20:58:34.000000000
+0200
+++ new/ipyparallel-4.1.0/ipyparallel/util.py 2015-10-27 14:24:50.000000000
+0100
@@ -339,6 +339,7 @@
handler.root_topic = root
logger.addHandler(handler)
logger.setLevel(loglevel)
+ return logger
def connect_engine_logger(context, iface, engine, loglevel=logging.DEBUG):
logger = logging.getLogger()