Hello community,

here is the log from the commit of package python-distributed for openSUSE:Factory checked in at 2020-09-21 17:43:17
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-distributed (Old)
 and      /work/SRC/openSUSE:Factory/.python-distributed.new.4249 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-distributed"

Mon Sep 21 17:43:17 2020 rev:35 rq:835582 version:2.27.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-distributed/python-distributed.changes   2020-09-14 12:35:43.701381788 +0200
+++ /work/SRC/openSUSE:Factory/.python-distributed.new.4249/python-distributed.changes 2020-09-21 17:46:04.813063253 +0200
@@ -1,0 +2,12 @@
+Sat Sep 19 15:08:52 UTC 2020 - Arun Persaud <[email protected]>
+
+- update to version 2.27.0:
+  * Fix registering a worker plugin with name arg (GH#4105) Nick Evans
+  * Support different remote_python paths on cluster nodes (GH#4085)
+    Abdulelah Bin Mahfoodh
+  * Allow RuntimeError s when closing global clients (GH#4115) Matthew
+    Rocklin
+  * Match pre-commit in dask (GH#4049) Julia Signell
+  * Update super usage (GH#4110) Poruri Sai Rahul
+
+-------------------------------------------------------------------

Old:
----
  distributed-2.26.0.tar.gz

New:
----
  distributed-2.27.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-distributed.spec ++++++
--- /var/tmp/diff_new_pack.vIJuQB/_old  2020-09-21 17:46:06.409064757 +0200
+++ /var/tmp/diff_new_pack.vIJuQB/_new  2020-09-21 17:46:06.413064761 +0200
@@ -21,7 +21,7 @@
 # Test requires network connection
 %bcond_with test
 Name:           python-distributed
-Version:        2.26.0
+Version:        2.27.0
 Release:        0
 Summary:        Library for distributed computing with Python
 License:        BSD-3-Clause

++++++ distributed-2.26.0.tar.gz -> distributed-2.27.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/PKG-INFO new/distributed-2.27.0/PKG-INFO
--- old/distributed-2.26.0/PKG-INFO     2020-09-11 23:28:19.954074000 +0200
+++ new/distributed-2.27.0/PKG-INFO     2020-09-19 04:34:27.506927000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.2
 Name: distributed
-Version: 2.26.0
+Version: 2.27.0
 Summary: Distributed scheduler for Dask
 Home-page: https://distributed.dask.org
 Maintainer: Matthew Rocklin
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/_version.py new/distributed-2.27.0/distributed/_version.py
--- old/distributed-2.26.0/distributed/_version.py      2020-09-11 23:28:19.955338200 +0200
+++ new/distributed-2.27.0/distributed/_version.py      2020-09-19 04:34:27.507908000 +0200
@@ -8,11 +8,11 @@
 
 version_json = '''
 {
- "date": "2020-09-11T16:27:23-0500",
+ "date": "2020-09-18T21:33:59-0500",
  "dirty": false,
  "error": null,
- "full-revisionid": "73d381a29906bf6e08fad013c921922bd73fd9d4",
- "version": "2.26.0"
+ "full-revisionid": "ecaf14097f5e69e5b884e9c87b708c85d181a9ef",
+ "version": "2.27.0"
 }
 '''  # END VERSION_JSON
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/client.py new/distributed-2.27.0/distributed/client.py
--- old/distributed-2.26.0/distributed/client.py        2020-09-09 05:39:40.000000000 +0200
+++ new/distributed-2.27.0/distributed/client.py        2020-09-19 04:28:28.000000000 +0200
@@ -4202,7 +4202,7 @@
 
     def __init__(self, *args, **kwargs):
         warnings.warn("Executor has been renamed to Client")
-        super(Executor, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
 
 def CompatibleExecutor(*args, **kwargs):
@@ -4817,8 +4817,8 @@
     c = _get_global_client()
     if c is not None:
         c._should_close_loop = False
-        with suppress(TimeoutError):
-            c.close(timeout=2)
+        with suppress(TimeoutError, RuntimeError):
+            c.close(timeout=3)
 
 
 atexit.register(_close_global_client)
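Note: the client.py hunk above is the change behind "Allow RuntimeError s when closing global clients" (GH#4115): the atexit hook that closes the global client now tolerates a RuntimeError as well as a TimeoutError and waits up to 3 seconds. A minimal sketch of the same shutdown pattern, using only the standard library; the function and variable names here are illustrative, not upstream API:

    import atexit
    from contextlib import suppress

    def _close_client_quietly(client):
        # Same idea as the upstream hook: ignore a close() that times out and
        # a RuntimeError raised during interpreter teardown, instead of
        # printing a traceback at exit.
        with suppress(TimeoutError, RuntimeError):
            client.close(timeout=3)

    # Usage sketch: atexit.register(_close_client_quietly, some_client)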
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/deploy/adaptive.py new/distributed-2.27.0/distributed/deploy/adaptive.py
--- old/distributed-2.26.0/distributed/deploy/adaptive.py       2020-09-11 23:15:38.000000000 +0200
+++ new/distributed-2.27.0/distributed/deploy/adaptive.py       2020-09-18 22:52:06.000000000 +0200
@@ -152,7 +152,7 @@
         # are in sync before making recommendations.
         await self.cluster
 
-        return await super(Adaptive, self).recommendations(target)
+        return await super().recommendations(target)
 
     async def workers_to_close(self, target: int):
         """
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/deploy/local.py new/distributed-2.27.0/distributed/deploy/local.py
--- old/distributed-2.26.0/distributed/deploy/local.py  2020-08-29 00:26:28.000000000 +0200
+++ new/distributed-2.27.0/distributed/deploy/local.py  2020-09-18 22:52:06.000000000 +0200
@@ -226,7 +226,7 @@
 
         workers = {i: worker for i in range(n_workers)}
 
-        super(LocalCluster, self).__init__(
+        super().__init__(
             scheduler=scheduler,
             workers=workers,
             worker=worker,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/deploy/ssh.py new/distributed-2.27.0/distributed/deploy/ssh.py
--- old/distributed-2.26.0/distributed/deploy/ssh.py    2020-08-29 00:26:28.000000000 +0200
+++ new/distributed-2.27.0/distributed/deploy/ssh.py    2020-09-18 22:52:06.000000000 +0200
@@ -108,10 +108,13 @@
                 "Worker failed to set DASK_INTERNAL_INHERIT_CONFIG variable "
             )
 
+        if not self.remote_python:
+            self.remote_python = sys.executable
+
         cmd = " ".join(
             [
                 set_env,
-                self.remote_python or sys.executable,
+                self.remote_python,
                 "-m",
                 self.worker_module,
                 self.scheduler,
@@ -186,10 +189,13 @@
                 "Scheduler failed to set DASK_INTERNAL_INHERIT_CONFIG variable "
            )
 
+        if not self.remote_python:
+            self.remote_python = sys.executable
+
         cmd = " ".join(
             [
                 set_env,
-                self.remote_python or sys.executable,
+                self.remote_python,
                 "-m",
                 "distributed.cli.dask_scheduler",
             ]
@@ -235,7 +241,7 @@
     worker_options: dict = {},
     scheduler_options: dict = {},
     worker_module: str = "distributed.cli.dask_worker",
-    remote_python: str = None,
+    remote_python: Union[str, List[str]] = None,
     **kwargs,
 ):
     """Deploy a Dask cluster using SSH
@@ -274,7 +280,7 @@
         Keywords to pass on to scheduler.
     worker_module: str, optional
         Python module to call to start the worker.
-    remote_python: str, optional
+    remote_python: str or list of str, optional
         Path to Python on remote nodes.
 
     Examples
@@ -326,6 +332,12 @@
             "dictionary for each address."
         )
 
+    if isinstance(remote_python, list) and len(remote_python) != len(hosts):
+        raise RuntimeError(
+            "When specifying a list of remote_python you must provide a "
+            "path for each address."
+        )
+
     scheduler = {
         "cls": Scheduler,
         "options": {
@@ -334,7 +346,9 @@
             if isinstance(connect_options, dict)
             else connect_options[0],
             "kwargs": scheduler_options,
-            "remote_python": remote_python,
+            "remote_python": remote_python[0]
+            if isinstance(remote_python, list)
+            else remote_python,
         },
     }
     workers = {
@@ -347,7 +361,9 @@
                 else connect_options[i + 1],
                 "kwargs": worker_options,
                 "worker_module": worker_module,
-                "remote_python": remote_python,
+                "remote_python": remote_python[i + 1]
+                if isinstance(remote_python, list)
+                else remote_python,
             },
         }
         for i, host in enumerate(hosts[1:])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/deploy/tests/test_ssh.py new/distributed-2.27.0/distributed/deploy/tests/test_ssh.py
--- old/distributed-2.26.0/distributed/deploy/tests/test_ssh.py 2020-08-25 19:17:41.000000000 +0200
+++ new/distributed-2.27.0/distributed/deploy/tests/test_ssh.py 2020-09-18 22:52:06.000000000 +0200
@@ -2,6 +2,7 @@
 
 pytest.importorskip("asyncssh")
 
+import sys
 import dask
 from dask.distributed import Client
 from distributed.deploy.ssh import SSHCluster
@@ -156,3 +157,43 @@
         worker_options={"death_timeout": "5s"},
     ) as _:
         pass
+
+
+@pytest.mark.asyncio
+async def test_remote_python():
+    async with SSHCluster(
+        ["127.0.0.1"] * 3,
+        connect_options=[dict(known_hosts=None)] * 3,
+        asynchronous=True,
+        scheduler_options={"port": 0, "idle_timeout": "5s"},
+        worker_options={"death_timeout": "5s"},
+        remote_python=sys.executable,
+    ) as cluster:
+        assert cluster.workers[0].remote_python == sys.executable
+
+
+@pytest.mark.asyncio
+async def test_remote_python_as_dict():
+    async with SSHCluster(
+        ["127.0.0.1"] * 3,
+        connect_options=[dict(known_hosts=None)] * 3,
+        asynchronous=True,
+        scheduler_options={"port": 0, "idle_timeout": "5s"},
+        worker_options={"death_timeout": "5s"},
+        remote_python=[sys.executable] * 3,
+    ) as cluster:
+        assert cluster.workers[0].remote_python == sys.executable
+
+
+@pytest.mark.asyncio
+async def test_list_of_remote_python_raises():
+    with pytest.raises(RuntimeError):
+        async with SSHCluster(
+            ["127.0.0.1"] * 3,
+            connect_options=[dict(known_hosts=None)] * 3,
+            asynchronous=True,
+            scheduler_options={"port": 0, "idle_timeout": "5s"},
+            worker_options={"death_timeout": "5s"},
+            remote_python=[sys.executable] * 4,  # Mismatch in length 4 != 3
+        ) as _:
+            pass
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/diagnostics/progressbar.py new/distributed-2.27.0/distributed/diagnostics/progressbar.py
--- old/distributed-2.26.0/distributed/diagnostics/progressbar.py       2020-08-29 00:26:28.000000000 +0200
+++ new/distributed-2.27.0/distributed/diagnostics/progressbar.py       2020-09-18 22:52:06.000000000 +0200
@@ -115,7 +115,7 @@
         start=True,
         **kwargs,
     ):
-        super(TextProgressBar, self).__init__(keys, scheduler, interval, complete)
+        super().__init__(keys, scheduler, interval, complete)
         self.width = width
         self.loop = loop or IOLoop()
 
@@ -158,7 +158,7 @@
         loop=None,
         **kwargs,
     ):
-        super(ProgressWidget, self).__init__(keys, scheduler, interval, complete)
+        super().__init__(keys, scheduler, interval, complete)
 
         from ipywidgets import FloatProgress, HBox, VBox, HTML
 
@@ -314,9 +314,7 @@
         complete=False,
         **kwargs,
     ):
-        super(MultiProgressWidget, self).__init__(
-            keys, scheduler, func, interval, complete
-        )
+        super().__init__(keys, scheduler, func, interval, complete)
         from ipywidgets import VBox
 
         self.widget = VBox([])
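Note: the ssh.py and test_ssh.py hunks above implement "Support different remote_python paths on cluster nodes" (GH#4085): remote_python may now be either a single path or a list with one entry per host, where entry 0 is used on the scheduler host and the remaining entries on the worker hosts; a list whose length does not match hosts raises RuntimeError. A minimal usage sketch, assuming passwordless SSH to the listed machines; the host names and interpreter paths are placeholders:

    from distributed.deploy.ssh import SSHCluster
    from dask.distributed import Client

    cluster = SSHCluster(
        ["scheduler.example.com", "worker1.example.com", "worker2.example.com"],
        connect_options={"known_hosts": None},
        # One interpreter path per host: index 0 -> scheduler, the rest -> workers.
        remote_python=[
            "/opt/conda/bin/python",  # scheduler.example.com
            "/usr/bin/python3",       # worker1.example.com
            "/usr/bin/python3",       # worker2.example.com
        ],
    )
    client = Client(cluster)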
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/diagnostics/tests/test_worker_plugin.py new/distributed-2.27.0/distributed/diagnostics/tests/test_worker_plugin.py
--- old/distributed-2.26.0/distributed/diagnostics/tests/test_worker_plugin.py  2020-08-29 00:26:28.000000000 +0200
+++ new/distributed-2.27.0/distributed/diagnostics/tests/test_worker_plugin.py  2020-09-18 22:52:06.000000000 +0200
@@ -137,6 +137,23 @@
 
 
 @gen_cluster(nthreads=[("127.0.0.1", 1)], client=True)
+async def test_registering_with_name_arg(c, s, w):
+    class FooWorkerPlugin:
+        def setup(self, worker):
+            if hasattr(worker, "foo"):
+                raise RuntimeError(f"Worker {worker.address} already has foo!")
+
+            worker.foo = True
+
+    responses = await c.register_worker_plugin(FooWorkerPlugin(), name="foo")
+    assert list(responses.values()) == [{"status": "OK"}]
+
+    async with Worker(s.address, loop=s.loop):
+        responses = await c.register_worker_plugin(FooWorkerPlugin(), name="foo")
+        assert list(responses.values()) == [{"status": "repeat"}] * 2
+
+
+@gen_cluster(nthreads=[("127.0.0.1", 1)], client=True)
 async def test_empty_plugin(c, s, w):
     class EmptyPlugin:
         pass
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/http/scheduler/prometheus/__init__.py new/distributed-2.27.0/distributed/http/scheduler/prometheus/__init__.py
--- old/distributed-2.26.0/distributed/http/scheduler/prometheus/__init__.py    2020-09-08 06:05:25.000000000 +0200
+++ new/distributed-2.27.0/distributed/http/scheduler/prometheus/__init__.py    2020-09-18 22:52:06.000000000 +0200
@@ -78,9 +78,7 @@
     def __init__(self, *args, dask_server=None, **kwargs):
         import prometheus_client
 
-        super(PrometheusHandler, self).__init__(
-            *args, dask_server=dask_server, **kwargs
-        )
+        super().__init__(*args, dask_server=dask_server, **kwargs)
 
         if PrometheusHandler._collectors:
             # Especially during testing, multiple schedulers are started
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/http/worker/prometheus.py new/distributed-2.27.0/distributed/http/worker/prometheus.py
--- old/distributed-2.26.0/distributed/http/worker/prometheus.py        2020-08-29 00:26:28.000000000 +0200
+++ new/distributed-2.27.0/distributed/http/worker/prometheus.py        2020-09-18 22:52:06.000000000 +0200
@@ -77,7 +77,7 @@
     def __init__(self, *args, **kwargs):
         import prometheus_client
 
-        super(PrometheusHandler, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
         if PrometheusHandler._initialized:
             return
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/nanny.py new/distributed-2.27.0/distributed/nanny.py
--- old/distributed-2.26.0/distributed/nanny.py 2020-09-11 23:15:38.000000000 +0200
+++ new/distributed-2.27.0/distributed/nanny.py 2020-09-18 22:52:06.000000000 +0200
@@ -198,7 +198,7 @@
             "run": self.run,
         }
 
-        super(Nanny, self).__init__(
+        super().__init__(
             handlers=handlers, io_loop=self.loop, connection_args=self.connection_args
         )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/scheduler.py new/distributed-2.27.0/distributed/scheduler.py
--- old/distributed-2.26.0/distributed/scheduler.py     2020-08-29 00:26:28.000000000 +0200
+++ new/distributed-2.27.0/distributed/scheduler.py     2020-09-19 04:28:28.000000000 +0200
@@ -1379,7 +1379,7 @@
 
         connection_limit = get_fileno_limit() / 2
 
-        super(Scheduler, self).__init__(
+        super().__init__(
             handlers=self.handlers,
             stream_handlers=merge(worker_handlers, client_handlers),
             io_loop=self.loop,
@@ -1580,7 +1580,7 @@
         self.status = Status.closed
         self.stop()
 
-        await super(Scheduler, self).close()
+        await super().close()
 
         setproctitle("dask-scheduler [closed]")
         disable_gc_diagnosis()
@@ -3844,7 +3844,7 @@
 
     async def register_worker_plugin(self, comm, plugin, name=None):
         """ Registers a setup function, and call it on every worker """
-        self.worker_plugins.append(plugin)
+        self.worker_plugins.append({"plugin": plugin, "name": name})
 
         responses = await self.broadcast(
             msg=dict(op="plugin-add", plugin=plugin, name=name)
@@ -5586,7 +5586,7 @@
 
 class KilledWorker(Exception):
     def __init__(self, task, last_worker):
-        super(KilledWorker, self).__init__(task, last_worker)
+        super().__init__(task, last_worker)
         self.task = task
         self.last_worker = last_worker
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/tests/test_scheduler.py new/distributed-2.27.0/distributed/tests/test_scheduler.py
--- old/distributed-2.26.0/distributed/tests/test_scheduler.py  2020-08-29 00:26:28.000000000 +0200
+++ new/distributed-2.27.0/distributed/tests/test_scheduler.py  2020-09-18 22:52:06.000000000 +0200
@@ -1963,12 +1963,12 @@
     def __init__(self, *args, failing_connections=0, **kwargs):
         self.cnn_count = 0
         self.failing_connections = failing_connections
-        super(FlakyConnectionPool, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     async def connect(self, *args, **kwargs):
         self.cnn_count += 1
         if self.cnn_count > self.failing_connections:
-            return await super(FlakyConnectionPool, self).connect(*args, **kwargs)
+            return await super().connect(*args, **kwargs)
         else:
             return BrokenComm()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/threadpoolexecutor.py new/distributed-2.27.0/distributed/threadpoolexecutor.py
--- old/distributed-2.26.0/distributed/threadpoolexecutor.py    2020-08-29 00:26:28.000000000 +0200
+++ new/distributed-2.27.0/distributed/threadpoolexecutor.py    2020-09-18 22:52:06.000000000 +0200
@@ -70,7 +70,7 @@
     _counter = itertools.count()
 
     def __init__(self, *args, **kwargs):
-        super(ThreadPoolExecutor, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._rejoin_list = []
         self._rejoin_lock = threading.Lock()
         self._thread_name_prefix = kwargs.get(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/utils.py new/distributed-2.27.0/distributed/utils.py
--- old/distributed-2.26.0/distributed/utils.py 2020-08-29 00:26:28.000000000 +0200
+++ new/distributed-2.27.0/distributed/utils.py 2020-09-19 04:28:28.000000000 +0200
@@ -1150,7 +1150,7 @@
 
     def __init__(self, *args, n=10000, **kwargs):
         self.deque = deque(maxlen=n)
-        super(DequeHandler, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._instances.add(self)
 
     def emit(self, record):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed/worker.py new/distributed-2.27.0/distributed/worker.py
--- old/distributed-2.26.0/distributed/worker.py        2020-09-09 06:19:27.000000000 +0200
+++ new/distributed-2.27.0/distributed/worker.py        2020-09-18 22:52:06.000000000 +0200
@@ -627,7 +627,7 @@
             "steal-request": self.steal_request,
         }
 
-        super(Worker, self).__init__(
+        super().__init__(
             handlers=handlers,
             stream_handlers=stream_handlers,
             io_loop=self.loop,
@@ -845,8 +845,8 @@
         else:
             await asyncio.gather(
                 *[
-                    self.plugin_add(plugin=plugin)
-                    for plugin in response["worker-plugins"]
+                    self.plugin_add(**plugin_kwargs)
+                    for plugin_kwargs in response["worker-plugins"]
                 ]
             )
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/distributed.egg-info/PKG-INFO new/distributed-2.27.0/distributed.egg-info/PKG-INFO
--- old/distributed-2.26.0/distributed.egg-info/PKG-INFO        2020-09-11 23:28:19.000000000 +0200
+++ new/distributed-2.27.0/distributed.egg-info/PKG-INFO        2020-09-19 04:34:27.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.2
 Name: distributed
-Version: 2.26.0
+Version: 2.27.0
 Summary: Distributed scheduler for Dask
 Home-page: https://distributed.dask.org
 Maintainer: Matthew Rocklin
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/docs/source/changelog.rst new/distributed-2.27.0/docs/source/changelog.rst
--- old/distributed-2.26.0/docs/source/changelog.rst    2020-09-11 23:27:00.000000000 +0200
+++ new/distributed-2.27.0/docs/source/changelog.rst    2020-09-19 04:33:08.000000000 +0200
@@ -1,6 +1,16 @@
 Changelog
 =========
 
+2.27.0 - 2020-09-18
+-------------------
+
+- Fix registering a worker plugin with ``name`` arg (:pr:`4105`) `Nick Evans`_
+- Support different ``remote_python`` paths on cluster nodes (:pr:`4085`) `Abdulelah Bin Mahfoodh`_
+- Allow ``RuntimeError`` s when closing global clients (:pr:`4115`) `Matthew Rocklin`_
+- Match ``pre-commit`` in dask (:pr:`4049`) `Julia Signell`_
+- Update ``super`` usage (:pr:`4110`) `Poruri Sai Rahul`_
+
+
 2.26.0 - 2020-09-11
 -------------------
 
@@ -1954,3 +1964,4 @@
 .. _`Willi Rath`: https://github.com/willirath
 .. _`Roberto Panai`: https://github.com/rpanai
 .. _`Dror Speiser`: https://github.com/drorspei
+.. _`Poruri Sai Rahul`: https://github.com/rahulporuri
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.26.0/setup.cfg new/distributed-2.27.0/setup.cfg
--- old/distributed-2.26.0/setup.cfg    2020-09-11 23:28:19.954796800 +0200
+++ new/distributed-2.27.0/setup.cfg    2020-09-19 04:34:27.507513800 +0200
@@ -1,5 +1,5 @@
 [flake8]
-exclude = __init__.py,distributed/_concurrent_futures_thread.py
+exclude = __init__.py,versioneer.py,distributed/_concurrent_futures_thread.py
 ignore =
     E20,        # Extra space in brackets
     E231,E241,  # Multiple spaces around ","
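Note: the scheduler.py, worker.py and test_worker_plugin.py hunks above make up the fix for "registering a worker plugin with name arg" (GH#4105): the scheduler now stores each plugin together with its name, so workers that join later receive it with the same keyword arguments, and re-registering under an existing name reports "repeat" instead of running setup() again. A minimal client-side sketch patterned on the new test; the plugin class, name, and scheduler address are illustrative:

    from dask.distributed import Client

    class FooWorkerPlugin:
        def setup(self, worker):
            # Runs once on every worker, including workers added later.
            worker.foo = True

    client = Client("tcp://127.0.0.1:8786")  # placeholder scheduler address
    responses = client.register_worker_plugin(FooWorkerPlugin(), name="foo")
    # Each worker answers {"status": "OK"} the first time; registering the
    # same name again yields {"status": "repeat"} per worker.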
