Hello community,

here is the log from the commit of package python-distributed for openSUSE:Factory checked in at 2020-05-03 22:47:04
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-distributed (Old)
 and      /work/SRC/openSUSE:Factory/.python-distributed.new.2738 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-distributed"

Sun May  3 22:47:04 2020 rev:29 rq:799810 version:2.15.2

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-distributed/python-distributed.changes    2020-04-27 23:37:41.299442454 +0200
+++ /work/SRC/openSUSE:Factory/.python-distributed.new.2738/python-distributed.changes  2020-05-03 22:47:09.539145467 +0200
@@ -1,0 +2,20 @@
+Sat May  2 20:27:24 UTC 2020 - Arun Persaud <[email protected]>
+
+- update to version 2.15.2:
+  * Connect to dashboard when address provided (GH#3758) Tom
+    Augspurger
+  * Move test_gpu_metrics test (GH#3721) Tom Augspurger
+  * Nanny closing worker on KeyboardInterrupt (GH#3747) Mads
+    R. B. Kristensen
+  * Replace OrderedDict with dict in scheduler (GH#3740) Matthew
+    Rocklin
+  * Fix exception handling typo (GH#3751) Jonas Haag
+
+- changes from version 2.15.1:
+  * Ensure BokehTornado uses prefix (GH#3746) James Bourbeau
+  * Warn if cluster closes before starting (GH#3735) Matthew Rocklin
+  * Memoryview serialisation (GH#3743) Martin Durant
+  * Allows logging config under distributed key (GH#2952) Dillon
+    Niederhut
+
+-------------------------------------------------------------------

Old:
----
  distributed-2.15.0.tar.gz

New:
----
  distributed-2.15.2.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-distributed.spec ++++++
--- /var/tmp/diff_new_pack.RCspin/_old  2020-05-03 22:47:10.695147819 +0200
+++ /var/tmp/diff_new_pack.RCspin/_new  2020-05-03 22:47:10.699147827 +0200
@@ -21,7 +21,7 @@
 # Test requires network connection
 %bcond_with     test
 Name:           python-distributed
-Version:        2.15.0
+Version:        2.15.2
 Release:        0
 Summary:        Library for distributed computing with Python
 License:        BSD-3-Clause

++++++ distributed-2.15.0.tar.gz -> distributed-2.15.2.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/PKG-INFO new/distributed-2.15.2/PKG-INFO
--- old/distributed-2.15.0/PKG-INFO     2020-04-25 06:24:36.600871600 +0200
+++ new/distributed-2.15.2/PKG-INFO     2020-05-01 16:57:55.892567900 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.2
 Name: distributed
-Version: 2.15.0
+Version: 2.15.2
 Summary: Distributed scheduler for Dask
 Home-page: https://distributed.dask.org
 Maintainer: Matthew Rocklin
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/_version.py new/distributed-2.15.2/distributed/_version.py
--- old/distributed-2.15.0/distributed/_version.py      2020-04-25 06:24:36.602284000 +0200
+++ new/distributed-2.15.2/distributed/_version.py      2020-05-01 16:57:55.893940000 +0200
@@ -8,11 +8,11 @@
 
 version_json = '''
 {
- "date": "2020-04-24T23:22:18-0500",
+ "date": "2020-05-01T09:56:24-0500",
  "dirty": false,
  "error": null,
- "full-revisionid": "4199c546154a75afa5404a7cfbaa8e864286dcf3",
- "version": "2.15.0"
+ "full-revisionid": "fdc4327bd236c5cf678d86edb0387ded2dcb8024",
+ "version": "2.15.2"
 }
 '''  # END VERSION_JSON
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/config.py new/distributed-2.15.2/distributed/config.py
--- old/distributed-2.15.0/distributed/config.py        2019-11-11 22:08:57.000000000 +0100
+++ new/distributed-2.15.2/distributed/config.py        2020-04-30 20:50:57.000000000 +0200
@@ -80,7 +80,8 @@
         "tornado": "critical",
         "tornado.application": "error",
     }
-    loggers.update(config.get("logging", {}))
+    base_config = _find_logging_config(config)
+    loggers.update(base_config.get("logging", {}))
 
     handler = logging.StreamHandler(sys.stderr)
     handler.setFormatter(
@@ -103,7 +104,8 @@
     Initialize logging using logging's "Configuration dictionary schema".
     (ref.: https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema)
     """
-    logging.config.dictConfig(config.get("logging"))
+    base_config = _find_logging_config(config)
+    logging.config.dictConfig(base_config.get("logging"))
 
 
 def _initialize_logging_file_config(config):
@@ -111,20 +113,34 @@
     Initialize logging using logging's "Configuration file format".
     (ref.: https://docs.python.org/3/howto/logging.html#configuring-logging)
     """
+    base_config = _find_logging_config(config)
     logging.config.fileConfig(
-        config.get("logging-file-config"), disable_existing_loggers=False
+        base_config.get("logging-file-config"), disable_existing_loggers=False
     )
 
 
+def _find_logging_config(config):
+    """
+    Look for the dictionary containing logging-specific configurations,
+    starting in the 'distributed' dictionary and then trying the top-level
+    """
+    logging_keys = {"logging-file-config", "logging"}
+    if logging_keys & config.get("distributed", {}).keys():
+        return config["distributed"]
+    else:
+        return config
+
+
 def initialize_logging(config):
-    if "logging-file-config" in config:
-        if "logging" in config:
+    base_config = _find_logging_config(config)
+    if "logging-file-config" in base_config:
+        if "logging" in base_config:
             raise RuntimeError(
                 "Config options 'logging-file-config' and 'logging' are 
mutually exclusive."
             )
         _initialize_logging_file_config(config)
     else:
-        log_config = config.get("logging", {})
+        log_config = base_config.get("logging", {})
         if "version" in log_config:
             # logging module mandates version to be an int
             log_config["version"] = int(log_config["version"])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/dashboard/core.py new/distributed-2.15.2/distributed/dashboard/core.py
--- old/distributed-2.15.0/distributed/dashboard/core.py        2020-04-10 02:38:04.000000000 +0200
+++ new/distributed-2.15.2/distributed/dashboard/core.py        2020-04-30 20:50:57.000000000 +0200
@@ -27,10 +27,7 @@
 
     extra = toolz.merge({"prefix": prefix}, template_variables)
 
-    apps = {
-        prefix + k.lstrip("/"): functools.partial(v, server, extra)
-        for k, v in applications.items()
-    }
+    apps = {k: functools.partial(v, server, extra) for k, v in applications.items()}
     apps = {k: Application(FunctionHandler(v)) for k, v in apps.items()}
     kwargs = dask.config.get("distributed.scheduler.dashboard.bokeh-application").copy()
     extra_websocket_origins = create_hosts_whitelist(
@@ -39,6 +36,7 @@
 
     application = BokehTornado(
         apps,
+        prefix=prefix,
         use_index=False,
         extra_websocket_origins=extra_websocket_origins,
         **kwargs,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/dashboard/tests/test_scheduler_bokeh.py new/distributed-2.15.2/distributed/dashboard/tests/test_scheduler_bokeh.py
--- old/distributed-2.15.0/distributed/dashboard/tests/test_scheduler_bokeh.py  2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/dashboard/tests/test_scheduler_bokeh.py  2020-04-30 20:50:57.000000000 +0200
@@ -8,6 +8,7 @@
 import pytest
 
 pytest.importorskip("bokeh")
+from bokeh.server.server import BokehTornado
 from tlz import first
 from tornado.httpclient import AsyncHTTPClient, HTTPRequest
 
@@ -712,3 +713,21 @@
     mbk.update()
     assert mbk.source.data["name"] == ["add", "inc"]
     assert mbk.source.data["nbytes"] == [x.nbytes, sys.getsizeof(1)]
+
+
+@gen_cluster(scheduler_kwargs={"http_prefix": "foo-bar", "dashboard": True})
+async def test_prefix_bokeh(s, a, b):
+    prefix = "foo-bar"
+    http_client = AsyncHTTPClient()
+    response = await http_client.fetch(
+        f"http://localhost:{s.http_server.port}/{prefix}/status";
+    )
+    assert response.code == 200
+    assert (
+        f'<script type="text/javascript" src="/{prefix}/static/'
+        in response.body.decode()
+    )
+
+    bokeh_app = s.http_application.applications[0]
+    assert isinstance(bokeh_app, BokehTornado)
+    assert bokeh_app.prefix == f"/{prefix}"
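
The test above exercises the fix end to end. As a standalone sketch, the same http_prefix and dashboard keyword arguments the test passes via gen_cluster can be given directly to Scheduler (ports and URLs shown are illustrative):

    import asyncio
    from distributed import Scheduler

    async def main():
        # Serve the Bokeh dashboard under the /foo-bar prefix
        async with Scheduler(http_prefix="foo-bar", dashboard=True) as s:
            print(f"dashboard at http://localhost:{s.http_server.port}/foo-bar/status")

    asyncio.run(main())
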
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/deploy/spec.py new/distributed-2.15.2/distributed/deploy/spec.py
--- old/distributed-2.15.0/distributed/deploy/spec.py   2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/deploy/spec.py   2020-04-30 20:50:57.000000000 +0200
@@ -383,7 +383,10 @@
             await future
         async with self._lock:
             with ignoring(CommClosedError):
-                await self.scheduler_comm.close(close_workers=True)
+                if self.scheduler_comm:
+                    await self.scheduler_comm.close(close_workers=True)
+                else:
+                    logger.warning("Cluster closed without starting up")
 
         await self.scheduler.close()
         for w in self._created:
@@ -603,6 +606,6 @@
 @atexit.register
 def close_clusters():
     for cluster in list(SpecCluster._instances):
-        with ignoring((gen.TimeoutError, TimeoutError)):
+        with ignoring(gen.TimeoutError, TimeoutError):
             if cluster.status != "closed":
                 cluster.close(timeout=10)
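
The second hunk is the "exception handling typo" from the changelog: ignoring() expects exception classes as separate positional arguments, and wrapping them in an extra tuple means the except clause no longer matches them. A minimal sketch with a simplified stand-in for distributed.utils.ignoring:

    from contextlib import contextmanager

    @contextmanager
    def ignoring(*exceptions):
        # Simplified stand-in for distributed.utils.ignoring
        try:
            yield
        except exceptions:
            pass

    # Correct: one class per argument; passing ((TimeoutError, ConnectionError),)
    # instead would nest a tuple inside the except clause, which Python 3 rejects.
    with ignoring(TimeoutError, ConnectionError):
        raise TimeoutError("cluster did not close in time")
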
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/nanny.py new/distributed-2.15.2/distributed/nanny.py
--- old/distributed-2.15.0/distributed/nanny.py 2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/nanny.py 2020-05-01 16:47:45.000000000 +0200
@@ -762,4 +762,6 @@
             # Loop was stopped before wait_until_closed() returned, ignore
             pass
         except KeyboardInterrupt:
-            pass
+            # At this point the loop is not running thus we have to run
+            # do_stop() explicitly.
+            loop.run_sync(do_stop)
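
Because the KeyboardInterrupt arrives after the IOLoop has already stopped, simply awaiting do_stop() would never complete; run_sync restarts the loop just long enough to drive the coroutine to completion. A minimal sketch of that Tornado pattern (do_stop here is a hypothetical stand-in for the nanny's cleanup coroutine):

    from tornado.ioloop import IOLoop

    async def do_stop():
        print("stopping worker process")

    loop = IOLoop.current()
    loop.run_sync(do_stop)  # spins the loop until do_stop() finishes
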
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/protocol/serialize.py new/distributed-2.15.2/distributed/protocol/serialize.py
--- old/distributed-2.15.0/distributed/protocol/serialize.py    2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/protocol/serialize.py    2020-04-30 20:50:57.000000000 +0200
@@ -556,7 +556,7 @@
 
 
 # Teach serialize how to handle bytestrings
-@dask_serialize.register((bytes, bytearray))
+@dask_serialize.register((bytes, bytearray, memoryview))
 def _serialize_bytes(obj):
     header = {}  # no special metadata
     frames = [obj]
@@ -568,6 +568,15 @@
     return b"".join(frames)
 
 
+@dask_deserialize.register(memoryview)
+def _serialize_memoryview(header, frames):
+    if len(frames) == 1:
+        out = frames[0]
+    else:
+        out = b"".join(frames)
+    return memoryview(out)
+
+
 #########################
 # Descend into __dict__ #
 #########################
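
With memoryview registered in both directions, a round trip through the protocol preserves the buffer contents. A minimal sketch, assuming serialize and deserialize are imported from distributed.protocol as in the tests below:

    from distributed.protocol import serialize, deserialize

    buf = memoryview(b"hello")
    header, frames = serialize(buf)    # handled by _serialize_bytes above
    out = deserialize(header, frames)  # rebuilt by the memoryview deserializer
    assert bytes(out) == b"hello"
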
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/protocol/tests/test_serialize.py new/distributed-2.15.2/distributed/protocol/tests/test_serialize.py
--- old/distributed-2.15.0/distributed/protocol/tests/test_serialize.py 2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/protocol/tests/test_serialize.py 2020-04-30 20:50:57.000000000 +0200
@@ -409,6 +409,7 @@
         ([MyObj([0, 1, 2]), 1], True),
         (tuple([MyObj(None)]), True),
         ({("x", i): MyObj(5) for i in range(100)}, True),
+        (memoryview(b"hello"), True),
     ],
 )
 def test_check_dask_serializable(data, is_serializable):
@@ -428,3 +429,12 @@
     data_out = deserialize(header, frames)
 
     assert data_in == data_out
+
+
+def test_deser_memoryview():
+    data_in = memoryview(b"hello")
+    header, frames = serialize(data_in)
+    assert header["type"] == "builtins.memoryview"
+    assert frames[0] is data_in
+    data_out = deserialize(header, frames)
+    assert data_in == data_out
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/scheduler.py new/distributed-2.15.2/distributed/scheduler.py
--- old/distributed-2.15.0/distributed/scheduler.py     2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/scheduler.py     2020-05-01 16:47:45.000000000 +0200
@@ -1,5 +1,5 @@
 import asyncio
-from collections import defaultdict, deque, OrderedDict
+from collections import defaultdict, deque
 from collections.abc import Mapping, Set
 from datetime import timedelta
 from functools import partial
@@ -1133,14 +1133,14 @@
         )
         self.start_http_server(routes, dashboard_address, default_port=8787)
 
-        if dashboard:
+        if dashboard or (dashboard is None and dashboard_address):
             try:
                 import distributed.dashboard.scheduler
             except ImportError:
                 logger.debug("To start diagnostics web server please install 
Bokeh")
             else:
                 distributed.dashboard.scheduler.connect(
-                    self.http_application, self.http_server, self, prefix=http_prefix,
+                    self.http_application, self.http_server, self, prefix=http_prefix
                 )
 
         # Communication state
@@ -1973,7 +1973,7 @@
                 ts.retries = v
 
         # Compute recommendations
-        recommendations = OrderedDict()
+        recommendations = {}
 
         for ts in sorted(runnables, key=operator.attrgetter("priority"), reverse=True):
             if ts.state == "released" and ts.run_spec:
@@ -2107,7 +2107,7 @@
                 return {}
             cts = self.tasks.get(cause)
 
-            recommendations = OrderedDict()
+            recommendations = {}
 
             if cts is not None and cts.state == "memory":  # couldn't find this
                 for ws in cts.who_has:  # TODO: this behavior is extreme
@@ -2206,7 +2206,7 @@
             ws.status = "closed"
             self.total_occupancy -= ws.occupancy
 
-            recommendations = OrderedDict()
+            recommendations = {}
 
             for ts in list(ws.processing):
                 k = ts.key
@@ -3876,7 +3876,7 @@
 
             ts.state = "waiting"
 
-            recommendations = OrderedDict()
+            recommendations = {}
 
             for dts in ts.dependencies:
                 if dts.exception_blame:
@@ -3926,7 +3926,7 @@
             if ts.has_lost_dependencies:
                 return {key: "forgotten"}
 
-            recommendations = OrderedDict()
+            recommendations = {}
 
             for dts in ts.dependencies:
                 dep = dts.key
@@ -4058,7 +4058,7 @@
 
             self.check_idle_saturated(ws)
 
-            recommendations = OrderedDict()
+            recommendations = {}
 
             self._add_to_memory(ts, ws, recommendations, **kwargs)
 
@@ -4157,7 +4157,7 @@
             if nbytes is not None:
                 ts.set_nbytes(nbytes)
 
-            recommendations = OrderedDict()
+            recommendations = {}
 
             self._remove_from_processing(ts)
 
@@ -4194,7 +4194,7 @@
                     ts.exception = "Worker holding Actor was lost"
                     return {ts.key: "erred"}  # don't try to recreate
 
-            recommendations = OrderedDict()
+            recommendations = {}
 
             for dts in ts.waiters:
                 if dts.state in ("no-worker", "processing"):
@@ -4288,7 +4288,7 @@
                     assert not ts.waiting_on
                     assert not ts.waiters
 
-            recommendations = OrderedDict()
+            recommendations = {}
 
             ts.exception = None
             ts.exception_blame = None
@@ -4362,7 +4362,7 @@
 
             ts.state = "released"
 
-            recommendations = OrderedDict()
+            recommendations = {}
 
             if ts.has_lost_dependencies:
                 recommendations[key] = "forgotten"
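
All of these hunks are the same mechanical change: the scheduler only relies on insertion order, and plain dicts preserve insertion order on CPython 3.6+ (guaranteed by the language from 3.7), so OrderedDict adds overhead without benefit. A quick illustration with hypothetical task keys:

    # Plain dicts iterate in insertion order, which is all the
    # recommendations bookkeeping needs.
    recommendations = {}
    recommendations["task-a"] = "waiting"
    recommendations["task-b"] = "processing"
    assert list(recommendations) == ["task-a", "task-b"]
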
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/tests/test_config.py new/distributed-2.15.2/distributed/tests/test_config.py
--- old/distributed-2.15.0/distributed/tests/test_config.py     2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/tests/test_config.py     2020-04-30 20:50:57.000000000 +0200
@@ -109,6 +109,45 @@
         test_logging_default()
 
 
+def test_logging_simple_under_distributed():
+    """
+    Test simple ("old-style") logging configuration under the distributed key.
+    """
+    c = {
+        "distributed": {
+            "logging": {"distributed.foo": "info", "distributed.foo.bar": 
"error"}
+        }
+    }
+    # Must test using a subprocess to avoid wrecking pre-existing configuration
+    with new_config_file(c):
+        code = """if 1:
+            import logging
+            import dask
+
+            from distributed.utils_test import captured_handler
+
+            d = logging.getLogger('distributed')
+            assert len(d.handlers) == 1
+            assert isinstance(d.handlers[0], logging.StreamHandler)
+            df = logging.getLogger('distributed.foo')
+            dfb = logging.getLogger('distributed.foo.bar')
+
+            with captured_handler(d.handlers[0]) as distributed_log:
+                df.info("1: info")
+                dfb.warning("2: warning")
+                dfb.error("3: error")
+
+            distributed_log = distributed_log.getvalue().splitlines()
+
+            assert distributed_log == [
+                "distributed.foo - INFO - 1: info",
+                "distributed.foo.bar - ERROR - 3: error",
+                ], (dask.config.config, distributed_log)
+            """
+
+        subprocess.check_call([sys.executable, "-c", code])
+
+
 def test_logging_simple():
     """
     Test simple ("old-style") logging configuration.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/tests/test_gpu_metrics.py new/distributed-2.15.2/distributed/tests/test_gpu_metrics.py
--- old/distributed-2.15.0/distributed/tests/test_gpu_metrics.py        1970-01-01 01:00:00.000000000 +0100
+++ new/distributed-2.15.2/distributed/tests/test_gpu_metrics.py        2020-05-01 16:47:45.000000000 +0200
@@ -0,0 +1,14 @@
+import pytest
+from distributed.utils_test import gen_cluster
+
+pytest.importorskip("pynvml")
+
+
+@gen_cluster()
+async def test_gpu_metrics(s, a, b):
+    from distributed.diagnostics.nvml import handles
+
+    assert "gpu" in a.metrics
+    assert len(s.workers[a.address].metrics["gpu"]["memory-used"]) == len(handles)
+    assert "gpu" in a.startup_information
+    assert len(s.workers[a.address].extra["gpu"]["name"]) == len(handles)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/tests/test_nanny.py new/distributed-2.15.2/distributed/tests/test_nanny.py
--- old/distributed-2.15.0/distributed/tests/test_nanny.py      2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/tests/test_nanny.py      2020-05-01 16:47:45.000000000 +0200
@@ -14,7 +14,7 @@
 
 import dask
 from distributed.diagnostics import SchedulerPlugin
-from distributed import Nanny, rpc, Scheduler, Worker, Client, wait
+from distributed import Nanny, rpc, Scheduler, Worker, Client, wait, worker
 from distributed.core import CommClosedError
 from distributed.metrics import time
 from distributed.protocol.pickle import dumps
@@ -502,3 +502,29 @@
             async with Client(s.address, asynchronous=True) as client:
                 config = await client.run(dask.config.get, "foo")
                 assert config[n.worker_address] == "bar"
+
+
+class KeyboardInterruptWorker(worker.Worker):
+    """A Worker that raises KeyboardInterrupt almost immediately"""
+
+    async def heartbeat(self):
+        def raise_err():
+            raise KeyboardInterrupt()
+
+        self.loop.add_callback(raise_err)
+
+
[email protected]("protocol", ["tcp", "ucx"])
[email protected]
+async def test_nanny_closed_by_keyboard_interrupt(cleanup, protocol):
+    if protocol == "ucx":  # Skip if UCX isn't available
+        pytest.importorskip("ucp")
+
+    async with Scheduler(protocol=protocol) as s:
+        async with Nanny(
+            s.address, nthreads=1, worker_class=KeyboardInterruptWorker
+        ) as n:
+            n.auto_restart = False
+            await n.process.stopped.wait()
+            # Check that the scheduler has been notified about the closed worker
+            assert len(s.workers) == 0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/tests/test_variable.py new/distributed-2.15.2/distributed/tests/test_variable.py
--- old/distributed-2.15.0/distributed/tests/test_variable.py   2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/tests/test_variable.py   2020-05-01 16:47:45.000000000 +0200
@@ -1,6 +1,6 @@
 import asyncio
 import random
-from time import sleep
+from time import sleep, monotonic
 import logging
 
 import pytest
@@ -96,10 +96,10 @@
 async def test_timeout(c, s, a, b):
     v = Variable("v")
 
-    start = IOLoop.current().time()
+    start = monotonic()
     with pytest.raises(TimeoutError):
         await v.get(timeout=0.2)
-    stop = IOLoop.current().time()
+    stop = monotonic()
 
     if WINDOWS:  # timing is weird with asyncio and Windows
         assert 0.1 < stop - start < 2.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed/tests/test_worker.py new/distributed-2.15.2/distributed/tests/test_worker.py
--- old/distributed-2.15.0/distributed/tests/test_worker.py     2020-04-25 06:03:08.000000000 +0200
+++ new/distributed-2.15.2/distributed/tests/test_worker.py     2020-05-01 16:47:45.000000000 +0200
@@ -1559,18 +1559,6 @@
     assert 8 <= b.lifetime <= 12
 
 
-@gen_cluster()
-async def test_gpu_metrics(s, a, b):
-    pytest.importorskip("pynvml")
-    from distributed.diagnostics.nvml import count
-
-    assert "gpu" in a.metrics
-    assert len(s.workers[a.address].metrics["gpu"]["memory-used"]) == count
-
-    assert "gpu" in a.startup_information
-    assert len(s.workers[a.address].extra["gpu"]["name"]) == count
-
-
 @pytest.mark.asyncio
 async def test_bad_metrics(cleanup):
     def bad_metric(w):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed.egg-info/PKG-INFO new/distributed-2.15.2/distributed.egg-info/PKG-INFO
--- old/distributed-2.15.0/distributed.egg-info/PKG-INFO        2020-04-25 06:24:35.000000000 +0200
+++ new/distributed-2.15.2/distributed.egg-info/PKG-INFO        2020-05-01 16:57:55.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.2
 Name: distributed
-Version: 2.15.0
+Version: 2.15.2
 Summary: Distributed scheduler for Dask
 Home-page: https://distributed.dask.org
 Maintainer: Matthew Rocklin
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/distributed.egg-info/SOURCES.txt new/distributed-2.15.2/distributed.egg-info/SOURCES.txt
--- old/distributed-2.15.0/distributed.egg-info/SOURCES.txt     2020-04-25 06:24:35.000000000 +0200
+++ new/distributed-2.15.2/distributed.egg-info/SOURCES.txt     2020-05-01 16:57:55.000000000 +0200
@@ -226,6 +226,7 @@
 distributed/tests/test_counter.py
 distributed/tests/test_diskutils.py
 distributed/tests/test_failed_workers.py
+distributed/tests/test_gpu_metrics.py
 distributed/tests/test_ipython.py
 distributed/tests/test_locks.py
 distributed/tests/test_metrics.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2.15.0/docs/source/changelog.rst new/distributed-2.15.2/docs/source/changelog.rst
--- old/distributed-2.15.0/docs/source/changelog.rst    2020-04-25 06:20:08.000000000 +0200
+++ new/distributed-2.15.2/docs/source/changelog.rst    2020-05-01 16:54:03.000000000 +0200
@@ -1,6 +1,25 @@
 Changelog
 =========
 
+2.15.2 - 2020-05-01
+-------------------
+
+- Connect to dashboard when address provided (:pr:`3758`) `Tom Augspurger`_
+- Move ``test_gpu_metrics test`` (:pr:`3721`) `Tom Augspurger`_
+- Nanny closing worker on ``KeyboardInterrupt`` (:pr:`3747`) `Mads R. B. Kristensen`_
+- Replace ``OrderedDict`` with ``dict`` in scheduler (:pr:`3740`) `Matthew Rocklin`_
+- Fix exception handling typo (:pr:`3751`) `Jonas Haag`_
+
+
+2.15.1 - 2020-04-28
+-------------------
+
+- Ensure ``BokehTornado`` uses prefix (:pr:`3746`) `James Bourbeau`_
+- Warn if cluster closes before starting (:pr:`3735`) `Matthew Rocklin`_
+- Memoryview serialisation (:pr:`3743`) `Martin Durant`_
+- Allows logging config under distributed key (:pr:`2952`) `Dillon Niederhut`_
+
+
 2.15.0 - 2020-04-24
 -------------------
 
@@ -1714,3 +1733,5 @@
 .. _`Rami Chowdhury`: https://github.com/necaris
 .. _`crusaderky`: https://github.com/crusaderky
 .. _`Nicholas Smith`: https://github.com/nsmith-
+.. _`Dillon Niederhut`: https://github.com/deniederhut
+.. _`Jonas Haag`: https://github.com/jonashaag

