Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-distributed for openSUSE:Factory checked in at 2024-02-16 21:42:46
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-distributed (Old)
 and      /work/SRC/openSUSE:Factory/.python-distributed.new.1815 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-distributed"

Fri Feb 16 21:42:46 2024 rev:78 rq:1146836 version:2024.2.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-distributed/python-distributed.changes   2024-01-30 18:27:12.597827249 +0100
+++ /work/SRC/openSUSE:Factory/.python-distributed.new.1815/python-distributed.changes 2024-02-16 21:42:51.006770687 +0100
@@ -1,0 +2,16 @@
+Wed Feb 14 20:38:44 UTC 2024 - Ben Greiner <c...@bnavigator.de>
+
+- Update to 2024.2.0
+  * Deprecate Dask DataFrame implementation
+  * Improved tokenization
+  * https://docs.dask.org/en/stable/changelog.html#v2024-2-0
+- Add distributed-ignore-daskdepr.patch gh#dask/distributed#8504
+- Drop python39 test flavor
+- Fix python312 tests
+
+-------------------------------------------------------------------
+Sun Feb  4 21:31:24 UTC 2024 - Ben Greiner <c...@bnavigator.de>
+
+- Add python312 test flavor
+
+-------------------------------------------------------------------

Old:
----
  distributed-2024.1.1-gh.tar.gz

New:
----
  distributed-2024.2.0-gh.tar.gz
  distributed-ignore-daskdepr.patch

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-distributed.spec ++++++
--- /var/tmp/diff_new_pack.ylduUh/_old  2024-02-16 21:42:51.718796377 +0100
+++ /var/tmp/diff_new_pack.ylduUh/_new  2024-02-16 21:42:51.718796377 +0100
@@ -18,36 +18,36 @@
 
 %define psuffix %{nil}
 %global flavor @BUILD_FLAVOR@%{nil}
-%if "%{flavor}" == "test-py39"
-%define psuffix -test-py39
-%define skip_python310 1
-%define skip_python311 1
-%define skip_python312 1
-%bcond_without test
-%endif
 %if "%{flavor}" == "test-py310"
 %define psuffix -test-py310
-%define skip_python39 1
 %define skip_python311 1
 %define skip_python312 1
 %bcond_without test
 %endif
 %if "%{flavor}" == "test-py311"
 %define psuffix -test-py311
-%define skip_python39 1
 %define skip_python310 1
 %define skip_python312 1
 %bcond_without test
 %endif
+%if "%{flavor}" == "test-py312"
+%define psuffix -test-py312
+%define skip_python310 1
+%define skip_python311 1
+%bcond_without test
+%endif
 %if "%{flavor}" == ""
 %bcond_with test
+%else
+# globally stop testing this one
+%define skip_python39 1
 %endif
 # use this to run tests with xdist in parallel, unfortunately fails server side
 %bcond_with paralleltests
 
 Name:           python-distributed%{psuffix}
 # ===> Note: python-dask MUST be updated in sync with python-distributed! <===
-Version:        2024.1.1
+Version:        2024.2.0
 Release:        0
 Summary:        Library for distributed computing with Python
 License:        BSD-3-Clause
@@ -55,6 +55,8 @@
 # SourceRepository: https://github.com/dask/distributed
 Source:         https://github.com/dask/distributed/archive/refs/tags/%{version}.tar.gz#/distributed-%{version}-gh.tar.gz
 Source99:       python-distributed-rpmlintrc
+# PATCH-FIX-UPSTREAM distributed-ignore-daskdepr.patch gh#dask/distributed#8504
+Patch0:         distributed-ignore-daskdepr.patch
 # PATCH-FIX-OPENSUSE distributed-ignore-off.patch -- ignore that we can't probe addresses on obs, c...@bnavigator.de
 Patch3:         distributed-ignore-offline.patch
 # PATCH-FIX-OPENSUSE distributed-ignore-thread-leaks.patch -- ignore leaking threads on obs, c...@bnavigator.de
@@ -167,11 +169,14 @@
 donttest+=" or (test_worker and test_worker_reconnects_mid_compute)"
 donttest+=" or (test_worker_memory and test_digests)"
 donttest+=" or (test_worker_memory and test_pause_while_spilling)"
-donttest+=" or (test_computations_futures)"
+donttest+=" or test_computations_futures"
+donttest+=" or test_task_state_instance_are_garbage_collected"
 # server-side fail due to the non-network warning in a subprocess where the patched filter does not apply
 donttest+=" or (test_client and test_quiet_close_process)"
 # should return > 3, returns 3 exactly
 donttest+=" or (test_statistical_profiling_cycle)"
+# pytest7 on py312: returns len==2 instead of 1
+donttest+=" or test_computation_object_code_dask_compute"
 # flakey on 3.10
 donttest+=" or (test_client_worker)"
 if [[ $(getconf LONG_BIT) -eq 32 ]]; then

++++++ _multibuild ++++++
--- /var/tmp/diff_new_pack.ylduUh/_old  2024-02-16 21:42:51.746797387 +0100
+++ /var/tmp/diff_new_pack.ylduUh/_new  2024-02-16 21:42:51.746797387 +0100
@@ -1,6 +1,6 @@
 <multibuild>
-  <package>test-py39</package>
   <package>test-py310</package>
   <package>test-py311</package>
+  <package>test-py312</package>
 </multibuild>
 

++++++ distributed-2024.1.1-gh.tar.gz -> distributed-2024.2.0-gh.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/.github/workflows/update-gpuci.yaml new/distributed-2024.2.0/.github/workflows/update-gpuci.yaml
--- old/distributed-2024.1.1/.github/workflows/update-gpuci.yaml        2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/.github/workflows/update-gpuci.yaml        2024-02-09 23:58:27.000000000 +0100
@@ -54,7 +54,7 @@
           regex: false
 
       - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v5
+        uses: peter-evans/create-pull-request@v6
         if: ${{ env.UCX_PY_VER != env.NEW_UCX_PY_VER }}  # make sure new ucx-py nightlies are available
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/client.py new/distributed-2024.2.0/distributed/client.py
--- old/distributed-2024.1.1/distributed/client.py      2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/client.py      2024-02-09 23:58:27.000000000 +0100
@@ -373,7 +373,7 @@
         return self.client.sync(self._exception, callback_timeout=timeout, **kwargs)
 
     def add_done_callback(self, fn):
-        """Call callback on future when callback has finished
+        """Call callback on future when future has finished
 
         The callback ``fn`` should take the future as its only argument.  This
         will be called regardless of if the future completes successfully,
@@ -3095,8 +3095,12 @@
                 module_name = fr.f_back.f_globals["__name__"]  # type: ignore
                 if module_name == "__channelexec__":
                     break  # execnet; pytest-xdist  # pragma: nocover
+                try:
+                    module_name = sys.modules[module_name].__name__
+                except KeyError:
+                    # Ignore pathological cases where the module name isn't in `sys.modules`
+                    break
                 # Ignore IPython related wrapping functions to user code
-                module_name = sys.modules[module_name].__name__
                 if module_name.endswith("interactiveshell"):
                     break
 
@@ -4924,7 +4928,10 @@
         )
 
     def register_scheduler_plugin(
-        self, plugin: SchedulerPlugin, name: str | None = None, idempotent: bool = False
+        self,
+        plugin: SchedulerPlugin,
+        name: str | None = None,
+        idempotent: bool | None = None,
     ):
         """
         Register a scheduler plugin.
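
For orientation, this hunk only relaxes the default of the idempotent argument (bool = False becomes bool | None = None). A hedged usage sketch of register_scheduler_plugin with a made-up plugin class (not taken from the package or the diff; newer releases steer users toward Client.register_plugin, as the test changes further below suggest):

    # Minimal sketch: registering a scheduler plugin; the plugin itself is illustrative.
    from distributed import Client
    from distributed.diagnostics.plugin import SchedulerPlugin

    class CountProcessing(SchedulerPlugin):
        def __init__(self):
            self.started = 0

        def transition(self, key, start, finish, *args, **kwargs):
            # Count tasks entering the "processing" state on the scheduler.
            if finish == "processing":
                self.started += 1

    client = Client()  # assumes a local cluster is acceptable for the example
    # idempotent now defaults to None instead of False, per the hunk above.
    client.register_scheduler_plugin(CountProcessing(), name="count-processing")
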
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/deploy/tests/test_cluster.py new/distributed-2024.2.0/distributed/deploy/tests/test_cluster.py
--- old/distributed-2024.1.1/distributed/deploy/tests/test_cluster.py   2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/deploy/tests/test_cluster.py   2024-02-09 23:58:27.000000000 +0100
@@ -33,13 +33,6 @@
 
 
 @gen_test()
-async def test_logs_deprecated():
-    async with Cluster(asynchronous=True) as cluster:
-        with pytest.warns(FutureWarning, match="get_logs"):
-            cluster.logs()
-
-
-@gen_test()
 async def test_cluster_wait_for_worker():
     async with LocalCluster(n_workers=2, asynchronous=True) as cluster:
         assert len(cluster.scheduler.workers) == 2
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/deploy/tests/test_local.py new/distributed-2024.2.0/distributed/deploy/tests/test_local.py
--- old/distributed-2024.1.1/distributed/deploy/tests/test_local.py     2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/deploy/tests/test_local.py     2024-02-09 23:58:27.000000000 +0100
@@ -1057,7 +1057,7 @@
             n_workers=2, processes=False, threads_per_worker=0, asynchronous=True
         ) as cluster:
             assert len(cluster.workers) == 2
-            assert all(w.nthreads < CPU_COUNT for w in cluster.workers.values())
+            assert all(w.state.nthreads < CPU_COUNT for w in cluster.workers.values())
 
 
 @pytest.mark.parametrize("temporary", [True, False])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/deploy/tests/test_spec_cluster.py new/distributed-2024.2.0/distributed/deploy/tests/test_spec_cluster.py
--- old/distributed-2024.1.1/distributed/deploy/tests/test_spec_cluster.py      2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/deploy/tests/test_spec_cluster.py      2024-02-09 23:58:27.000000000 +0100
@@ -269,7 +269,7 @@
 
 
 @gen_test()
-async def test_logs():
+async def test_get_logs():
     worker = {"cls": Worker, "options": {"nthreads": 1}}
     async with SpecCluster(
         asynchronous=True, scheduler=scheduler, worker=worker
@@ -305,6 +305,14 @@
 
 
 @gen_test()
+async def test_logs_deprecated():
+    async with SpecCluster(asynchronous=True, scheduler=scheduler) as cluster:
+        with pytest.warns(FutureWarning, match="get_logs"):
+            logs = await cluster.logs()
+    assert logs["Scheduler"]
+
+
+@gen_test()
 async def test_scheduler_info():
     async with SpecCluster(
         workers=worker_spec, scheduler=scheduler, asynchronous=True
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/diagnostics/tests/test_cudf_diagnostics.py new/distributed-2024.2.0/distributed/diagnostics/tests/test_cudf_diagnostics.py
--- old/distributed-2024.1.1/distributed/diagnostics/tests/test_cudf_diagnostics.py     2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/diagnostics/tests/test_cudf_diagnostics.py     2024-02-09 23:58:27.000000000 +0100
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import asyncio
 import os
 
 import pytest
@@ -24,22 +25,27 @@
 
     manager = get_global_manager()
 
-    # 24 bytes
+    # Allocate a new dataframe and trigger spilling by setting a 1 byte limit
     df = cudf.DataFrame({"a": [1, 2, 3]})
+    manager.spill_to_device_limit(1)
 
-    return manager.spill_to_device_limit(1)
+    # Get bytes spilled from GPU to CPU
+    spill_totals, _ = get_global_manager().statistics.spill_totals[("gpu", "cpu")]
+    return spill_totals
 
 
 @gen_cluster(
     client=True,
     nthreads=[("127.0.0.1", 1)],
 )
-@pytest.mark.flaky(reruns=10, reruns_delay=5)
 async def test_cudf_metrics(c, s, *workers):
     w = list(s.workers.values())[0]
     assert "cudf" in w.metrics
     assert w.metrics["cudf"]["cudf-spilled"] == 0
 
-    await c.run(force_spill)
-
-    assert w.metrics["cudf"]["cudf-spilled"] == 24
+    spill_totals = (await c.run(force_spill, workers=[w.address]))[w.address]
+    assert spill_totals > 0
+    # We have to wait for the worker's metrics to update.
+    # TODO: avoid sleep, is it possible to wait on the next update of metrics?
+    await asyncio.sleep(1)
+    assert w.metrics["cudf"]["cudf-spilled"] == spill_totals
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/diagnostics/tests/test_nanny_plugin.py new/distributed-2024.2.0/distributed/diagnostics/tests/test_nanny_plugin.py
--- old/distributed-2024.1.1/distributed/diagnostics/tests/test_nanny_plugin.py 2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/diagnostics/tests/test_nanny_plugin.py 2024-02-09 23:58:27.000000000 +0100
@@ -35,8 +35,12 @@
 
     n_existing_plugins = len(a.plugins)
     assert not hasattr(a, "foo")
-    with pytest.warns(UserWarning, match="`NannyPlugin` as a worker plugin"):
+    with (
+        pytest.warns(UserWarning, match="`NannyPlugin` as a worker plugin"),
+        pytest.warns(DeprecationWarning, match="please use `Client.register_plugin`"),
+    ):
         await c.register_worker_plugin(DuckPlugin(), nanny=False)
+
     assert len(a.plugins) == n_existing_plugins + 1
     assert a.foo == 123
 
@@ -52,7 +56,10 @@
 
     n_existing_plugins = len(a.plugins)
     assert not hasattr(a, "foo")
-    with pytest.warns(DeprecationWarning, match="duck-typed.*NannyPlugin"):
+    with (
+        pytest.warns(DeprecationWarning, match="duck-typed.*NannyPlugin"),
+        pytest.warns(DeprecationWarning, match="please use `Client.register_plugin`"),
+    ):
         await c.register_worker_plugin(DuckPlugin(), nanny=True)
     assert len(a.plugins) == n_existing_plugins + 1
     assert a.foo == 123
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/diagnostics/tests/test_scheduler_plugin.py new/distributed-2024.2.0/distributed/diagnostics/tests/test_scheduler_plugin.py
--- old/distributed-2024.1.1/distributed/diagnostics/tests/test_scheduler_plugin.py     2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/diagnostics/tests/test_scheduler_plugin.py     2024-02-09 23:58:27.000000000 +0100
@@ -603,7 +603,10 @@
 
     n_existing_plugins = len(s.plugins)
     assert not hasattr(s, "foo")
-    with pytest.warns(UserWarning, match="`SchedulerPlugin` as a worker plugin"):
+    with (
+        pytest.warns(UserWarning, match="`SchedulerPlugin` as a worker plugin"),
+        pytest.warns(DeprecationWarning, match="use `Client.register_plugin` instead"),
+    ):
         await c.register_worker_plugin(DuckPlugin(), nanny=False)
     assert len(s.plugins) == n_existing_plugins + 1
     assert s.foo == 123
@@ -620,7 +623,10 @@
 
     n_existing_plugins = len(s.plugins)
     assert not hasattr(s, "foo")
-    with pytest.warns(UserWarning, match="`SchedulerPlugin` as a nanny plugin"):
+    with (
+        pytest.warns(UserWarning, match="`SchedulerPlugin` as a nanny plugin"),
+        pytest.warns(DeprecationWarning, match="use `Client.register_plugin` instead"),
+    ):
         await c.register_worker_plugin(DuckPlugin(), nanny=True)
     assert len(s.plugins) == n_existing_plugins + 1
     assert s.foo == 123
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/diagnostics/tests/test_worker_plugin.py new/distributed-2024.2.0/distributed/diagnostics/tests/test_worker_plugin.py
--- old/distributed-2024.1.1/distributed/diagnostics/tests/test_worker_plugin.py        2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/diagnostics/tests/test_worker_plugin.py        2024-02-09 23:58:27.000000000 +0100
@@ -299,7 +299,10 @@
 
     n_existing_plugins = len(a.plugins)
     assert not hasattr(a, "foo")
-    with pytest.warns(UserWarning, match="`WorkerPlugin` as a nanny plugin"):
+    with (
+        pytest.warns(UserWarning, match="`WorkerPlugin` as a nanny plugin"),
+        pytest.warns(DeprecationWarning, match="use `Client.register_plugin` instead"),
+    ):
         await c.register_worker_plugin(DuckPlugin(), nanny=True)
     assert len(a.plugins) == n_existing_plugins + 1
     assert a.foo == 123
@@ -316,7 +319,10 @@
 
     n_existing_plugins = len(a.plugins)
     assert not hasattr(a, "foo")
-    with pytest.warns(DeprecationWarning, match="duck-typed.*WorkerPlugin"):
+    with (
+        pytest.warns(DeprecationWarning, match="duck-typed.*WorkerPlugin"),
+        pytest.warns(DeprecationWarning, match="use `Client.register_plugin` instead"),
+    ):
         await c.register_worker_plugin(DuckPlugin())
     assert len(a.plugins) == n_existing_plugins + 1
     assert a.foo == 123
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/tests/test_client.py new/distributed-2024.2.0/distributed/tests/test_client.py
--- old/distributed-2024.1.1/distributed/tests/test_client.py   2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/tests/test_client.py   2024-02-09 23:58:27.000000000 +0100
@@ -1008,17 +1008,23 @@
     assert all(result)
 
 
-@gen_cluster()
-async def test_two_consecutive_clients_share_results(s, a, b):
-    async with Client(s.address, asynchronous=True) as c:
-        x = c.submit(random.randint, 0, 1000, pure=True)
-        xx = await x
-
-        async with Client(s.address, asynchronous=True) as f:
-            y = f.submit(random.randint, 0, 1000, pure=True)
-            yy = await y
+@gen_cluster(client=True)
+async def test_two_consecutive_clients_share_results(c, s, a, b):
+    # Calling c.submit(random.randint) directly would cause the client to tokenize and
+    # deep-copy the global random state. Also, Client and/or Scheduler draw from the
+    # global random state, so its state (and thus, token) would be different between the
+    # two calls to submit().
+    def f():
+        return random.randint(0, 1000)
+
+    x = c.submit(f)
+    xx = await x
+
+    async with Client(s.address, asynchronous=True) as c2:
+        y = c2.submit(f)
+        yy = await y
 
-            assert xx == yy
+    assert xx == yy
 
 
 @gen_cluster(client=True)
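
The rewritten test above submits a plain helper function instead of random.randint so that the global random state never enters the task key. The key sharing between the two clients rests on dask's deterministic tokenize(); a tiny illustration of that building block (not part of the diff):

    # Identical inputs hash to identical tokens, so a pure task submitted by two
    # clients resolves to the same key and therefore the same cached result.
    from dask.base import tokenize

    assert tokenize([1, 2, 3]) == tokenize([1, 2, 3])
    assert tokenize("x") != tokenize("y")
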
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/distributed/tests/test_semaphore.py new/distributed-2024.2.0/distributed/tests/test_semaphore.py
--- old/distributed-2024.1.1/distributed/tests/test_semaphore.py        2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/distributed/tests/test_semaphore.py        2024-02-09 23:58:27.000000000 +0100
@@ -219,7 +219,8 @@
     while not semaphore_object.metrics["pending"]["t2"]:  # Wait for the pending lease
         await asyncio.sleep(0.01)
     with pytest.warns(
-        RuntimeWarning, match="Closing semaphore .* but there remain pending leases"
+        RuntimeWarning,
+        match=r"Closing semaphore .* but there remain (pending|unreleased) leases",
     ):
         await sem2.close()
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/distributed-2024.1.1/pyproject.toml new/distributed-2024.2.0/pyproject.toml
--- old/distributed-2024.1.1/pyproject.toml     2024-01-27 00:07:21.000000000 +0100
+++ new/distributed-2024.2.0/pyproject.toml     2024-02-09 23:58:27.000000000 +0100
@@ -28,7 +28,7 @@
 dependencies = [
     "click >= 8.0",
     "cloudpickle >= 1.5.0",
-    "dask == 2024.1.1",
+    "dask == 2024.2.0",
     "jinja2 >= 2.10.3",
     "locket >= 1.0.0",
     "msgpack >= 1.0.0",

++++++ distributed-ignore-daskdepr.patch ++++++
From dd916f1b90264fe8c2ce82ff0c7bdecb85306a58 Mon Sep 17 00:00:00 2001
From: James Bourbeau <jrbourb...@gmail.com>
Date: Mon, 12 Feb 2024 16:49:21 -0600
Subject: [PATCH] Ignore dask-expr warning in CI

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

Index: distributed-2024.2.0/pyproject.toml
===================================================================
--- distributed-2024.2.0.orig/pyproject.toml
+++ distributed-2024.2.0/pyproject.toml
@@ -149,6 +149,7 @@ filterwarnings = [
     # https://github.com/dask/dask/pull/10622
     '''ignore:Minimal version of pyarrow will soon be increased to 14.0.1''',
     '''ignore:the matrix subclass is not the recommended way''',
+    '''ignore:The current Dask DataFrame implementation is deprecated.*:DeprecationWarning''',
 ]
 minversion = "6"
 markers = [
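
The added filterwarnings entry silences the new Dask DataFrame deprecation warning for the whole test session. For illustration only, the rough Python-level equivalent of that ini entry (pytest applies it for us via its filterwarnings setting) would be:

    # Rough equivalent of the ini entry above, expressed with the warnings module.
    import warnings

    warnings.filterwarnings(
        "ignore",
        message="The current Dask DataFrame implementation is deprecated.*",
        category=DeprecationWarning,
    )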

++++++ distributed-ignore-offline.patch ++++++
--- /var/tmp/diff_new_pack.ylduUh/_old  2024-02-16 21:42:52.002806624 +0100
+++ /var/tmp/diff_new_pack.ylduUh/_new  2024-02-16 21:42:52.010806913 +0100
@@ -1,8 +1,8 @@
-Index: distributed-2023.9.1/pyproject.toml
+Index: distributed-2024.2.0/pyproject.toml
 ===================================================================
---- distributed-2023.9.1.orig/pyproject.toml
-+++ distributed-2023.9.1/pyproject.toml
-@@ -116,7 +116,7 @@ filterwarnings = [
+--- distributed-2024.2.0.orig/pyproject.toml
++++ distributed-2024.2.0/pyproject.toml
+@@ -117,7 +117,7 @@ filterwarnings = [
     '''ignore:unclosed transport <_SelectorSocketTransport.*:ResourceWarning''',
      '''ignore:unclosed transport <asyncio\.sslproto\..*:ResourceWarning''',
      '''ignore:unclosed cluster SSHCluster.*:ResourceWarning''',

++++++ distributed-ignore-rerun.patch ++++++
--- /var/tmp/diff_new_pack.ylduUh/_old  2024-02-16 21:42:52.022807346 +0100
+++ /var/tmp/diff_new_pack.ylduUh/_new  2024-02-16 21:42:52.026807490 +0100
@@ -1,8 +1,8 @@
-Index: distributed-2023.11.0/distributed/distributed.yaml
+Index: distributed-2024.2.0/distributed/distributed.yaml
 ===================================================================
---- distributed-2023.11.0.orig/distributed/distributed.yaml
-+++ distributed-2023.11.0/distributed/distributed.yaml
-@@ -290,6 +290,10 @@ distributed:
+--- distributed-2024.2.0.orig/distributed/distributed.yaml
++++ distributed-2024.2.0/distributed/distributed.yaml
+@@ -289,6 +289,10 @@ distributed:
        ignore-files:
          - runpy\.py  # `python -m pytest` (or other module) shell command
          - pytest  # `pytest` shell command

++++++ distributed-ignore-thread-leaks.patch ++++++
--- /var/tmp/diff_new_pack.ylduUh/_old  2024-02-16 21:42:52.034807779 +0100
+++ /var/tmp/diff_new_pack.ylduUh/_new  2024-02-16 21:42:52.038807923 +0100
@@ -1,8 +1,8 @@
-Index: distributed-2023.9.1/distributed/utils_test.py
+Index: distributed-2024.2.0/distributed/utils_test.py
 ===================================================================
---- distributed-2023.9.1.orig/distributed/utils_test.py
-+++ distributed-2023.9.1/distributed/utils_test.py
-@@ -1631,7 +1631,7 @@ def check_thread_leak():
+--- distributed-2024.2.0.orig/distributed/utils_test.py
++++ distributed-2024.2.0/distributed/utils_test.py
+@@ -1644,7 +1644,7 @@ def check_thread_leak():
      yield
  
      start = time()
