Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-celery for openSUSE:Factory 
checked in at 2022-07-19 17:18:57
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-celery (Old)
 and      /work/SRC/openSUSE:Factory/.python-celery.new.1523 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-celery"

Tue Jul 19 17:18:57 2022 rev:40 rq:989562 version:5.2.7

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-celery/python-celery.changes     2021-08-24 10:54:32.704346995 +0200
+++ /work/SRC/openSUSE:Factory/.python-celery.new.1523/python-celery.changes   2022-07-19 17:18:59.768337385 +0200
@@ -1,0 +2,40 @@
+Mon May  2 10:19:23 UTC 2022 - Markéta Machová <mmach...@suse.com>
+
+- Update to version 5.2.7
+  * Depend on the maintained python-consul2 library. (#6544).
+  * Use result_chord_join_timeout instead of hardcoded default value.
+  * Upgrade AzureBlockBlob storage backend to use Azure blob storage
+    library v12 (#6580).
+  * Exit celery with non zero exit value if failing (#6602).
+  * Raise BackendStoreError when set value is too large for Redis.
+  * Trace task optimizations are now set via Celery app instance.
+  * Add store_eager_result setting so eager tasks can store result on
+    the result backend (#6614).
+  * Allow heartbeats to be sent in tests (#6632).
+  * Simulate more exhaustive delivery info in apply().
+  * Start chord header tasks as soon as possible (#6576).
+  * --quiet flag now actually makes celery avoid producing logs
+    (#6599).
+  * Update platforms.py "superuser privileges" check (#6600).
+  * fnmatch.translate() already translates globs for us. (#6668).
+  * Upgrade some syntax to Python 3.6+.
+  * Fix checking expiration of X.509 certificates (#6678).
+  * Fix JSON decoding errors when using MongoDB as backend (#6675).
+  * Allow configuration of RedisBackend's health_check_interval
+    (#6666).
+  * Tasks can now have required kwargs at any order (#6699).
+  * Initial support of python 3.9 added.
+  * Add Python 3.10 support (#6807).
+  * Fix docstring for Signal.send to match code (#6835).
+  * Chords get body_type independently to handle cases where body.type
+    does not exist (#6847).
+  * Fix multithreaded backend usage (#6851).
+  * Fix Open Collective donate button (#6848).
+  * Make ResultSet.on_ready promise hold a weakref to self (#6784).
+  * Amend IRC network link to Libera (#6837).
+  * The Consul backend must correctly associate requests and responses
+    (#6823).
+- Drop upstreamed relax-click.patch
+- Add upstream tests.patch
+
+-------------------------------------------------------------------
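
Two of the changelog entries above add or generalize configuration settings (store_eager_result and result_chord_join_timeout). The short sketch below shows how such settings are typically applied to an application; it is illustrative only, the broker/backend URLs are placeholders, and the exact setting names should be verified against the celery 5.2 documentation.

    # Minimal sketch, not taken from the package.
    from celery import Celery

    app = Celery('example',
                 broker='redis://localhost:6379/0',
                 backend='redis://localhost:6379/1')

    # "Add store_eager_result setting so eager tasks can store result on the
    # result backend": with both flags set, eagerly executed tasks also write
    # their results to the backend (assumed setting name, check the docs).
    app.conf.task_always_eager = True
    app.conf.task_store_eager_result = True

    # "Use result_chord_join_timeout instead of hardcoded default value":
    # the chord join timeout is now an ordinary setting, in seconds.
    app.conf.result_chord_join_timeout = 10.0

    @app.task
    def add(x, y):
        return x + y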

Old:
----
  celery-5.0.5.tar.gz
  relax-click.patch

New:
----
  celery-5.2.7.tar.gz
  tests.patch

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-celery.spec ++++++
--- /var/tmp/diff_new_pack.1xgZMu/_old  2022-07-19 17:19:00.924338926 +0200
+++ /var/tmp/diff_new_pack.1xgZMu/_new  2022-07-19 17:19:00.928338931 +0200
@@ -1,7 +1,7 @@
 #
 # spec file
 #
-# Copyright (c) 2021 SUSE LLC
+# Copyright (c) 2022 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -28,25 +28,25 @@
 %endif
 %bcond_with ringdisabled
 Name:           python-celery%{psuffix}
-Version:        5.0.5
+Version:        5.2.7
 Release:        0
 Summary:        Distributed Task Queue module for Python
 License:        BSD-3-Clause
 URL:            http://celeryproject.org
Source:         https://files.pythonhosted.org/packages/source/c/celery/celery-%{version}.tar.gz
 Patch0:         move-pytest-configuration-to-conftest.patch
-Patch1:         relax-click.patch
+Patch1:         tests.patch
 BuildRequires:  %{python_module setuptools}
 BuildRequires:  fdupes
 BuildRequires:  netcfg
 BuildRequires:  python-rpm-macros
-Requires:       python-billiard >= 3.6.3.0
-Requires:       python-click >= 8.0
+Requires:       python-billiard >= 3.6.4
+Requires:       python-click >= 8.0.3
 Requires:       python-click-didyoumean >= 0.0.3
 Requires:       python-click-plugins >= 1.1.1
 Requires:       python-click-repl >= 0.2.0
-Requires:       python-kombu >= 5.0.0
-Requires:       python-pytz >= 2016.7
+Requires:       python-kombu >= 5.2.3
+Requires:       python-pytz >= 2021.3
 Requires:       python-vine >= 5.0.0
 Requires(post): update-alternatives
 Requires(postun):update-alternatives
@@ -59,17 +59,17 @@
 Suggests:       python-pytyrant
 BuildArch:      noarch
 %if %{with test}
-BuildRequires:  %{python_module PyYAML}
+BuildRequires:  %{python_module PyYAML >= 3.10}
 BuildRequires:  %{python_module SQLAlchemy}
 BuildRequires:  %{python_module boto3 >= 1.9.178}
 BuildRequires:  %{python_module case >= 1.3.1}
 BuildRequires:  %{python_module celery = %{version}}
-BuildRequires:  %{python_module cryptography}
-BuildRequires:  %{python_module eventlet >= 0.26.1}
+BuildRequires:  %{python_module cryptography >= 36.0.2}
+BuildRequires:  %{python_module eventlet >= 0.32.0}
 BuildRequires:  %{python_module gevent}
-BuildRequires:  %{python_module moto >= 1.3.7}
+BuildRequires:  %{python_module moto >= 2.2.6}
 BuildRequires:  %{python_module msgpack}
-BuildRequires:  %{python_module pymongo >= 3.3.0}
+BuildRequires:  %{python_module pymongo >= 4.0.2}
 BuildRequires:  %{python_module pytest >= 4.5.0}
 BuildRequires:  %{python_module pytest-subtests}
 %if %{with ringdisabled}
@@ -86,8 +86,6 @@
 %prep
 %setup -q -n celery-%{version}
 %autopatch -p1
-# do not hardcode versions
-sed -i -e 's:==:>=:g' requirements/*.txt
 
 %build
 %if !%{with test}
@@ -103,8 +101,8 @@
 
 %check
 %if %{with test}
-# test_init_mongodb_dns_seedlist - does not work with new pymongo, will be fixed in 5.1
-%pytest -k 'not test_init_mongodb_dns_seedlist'
+# test_check_privileges_no_fchown - first it deletes fchown from the system, so it needs root privileges, and then it runs the worker and complains about root privileges
+%pytest -k "not test_check_privileges_no_fchown"
 %endif
 
 %if !%{with test}

++++++ celery-5.0.5.tar.gz -> celery-5.2.7.tar.gz ++++++
++++ 23112 lines of diff (skipped)

++++++ move-pytest-configuration-to-conftest.patch ++++++
--- /var/tmp/diff_new_pack.1xgZMu/_old  2022-07-19 17:19:01.188339278 +0200
+++ /var/tmp/diff_new_pack.1xgZMu/_new  2022-07-19 17:19:01.188339278 +0200
@@ -1,15 +1,15 @@
-Index: celery-5.0.2/celery/contrib/pytest.py
+Index: celery-5.2.6/celery/contrib/pytest.py
 ===================================================================
---- celery-5.0.2.orig/celery/contrib/pytest.py
-+++ celery-5.0.2/celery/contrib/pytest.py
-@@ -13,16 +13,6 @@ NO_WORKER = os.environ.get('NO_WORKER')
+--- celery-5.2.6.orig/celery/contrib/pytest.py
++++ celery-5.2.6/celery/contrib/pytest.py
+@@ -19,16 +19,6 @@ NO_WORKER = os.environ.get('NO_WORKER')
  # Well, they're called fixtures....
  
  
 -def pytest_configure(config):
 -    """Register additional pytest configuration."""
 -    # add the pytest.mark.celery() marker registration to the pytest.ini [markers] section
--    # this prevents pytest 4.5 and newer from issueing a warning about an unknown marker
+-    # this prevents pytest 4.5 and newer from issuing a warning about an unknown marker
 -    # and shows helpful marker documentation when running pytest --markers.
 -    config.addinivalue_line(
 -        "markers", "celery(**overrides): override celery configuration for a test case"
@@ -19,12 +19,12 @@
  @contextmanager
  def _create_app(enable_logging=False,
                  use_trap=False,
-Index: celery-5.0.2/t/unit/conftest.py
+Index: celery-5.2.6/t/unit/conftest.py
 ===================================================================
---- celery-5.0.2.orig/t/unit/conftest.py
-+++ celery-5.0.2/t/unit/conftest.py
-@@ -40,6 +40,16 @@ CASE_LOG_LEVEL_EFFECT = 'Test {0} modifi
- CASE_LOG_HANDLER_EFFECT = 'Test {0} modified handlers for the root logger'
+--- celery-5.2.6.orig/t/unit/conftest.py
++++ celery-5.2.6/t/unit/conftest.py
+@@ -64,6 +64,16 @@ class WhateverIO(io.StringIO):
+         _SIO_write(self, data.decode() if isinstance(data, bytes) else data)
  
  
 +def pytest_configure(config):
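
The refreshed patch above moves the pytest_configure hook, and with it the celery marker registration, from celery/contrib/pytest.py into t/unit/conftest.py. As a rough, self-contained illustration of the same mechanism (the marker line is the one visible in the hunk; the rest is ordinary pytest boilerplate, not code from the patch):

    # conftest.py -- illustrative sketch only.
    # Registering the marker keeps pytest >= 4.5 from warning about an unknown
    # "celery" marker and documents it under `pytest --markers`.
    def pytest_configure(config):
        config.addinivalue_line(
            "markers",
            "celery(**overrides): override celery configuration for a test case"
        )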

++++++ tests.patch ++++++
From 9e324caaa6b175d8e51d3582378b78757e66a12d Mon Sep 17 00:00:00 2001
From: dobosevych <dobosev...@users.noreply.github.com>
Date: Thu, 14 Apr 2022 18:22:33 +0300
Subject: [PATCH] Integration test fix (#7460)

* Integration debugging

* Integration debugging

* Integration debugging

* Commented tasks that aren't working

* Fixed test_inspect.py

* Fixed serialization test_canvas.py

* Request fixes

* Setup full pipeline

* Setup full pipeline

* Setup full pipeline

* Setup python-package.yml

* Setup python-package.yml

* Added 3.10 to integration tests

* test_task.py fixed

* test_generator fixed

* Added parametrization to test_generation

* fixed test_generator

* Reverted encoding in test_canvas.py

* Rollback codecov

* Retries now respect additional options.

Previously, expires and other options were not merged with
the current task's options. This commit fixes the issue.

Co-authored-by: Omer Katz <omer.k...@kcg.tech>
---
 celery/app/task.py                   |  2 +-
 celery/canvas.py                     | 13 +++++---
 celery/contrib/pytest.py             |  2 +-
 celery/worker/request.py             |  2 +-
 requirements/test-integration.txt    |  1 +
 t/integration/tasks.py               |  7 +++--
 t/integration/test_canvas.py         | 19 ++++++------
 t/integration/test_tasks.py          | 11 +++++--
 9 files changed, 79 insertions(+), 24 deletions(-)
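
The first hunk below (celery/app/task.py) is the core of the "retries now respect additional options" fix: options supplied for the retry are merged over the execution options recorded on the current request, so later keys win. A tiny, hypothetical illustration of that merge semantics (values invented for the example):

    # Hypothetical values; the point is only the precedence of
    # {**request_options, **extra_options} as used in the hunk below.
    request_options = {'expires': None, 'priority': 7, 'queue': 'default'}
    extra_options = {'expires': 60}

    options = {**request_options, **extra_options}
    assert options == {'expires': 60, 'priority': 7, 'queue': 'default'}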

Index: celery-5.2.6/celery/app/task.py
===================================================================
--- celery-5.2.6.orig/celery/app/task.py
+++ celery-5.2.6/celery/app/task.py
@@ -605,7 +605,7 @@ class Task:
         request = self.request if request is None else request
         args = request.args if args is None else args
         kwargs = request.kwargs if kwargs is None else kwargs
-        options = request.as_execution_options()
+        options = {**request.as_execution_options(), **extra_options}
         delivery_info = request.delivery_info or {}
         priority = delivery_info.get('priority')
         if priority is not None:
Index: celery-5.2.6/celery/canvas.py
===================================================================
--- celery-5.2.6.orig/celery/canvas.py
+++ celery-5.2.6/celery/canvas.py
@@ -26,7 +26,7 @@ from celery.utils import abstract
 from celery.utils.collections import ChainMap
 from celery.utils.functional import _regen
 from celery.utils.functional import chunks as _chunks
-from celery.utils.functional import (is_list, lookahead, maybe_list, regen,
+from celery.utils.functional import (is_list, maybe_list, regen,
                                      seq_concat_item, seq_concat_seq)
 from celery.utils.objects import getitem_property
 from celery.utils.text import remove_repeating_from_task, truncate
@@ -1184,9 +1184,11 @@ class group(Signature):
             # next_task is None.  This enables us to set the chord size
             # without burning through the entire generator.  See #3021.
             chord_size = 0
-            for task_index, (current_task, next_task) in enumerate(
-                lookahead(tasks)
-            ):
+            tasks_shifted, tasks = itertools.tee(tasks)
+            next(tasks_shifted, None)
+            next_task = next(tasks_shifted, None)
+
+            for task_index, current_task in enumerate(tasks):
                 # We expect that each task must be part of the same group which
                 # seems sensible enough. If that's somehow not the case we'll
                 # end up messing up chord counts and there are all sorts of
@@ -1212,6 +1214,7 @@ class group(Signature):
                 if p and not p.cancelled and not p.ready:
                     p.size += 1
                     res.then(p, weak=True)
+                next_task = next(tasks_shifted, None)
                 yield res  # <-- r.parent, etc set in the frozen result.
 
     def _freeze_gid(self, options):
@@ -1249,7 +1252,7 @@ class group(Signature):
             # we freeze all tasks in the clone tasks1, and then zip the results
             # with the IDs of tasks in the second clone, tasks2. and then, we build
             # a generator that takes only the task IDs from tasks2.
-            self.tasks = regen(x[0] for x in zip(tasks2, results))
+            self.tasks = regen(tasks2)
         else:
             new_tasks = []
             # Need to unroll subgroups early so that chord gets the
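
The canvas.py hunks above drop celery.utils.functional.lookahead in favour of a pair of iterators from itertools.tee, so the group can peek at the next task without consuming the underlying generator. A standalone sketch of that pattern, assuming nothing about celery itself (function name is invented):

    import itertools

    def iter_with_lookahead(tasks):
        # Duplicate the iterator and advance one copy by a single element so
        # it always yields the *next* item (or None at the end).
        tasks_shifted, tasks = itertools.tee(tasks)
        next(tasks_shifted, None)
        next_task = next(tasks_shifted, None)
        for current_task in tasks:
            yield current_task, next_task
            next_task = next(tasks_shifted, None)

    pairs = list(iter_with_lookahead(iter(['a', 'b', 'c'])))
    assert pairs == [('a', 'b'), ('b', 'c'), ('c', None)]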
Index: celery-5.2.6/celery/contrib/pytest.py
===================================================================
--- celery-5.2.6.orig/celery/contrib/pytest.py
+++ celery-5.2.6/celery/contrib/pytest.py
@@ -88,7 +88,7 @@ def celery_session_worker(
         for module in celery_includes:
             celery_session_app.loader.import_task_module(module)
         for class_task in celery_class_tasks:
-            celery_session_app.tasks.register(class_task)
+            celery_session_app.register_task(class_task)
         with worker.start_worker(celery_session_app,
                                  pool=celery_worker_pool,
                                  **celery_worker_parameters) as w:
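
This hunk (and the matching one in t/integration/test_tasks.py further down) replaces app.tasks.register(...) with app.register_task(...), which binds a class-based task to the app before adding it to the registry. A rough usage sketch with an illustrative task class, not taken from celery's test suite:

    from celery import Celery, Task

    app = Celery('example')

    class AddTask(Task):
        # Class-based tasks need an explicit name.
        name = 'example.add'

        def run(self, x, y):
            return x + y

    # register_task() binds the instance to `app` and registers it, which is
    # what the patched fixtures rely on.
    app.register_task(AddTask())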
Index: celery-5.2.6/celery/worker/request.py
===================================================================
--- celery-5.2.6.orig/celery/worker/request.py
+++ celery-5.2.6/celery/worker/request.py
@@ -155,7 +155,7 @@ class Request:
             'exchange': delivery_info.get('exchange'),
             'routing_key': delivery_info.get('routing_key'),
             'priority': properties.get('priority'),
-            'redelivered': delivery_info.get('redelivered'),
+            'redelivered': delivery_info.get('redelivered', False),
         }
         self._request_dict.update({
             'properties': properties,
Index: celery-5.2.6/requirements/test-integration.txt
===================================================================
--- celery-5.2.6.orig/requirements/test-integration.txt
+++ celery-5.2.6/requirements/test-integration.txt
@@ -3,3 +3,4 @@
 -r extras/auth.txt
 -r extras/memcache.txt
 pytest-rerunfailures>=6.0
+git+https://github.com/celery/kombu.git
Index: celery-5.2.6/t/integration/tasks.py
===================================================================
--- celery-5.2.6.orig/t/integration/tasks.py
+++ celery-5.2.6/t/integration/tasks.py
@@ -197,16 +197,17 @@ def retry(self, return_value=None):
     raise self.retry(exc=ExpectedException(), countdown=5)
 
 
-@shared_task(bind=True, expires=60.0, max_retries=1)
-def retry_once(self, *args, expires=60.0, max_retries=1, countdown=0.1):
+@shared_task(bind=True, expires=120.0, max_retries=1)
+def retry_once(self, *args, expires=None, max_retries=1, countdown=0.1):
     """Task that fails and is retried. Returns the number of retries."""
     if self.request.retries:
         return self.request.retries
     raise self.retry(countdown=countdown,
+                     expires=expires,
                      max_retries=max_retries)
 
 
-@shared_task(bind=True, expires=60.0, max_retries=1)
+@shared_task(bind=True, max_retries=1)
 def retry_once_priority(self, *args, expires=60.0, max_retries=1,
                         countdown=0.1):
     """Task that fails and is retried. Returns the priority."""
Index: celery-5.2.6/t/integration/test_canvas.py
===================================================================
--- celery-5.2.6.orig/t/integration/test_canvas.py
+++ celery-5.2.6/t/integration/test_canvas.py
@@ -124,7 +124,7 @@ class test_link_error:
         )
         assert result.get(timeout=TIMEOUT, propagate=False) == exception
 
-    @flaky
+    @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout instead of returning exception")
     def test_link_error_callback_retries(self):
         exception = ExpectedException("Task expected to fail", "test")
         result = fail.apply_async(
@@ -144,7 +144,7 @@ class test_link_error:
         assert (fail.apply().get(timeout=TIMEOUT, propagate=False), True) == (
             exception, True)
 
-    @flaky
+    @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout instead of returning exception")
     def test_link_error_using_signature(self):
         fail = signature('t.integration.tasks.fail', args=("test",))
         retrun_exception = signature('t.integration.tasks.return_exception')
@@ -179,7 +179,7 @@ class test_chain:
         res = c()
         assert res.get(timeout=TIMEOUT) == [64, 65, 66, 67]
 
-    @flaky
+    @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout")
     def test_group_results_in_chain(self, manager):
         # This adds in an explicit test for the special case added in commit
         # 1e3fcaa969de6ad32b52a3ed8e74281e5e5360e6
@@ -477,7 +477,7 @@ class test_chain:
         res = c()
         assert res.get(timeout=TIMEOUT) == [8, 8]
 
-    @flaky
+    @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout")
     def test_nested_chain_group_lone(self, manager):
         """
         Test that a lone group in a chain completes.
@@ -1233,7 +1233,7 @@ class test_chord:
         result = c()
         assert result.get(timeout=TIMEOUT) == 4
 
-    @flaky
+    @pytest.mark.xfail(reason="async_results aren't performed in async way")
     def test_redis_subscribed_channels_leak(self, manager):
         if not manager.app.conf.result_backend.startswith('redis'):
             raise pytest.skip('Requires redis result backend.')
@@ -1566,11 +1566,12 @@ class test_chord:
                    ) == 1
 
     @flaky
-    def test_generator(self, manager):
+    @pytest.mark.parametrize('size', [3, 4, 5, 6, 7, 8, 9])
+    def test_generator(self, manager, size):
         def assert_generator(file_name):
-            for i in range(3):
+            for i in range(size):
                 sleep(1)
-                if i == 2:
+                if i == size - 1:
                     with open(file_name) as file_handle:
                         # ensures chord header generators tasks are processed incrementally #3021
                         assert file_handle.readline() == '0\n', "Chord header was unrolled too early"
@@ -1579,7 +1580,7 @@ class test_chord:
         with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_file:
             file_name = tmp_file.name
             c = chord(assert_generator(file_name), tsum.s())
-            assert c().get(timeout=TIMEOUT) == 3
+            assert c().get(timeout=TIMEOUT) == size * (size - 1) // 2
 
     @flaky
     def test_parallel_chords(self, manager):
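
For the parametrized test_generator hunk above: the chord header evidently produces the integers 0 through size - 1 (the original range(3) case summed to 3) and the body sums them, hence the expected value size * (size - 1) // 2. A quick check of that closed form, outside of celery:

    for size in [3, 4, 5, 6, 7, 8, 9]:
        assert sum(range(size)) == size * (size - 1) // 2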
Index: celery-5.2.6/t/integration/test_tasks.py
===================================================================
--- celery-5.2.6.orig/t/integration/test_tasks.py
+++ celery-5.2.6/t/integration/test_tasks.py
@@ -29,7 +29,7 @@ class test_class_based_tasks:
     def test_class_based_task_retried(self, celery_session_app,
                                       celery_session_worker):
         task = ClassBasedAutoRetryTask()
-        celery_session_app.tasks.register(task)
+        celery_session_app.register_task(task)
         res = task.delay()
         assert res.get(timeout=TIMEOUT) == 1
 
@@ -255,12 +255,17 @@ class test_tasks:
         manager.assert_accepted([r1.id])
 
     @flaky
-    def test_task_retried(self):
+    def test_task_retried_once(self, manager):
         res = retry_once.delay()
         assert res.get(timeout=TIMEOUT) == 1  # retried once
 
     @flaky
-    def test_task_retried_priority(self):
+    def test_task_retried_once_with_expires(self, manager):
+        res = retry_once.delay(expires=60)
+        assert res.get(timeout=TIMEOUT) == 1  # retried once
+
+    @flaky
+    def test_task_retried_priority(self, manager):
         res = retry_once_priority.apply_async(priority=7)
         assert res.get(timeout=TIMEOUT) == 7  # retried once with priority 7
 
