This is an automated email from the ASF dual-hosted git repository.

jedcunningham pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new ddaf4c30e08 In docker-compose test, reserialize DAGs instead of looping scheduler (#47317)
ddaf4c30e08 is described below

commit ddaf4c30e089c6bb5bcd029f0f699f0271046515
Author: Jed Cunningham <[email protected]>
AuthorDate: Mon Mar 3 20:29:28 2025 -0700

    In docker-compose test, reserialize DAGs instead of looping scheduler (#47317)
    
    Especially with the DAG processor now running in a separate container,
    starting a second scheduler in the scheduler container is just asking
    for trouble and really only acts as an expensive "sleep".
---
 docker_tests/test_docker_compose_quick_start.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docker_tests/test_docker_compose_quick_start.py b/docker_tests/test_docker_compose_quick_start.py
index 9834a56e55e..846065243c3 100644
--- a/docker_tests/test_docker_compose_quick_start.py
+++ b/docker_tests/test_docker_compose_quick_start.py
@@ -98,7 +98,8 @@ def test_trigger_dag_and_wait_for_result(default_docker_image, tmp_path_factory,
     compose.down(remove_orphans=True, volumes=True, quiet=True)
     try:
         compose.up(detach=True, wait=True, color=not os.environ.get("NO_COLOR"))
-        compose.execute(service="airflow-scheduler", command=["airflow", "scheduler", "-n", "50"])
+        # Before we proceed, let's make sure our DAG has been parsed
+        compose.execute(service="airflow-dag-processor", command=["airflow", "dags", "reserialize"])
 
         api_request("PATCH", path=f"dags/{DAG_ID}", json={"is_paused": False})
         api_request(
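
For context, a minimal sketch of the resulting test flow, assuming the
python-on-whales compose client this test file already builds (the compose
file path below is illustrative, not the test's actual path):

    import os
    from python_on_whales import DockerClient

    # Illustrative compose file path; the real test points at the
    # quick-start docker-compose.yaml it sets up.
    compose = DockerClient(compose_files=["docker-compose.yaml"]).compose

    compose.down(remove_orphans=True, volumes=True, quiet=True)
    compose.up(detach=True, wait=True, color=not os.environ.get("NO_COLOR"))

    # One-shot parse/serialize of the DAG files in the dag-processor
    # container, rather than running a second scheduler as an expensive
    # "sleep" before driving the test through the API.
    compose.execute(
        service="airflow-dag-processor",
        command=["airflow", "dags", "reserialize"],
    )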
