This is an automated email from the ASF dual-hosted git repository.
gopidesu pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 1338a59be2a Add better diagnostics for E2E tests (#56742)
1338a59be2a is described below
commit 1338a59be2ab98c8b42d0110aff3bd9d1128b0d1
Author: GPK <[email protected]>
AuthorDate: Fri Oct 17 06:21:15 2025 +0100
Add better diagnostics for E2E tests (#56742)
* Add better diagnostics for E2E tests
* Update airflow-e2e-tests/tests/airflow_e2e_tests/conftest.py
Co-authored-by: Tzu-ping Chung <[email protected]>
---------
Co-authored-by: Tzu-ping Chung <[email protected]>
---
.../tests/airflow_e2e_tests/conftest.py | 35 +++++++++++++++++-----
1 file changed, 28 insertions(+), 7 deletions(-)
diff --git a/airflow-e2e-tests/tests/airflow_e2e_tests/conftest.py b/airflow-e2e-tests/tests/airflow_e2e_tests/conftest.py
index c639bffa80f..20a0dd2fcbb 100644
--- a/airflow-e2e-tests/tests/airflow_e2e_tests/conftest.py
+++ b/airflow-e2e-tests/tests/airflow_e2e_tests/conftest.py
@@ -98,15 +98,32 @@ def spin_up_airflow_environment(tmp_path_factory):
    # as it is already available and loaded using prepare_breeze_and_image step in workflow
    pull = False if DOCKER_IMAGE.startswith("ghcr.io/apache/airflow/main/") else True
- console.print(f"[blue]Spinning up airflow environment using
{DOCKER_IMAGE}")
- compose_instance = DockerCompose(tmp_dir,
compose_file_name=compose_file_names, pull=pull)
+ try:
+ console.print(f"[blue]Spinning up airflow environment using
{DOCKER_IMAGE}")
+ compose_instance = DockerCompose(tmp_dir,
compose_file_name=compose_file_names, pull=pull)
- compose_instance.start()
+ compose_instance.start()
+
+
compose_instance.wait_for(f"http://{DOCKER_COMPOSE_HOST_PORT}/api/v2/version")
+ compose_instance.exec_in_container(
+ command=["airflow", "dags", "reserialize"],
service_name="airflow-dag-processor"
+ )
+
+ except Exception:
+ console.print("[red]Failed to start docker compose")
+ _print_logs(compose_instance)
+ compose_instance.stop()
+ raise
-
compose_instance.wait_for(f"http://{DOCKER_COMPOSE_HOST_PORT}/api/v2/version")
- compose_instance.exec_in_container(
- command=["airflow", "dags", "reserialize"],
service_name="airflow-dag-processor"
- )
+
+def _print_logs(compose_instance):
+ containers = compose_instance.get_containers()
+ for container in containers:
+ service = container.Service
+ stdout, _ = compose_instance.get_logs(service)
+ console.print(f"::group:: {service} Logs")
+ console.print(f"[red]{stdout}")
+ console.print("::endgroup::")
def pytest_sessionstart(session):
@@ -143,6 +160,10 @@ def pytest_sessionfinish(session, exitstatus):
    if airflow_logs_path is not None:
        copytree(airflow_logs_path, LOGS_FOLDER, dirs_exist_ok=True)

+    # If there are any test failures, print the service logs
+    if any(r["status"] == "failed" for r in test_results):
+        _print_logs(compose_instance=compose_instance)
+
    if compose_instance:
        if not os.environ.get("SKIP_DOCKER_COMPOSE_DELETION"):
            compose_instance.stop()
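
A note on the diff above: the ::group:: and ::endgroup:: strings printed by _print_logs are GitHub Actions workflow commands; they collapse the enclosed output into an expandable section of the job log, so each service's logs appear as one collapsible block when a failure triggers the dump. Below is a minimal standalone sketch of that pattern, assuming only the testcontainers-python DockerCompose API already used in conftest.py; the print_service_logs name is hypothetical and not part of the commit.

    from testcontainers.compose import DockerCompose

    def print_service_logs(compose: DockerCompose) -> None:
        # Emit one collapsible GitHub Actions log group per compose service.
        for container in compose.get_containers():
            service = container.Service
            stdout, stderr = compose.get_logs(service)
            print(f"::group:: {service} Logs")
            print(stdout)
            print(stderr)
            print("::endgroup::")

Called from the except block or the pytest_sessionfinish hook, as the commit does with _print_logs, this keeps failed-run diagnostics readable instead of interleaving every service's output in the workflow log.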