This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new fac6aa4870 Use `python-on-whales` in docker tests (#38421)
fac6aa4870 is described below

commit fac6aa4870fc794a8be9908b59fd32f692201e98
Author: Andrey Anshin <andrey.ans...@taragol.is>
AuthorDate: Sat Mar 23 14:10:40 2024 +0400

    Use `python-on-whales` in docker tests (#38421)
---
 .../commands/release_management_commands.py        |  35 +----
 dev/breeze/src/airflow_breeze/utils/run_tests.py   |  35 +++--
 .../src/airflow_breeze/utils/virtualenv_utils.py   | 100 +++++++++++++++
 docker_tests/{constants.py => conftest.py}         |  12 +-
 docker_tests/constants.py                          |   5 +
 docker_tests/docker_tests_utils.py                 | 105 ---------------
 docker_tests/docker_utils.py                       | 141 +++++++++++++++++++++
 docker_tests/requirements.txt                      |   4 +-
 docker_tests/test_ci_image.py                      |  30 ++---
 docker_tests/test_docker_compose_quick_start.py    | 122 ++++++------------
 .../test_examples_of_prod_image_building.py        |  19 ++-
 docker_tests/test_prod_image.py                    | 120 +++++++-----------
 12 files changed, 392 insertions(+), 336 deletions(-)

diff --git 
a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py 
b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index 16bdac52b5..5d725ffa32 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -155,6 +155,7 @@ from airflow_breeze.utils.run_utils import (
 )
 from airflow_breeze.utils.shared_options import get_dry_run, get_verbose
 from airflow_breeze.utils.versions import is_pre_release
+from airflow_breeze.utils.virtualenv_utils import create_pip_command, 
create_venv
 
 argument_provider_packages = click.argument(
     "provider_packages",
@@ -449,36 +450,8 @@ def _check_sdist_to_wheel_dists(dists_info: 
tuple[DistributionPackageInfo, ...])
                 continue
 
             if not venv_created:
-                venv_path = (Path(tmp_dir_name) / ".venv").resolve().absolute()
-                venv_command_result = run_command(
-                    [sys.executable, "-m", "venv", venv_path.as_posix()],
-                    check=False,
-                    capture_output=True,
-                )
-                if venv_command_result.returncode != 0:
-                    get_console().print(
-                        f"[error]Error when initializing virtualenv in 
{venv_path.as_posix()}:[/]\n"
-                        
f"{venv_command_result.stdout}\n{venv_command_result.stderr}"
-                    )
-                python_path = venv_path / "bin" / "python"
-                if not python_path.exists():
-                    get_console().print(
-                        f"\n[errors]Python interpreter is not exist in path 
{python_path}. Exiting!\n"
-                    )
-                    sys.exit(1)
-                pip_command = (python_path.as_posix(), "-m", "pip")
-                result = run_command(
-                    [*pip_command, "install", f"pip=={AIRFLOW_PIP_VERSION}"],
-                    check=False,
-                    capture_output=True,
-                    text=True,
-                )
-                if result.returncode != 0:
-                    get_console().print(
-                        f"[error]Error when installing pip in 
{venv_path.as_posix()}[/]\n"
-                        f"{result.stdout}\n{result.stderr}"
-                    )
-                    sys.exit(1)
+                python_path = create_venv(Path(tmp_dir_name) / ".venv", 
pip_version=AIRFLOW_PIP_VERSION)
+                pip_command = create_pip_command(python_path)
                 venv_created = True
 
             returncode = _check_sdist_to_wheel(di, pip_command, 
str(tmp_dir_name))
@@ -492,7 +465,7 @@ def _check_sdist_to_wheel_dists(dists_info: 
tuple[DistributionPackageInfo, ...])
         sys.exit(1)
 
 
-def _check_sdist_to_wheel(dist_info: DistributionPackageInfo, pip_command: 
tuple[str, ...], cwd: str) -> int:
+def _check_sdist_to_wheel(dist_info: DistributionPackageInfo, pip_command: 
list[str], cwd: str) -> int:
     get_console().print(
         f"[info]Validate build wheel from sdist distribution for package 
{dist_info.package!r}.[/]"
     )
diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py 
b/dev/breeze/src/airflow_breeze/utils/run_tests.py
index 69aac4abc5..3abd58aec7 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_tests.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py
@@ -22,10 +22,15 @@ import sys
 from itertools import chain
 from subprocess import DEVNULL
 
+from airflow_breeze.global_constants import PIP_VERSION
 from airflow_breeze.utils.console import Output, get_console
 from airflow_breeze.utils.packages import get_excluded_provider_folders, 
get_suspended_provider_folders
 from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT
 from airflow_breeze.utils.run_utils import run_command
+from airflow_breeze.utils.virtualenv_utils import create_temp_venv
+
+DOCKER_TESTS_ROOT = AIRFLOW_SOURCES_ROOT / "docker_tests"
+DOCKER_TESTS_REQUIREMENTS = DOCKER_TESTS_ROOT / "requirements.txt"
 
 
 def verify_an_image(
@@ -47,19 +52,20 @@ def verify_an_image(
         return command_result.returncode, f"Testing {image_type} python 
{image_name}"
     pytest_args = ("-n", str(os.cpu_count()), "--color=yes")
     if image_type == "PROD":
-        test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / 
"test_prod_image.py"
+        test_path = DOCKER_TESTS_ROOT / "test_prod_image.py"
     else:
-        test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_ci_image.py"
+        test_path = DOCKER_TESTS_ROOT / "test_ci_image.py"
     env = os.environ.copy()
     env["DOCKER_IMAGE"] = image_name
     if slim_image:
         env["TEST_SLIM_IMAGE"] = "true"
-    command_result = run_command(
-        [sys.executable, "-m", "pytest", str(test_path), *pytest_args, 
*extra_pytest_args],
-        env=env,
-        output=output,
-        check=False,
-    )
+    with create_temp_venv(pip_version=PIP_VERSION, 
requirements_file=DOCKER_TESTS_REQUIREMENTS) as py_exe:
+        command_result = run_command(
+            [py_exe, "-m", "pytest", str(test_path), *pytest_args, 
*extra_pytest_args],
+            env=env,
+            output=output,
+            check=False,
+        )
     return command_result.returncode, f"Testing {image_type} python 
{image_name}"
 
 
@@ -73,16 +79,17 @@ def run_docker_compose_tests(
         get_console().print(f"[error]Error when inspecting PROD image: 
{command_result.returncode}[/]")
         return command_result.returncode, f"Testing docker-compose python with 
{image_name}"
     pytest_args = ("--color=yes",)
-    test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / 
"test_docker_compose_quick_start.py"
+    test_path = DOCKER_TESTS_ROOT / "test_docker_compose_quick_start.py"
     env = os.environ.copy()
     env["DOCKER_IMAGE"] = image_name
     if skip_docker_compose_deletion:
         env["SKIP_DOCKER_COMPOSE_DELETION"] = "true"
-    command_result = run_command(
-        [sys.executable, "-m", "pytest", str(test_path), *pytest_args, 
*extra_pytest_args],
-        env=env,
-        check=False,
-    )
+    with create_temp_venv(pip_version=PIP_VERSION, 
requirements_file=DOCKER_TESTS_REQUIREMENTS) as py_exe:
+        command_result = run_command(
+            [py_exe, "-m", "pytest", str(test_path), *pytest_args, 
*extra_pytest_args],
+            env=env,
+            check=False,
+        )
     return command_result.returncode, f"Testing docker-compose python with 
{image_name}"
 
 
diff --git a/dev/breeze/src/airflow_breeze/utils/virtualenv_utils.py 
b/dev/breeze/src/airflow_breeze/utils/virtualenv_utils.py
new file mode 100644
index 0000000000..0288e49b90
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/utils/virtualenv_utils.py
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import contextlib
+import sys
+import tempfile
+from pathlib import Path
+from typing import Generator
+
+from airflow_breeze.utils.console import get_console
+from airflow_breeze.utils.run_utils import run_command
+
+
+def create_pip_command(python: str | Path) -> list[str]:
+    return [python.as_posix() if hasattr(python, "as_posix") else str(python), 
"-m", "pip"]
+
+
+def create_venv(
+    venv_path: str | Path,
+    python: str | None = None,
+    pip_version: str | None = None,
+    requirements_file: str | Path | None = None,
+) -> str:
+    venv_path = Path(venv_path).resolve().absolute()
+    venv_command_result = run_command(
+        [python or sys.executable, "-m", "venv", venv_path.as_posix()],
+        check=False,
+        capture_output=True,
+    )
+    if venv_command_result.returncode != 0:
+        get_console().print(
+            f"[error]Error when initializing virtualenv in 
{venv_path.as_posix()}:[/]\n"
+            f"{venv_command_result.stdout}\n{venv_command_result.stderr}"
+        )
+        sys.exit(venv_command_result.returncode)
+    python_path = venv_path / "bin" / "python"
+    if not python_path.exists():
+        get_console().print(f"\n[error]Python interpreter does not exist in 
path {python_path}. Exiting!\n")
+        sys.exit(1)
+    pip_command = create_pip_command(python_path)
+    if pip_version:
+        result = run_command(
+            [*pip_command, "install", f"pip=={pip_version}", "-q"],
+            check=False,
+            capture_output=False,
+            text=True,
+        )
+        if result.returncode != 0:
+            get_console().print(
+                f"[error]Error when installing pip in 
{venv_path.as_posix()}[/]\n"
+                f"{result.stdout}\n{result.stderr}"
+            )
+            sys.exit(result.returncode)
+    if requirements_file:
+        requirements_file = Path(requirements_file).absolute().as_posix()
+        result = run_command(
+            [*pip_command, "install", "-r", requirements_file, "-q"],
+            check=True,
+            capture_output=False,
+            text=True,
+        )
+        if result.returncode != 0:
+            get_console().print(
+                f"[error]Error when installing packages from 
{requirements_file}[/]\n"
+                f"{result.stdout}\n{result.stderr}"
+            )
+            sys.exit(result.returncode)
+    return python_path.as_posix()
+
+
+@contextlib.contextmanager
+def create_temp_venv(
+    python: str | None = None,
+    pip_version: str | None = None,
+    requirements_file: str | Path | None = None,
+    prefix: str | None = None,
+) -> Generator[str, None, None]:
+    with tempfile.TemporaryDirectory(prefix=prefix) as tmp_dir_name:
+        yield create_venv(
+            Path(tmp_dir_name) / ".venv",
+            python=python,
+            pip_version=pip_version,
+            requirements_file=requirements_file,
+        )
diff --git a/docker_tests/constants.py b/docker_tests/conftest.py
similarity index 80%
copy from docker_tests/constants.py
copy to docker_tests/conftest.py
index db79d1d862..c570469506 100644
--- a/docker_tests/constants.py
+++ b/docker_tests/conftest.py
@@ -14,8 +14,16 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from __future__ import annotations
 
-from pathlib import Path
+import os
+
+import pytest
+
+from docker_tests.constants import DEFAULT_DOCKER_IMAGE
+
 
-SOURCE_ROOT = Path(__file__).resolve().parents[1]
+@pytest.fixture
+def default_docker_image() -> str:
+    return os.environ.get("DOCKER_IMAGE") or DEFAULT_DOCKER_IMAGE
diff --git a/docker_tests/constants.py b/docker_tests/constants.py
index db79d1d862..83c77e2fb4 100644
--- a/docker_tests/constants.py
+++ b/docker_tests/constants.py
@@ -16,6 +16,11 @@
 # under the License.
 from __future__ import annotations
 
+import os
 from pathlib import Path
 
 SOURCE_ROOT = Path(__file__).resolve().parents[1]
+
+DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.8"
+DEFAULT_DOCKER_IMAGE = 
f"ghcr.io/apache/airflow/main/prod/python{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}:latest"
+DOCKER_IMAGE = os.environ.get("DOCKER_IMAGE") or DEFAULT_DOCKER_IMAGE
diff --git a/docker_tests/docker_tests_utils.py 
b/docker_tests/docker_tests_utils.py
deleted file mode 100644
index 7eea98e9bd..0000000000
--- a/docker_tests/docker_tests_utils.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from __future__ import annotations
-
-import os
-
-from docker_tests.command_utils import run_command
-
-DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.8"
-
-docker_image = os.environ.get(
-    "DOCKER_IMAGE", 
f"ghcr.io/apache/airflow/main/prod/python{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}:latest"
-)
-
-print("Using docker image: ", docker_image)
-
-
-def run_bash_in_docker(bash_script, **kwargs):
-    docker_command = [
-        "docker",
-        "run",
-        "--rm",
-        "-e",
-        "COLUMNS=180",
-        "--entrypoint",
-        "/bin/bash",
-        docker_image,
-        "-c",
-        bash_script,
-    ]
-    return run_command(docker_command, **kwargs)
-
-
-def run_python_in_docker(python_script, **kwargs):
-    docker_command = [
-        "docker",
-        "run",
-        "--rm",
-        "-e",
-        "COLUMNS=180",
-        "-e",
-        "PYTHONDONTWRITEBYTECODE=true",
-        docker_image,
-        "python",
-        "-c",
-        python_script,
-    ]
-    return run_command(docker_command, **kwargs)
-
-
-def display_dependency_conflict_message():
-    print(
-        """
-***** Beginning of the instructions ****
-
-The image did not pass 'pip check' verification. This means that there are 
some conflicting dependencies
-in the image.
-
-It can mean one of those:
-
-1) The main is currently broken (other PRs will fail with the same error)
-2) You changed some dependencies in pyproject.toml (either manually or 
automatically by pre-commit)
-   and they are conflicting.
-
-
-
-In case 1) - apologies for the trouble.Please let committers know and they 
will fix it. You might
-be asked to rebase to the latest main after the problem is fixed.
-
-In case 2) - Follow the steps below:
-
-* try to build CI and then PROD image locally with breeze, adding 
--upgrade-to-newer-dependencies flag
-  (repeat it for all python versions)
-
-CI image:
-
-     breeze ci-image build --upgrade-to-newer-dependencies --python 3.8
-
-Production image:
-
-     breeze ci-image build --production-image --upgrade-to-newer-dependencies 
--python 3.8
-
-* You will see error messages there telling which requirements are conflicting 
and which packages caused the
-  conflict. Add the limitation that caused the conflict to 
EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS
-  variable in Dockerfile.ci. Note that the limitations might be different for 
Dockerfile.ci and Dockerfile
-  because not all packages are installed by default in the PROD Dockerfile. So 
you might find that you
-  only need to add the limitation to the Dockerfile.ci
-
-***** End of the instructions ****
-"""
-    )
diff --git a/docker_tests/docker_utils.py b/docker_tests/docker_utils.py
new file mode 100644
index 0000000000..1c9aea8a42
--- /dev/null
+++ b/docker_tests/docker_utils.py
@@ -0,0 +1,141 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from time import monotonic, sleep
+
+from python_on_whales import docker
+from python_on_whales.exceptions import NoSuchContainer
+
+from docker_tests.constants import DEFAULT_DOCKER_IMAGE
+
+
+def run_cmd_in_docker(
+    cmd: list[str] | None = None,
+    image: str | None = None,
+    entrypoint: str | None = None,
+    envs: dict[str, str] | None = None,
+    remove: bool = True,
+    **kwargs,
+):
+    cmd = cmd or []
+    envs = envs or {}
+    return docker.run(
+        image=image or os.environ.get("DOCKER_IMAGE") or DEFAULT_DOCKER_IMAGE,
+        entrypoint=entrypoint,
+        command=cmd,
+        remove=remove,
+        envs={"COLUMNS": "180", **envs},
+        **kwargs,
+    )
+
+
+def run_bash_in_docker(bash_script: str, **kwargs):
+    kwargs.pop("entrypoint", None)
+    return run_cmd_in_docker(cmd=["-c", bash_script], entrypoint="/bin/bash", 
**kwargs)
+
+
+def run_python_in_docker(python_script, **kwargs):
+    kwargs.pop("entrypoint", None)
+    envs = {"PYTHONDONTWRITEBYTECODE": "true", **kwargs.pop("envs", {})}
+    return run_cmd_in_docker(cmd=["python", "-c", python_script], envs=envs, 
**kwargs)
+
+
+def run_airflow_cmd_in_docker(cmd: list[str] | None = None, **kwargs):
+    kwargs.pop("entrypoint", None)
+    return run_cmd_in_docker(cmd=["airflow", *(cmd or [])], **kwargs)
+
+
+def display_dependency_conflict_message():
+    print(
+        """
+***** Beginning of the instructions ****
+
+The image did not pass 'pip check' verification. This means that there are 
some conflicting dependencies
+in the image.
+
+It can mean one of those:
+
+1) The main is currently broken (other PRs will fail with the same error)
+2) You changed some dependencies in pyproject.toml (either manually or 
automatically by pre-commit)
+   and they are conflicting.
+
+
+
+In case 1) - apologies for the trouble. Please let committers know and they 
will fix it. You might
+be asked to rebase to the latest main after the problem is fixed.
+
+In case 2) - Follow the steps below:
+
+* try to build CI and then PROD image locally with breeze, adding 
--upgrade-to-newer-dependencies flag
+  (repeat it for all python versions)
+
+CI image:
+
+     breeze ci-image build --upgrade-to-newer-dependencies --python 3.8
+
+Production image:
+
+     breeze ci-image build --production-image --upgrade-to-newer-dependencies 
--python 3.8
+
+* You will see error messages there telling which requirements are conflicting 
and which packages caused the
+  conflict. Add the limitation that caused the conflict to 
EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS
+  variable in Dockerfile.ci. Note that the limitations might be different for 
Dockerfile.ci and Dockerfile
+  because not all packages are installed by default in the PROD Dockerfile. So 
you might find that you
+  only need to add the limitation to the Dockerfile.ci
+
+***** End of the instructions ****
+"""
+    )
+
+
+def wait_for_container(container_id: str, timeout: int = 300):
+    print(f"Waiting for container: [{container_id}] for {timeout} more 
seconds.")
+    start_time = monotonic()
+    while True:
+        if timeout != 0 and monotonic() - start_time > timeout:
+            err_msg = f"Timeout. The operation takes longer than the maximum 
waiting time ({timeout}s)"
+            raise TimeoutError(err_msg)
+
+        try:
+            container = docker.container.inspect(container_id)
+        except NoSuchContainer:
+            msg = f"Container ID {container_id!r} not found."
+            if timeout != 0:
+                msg += f"\nWaiting for {int(timeout - (monotonic() - 
start_time))} more seconds"
+            print(msg)
+            sleep(5)
+            continue
+
+        container_msg = f"Container {container.name}[{container_id}]"
+        if (state := container.state).status in ("running", "restarting"):
+            if state.health is None or state.health.status == "healthy":
+                print(
+                    f"{container_msg}. Status: {state.status!r}. "
+                    f"Healthcheck: {state.health.status if state.health else 
'not set'!r}"
+                )
+                break
+        elif state.status == "exited":
+            print(f"{container_msg}. Status: {state.status!r}. Exit Code: 
{state.exit_code}")
+            break
+
+        msg = f"{container_msg} has state:\n {state}"
+        if timeout != 0:
+            msg += f"\nWaiting for {int(timeout - (monotonic() - start_time))} 
more seconds"
+        print(msg)
+        sleep(1)
diff --git a/docker_tests/requirements.txt b/docker_tests/requirements.txt
index 60b4005325..61a70cded4 100644
--- a/docker_tests/requirements.txt
+++ b/docker_tests/requirements.txt
@@ -2,4 +2,6 @@
 # Internal meta-task for track https://github.com/apache/airflow/issues/37156
 pytest>=7.4.4,<8.0
 pytest-xdist
-requests
+# Requests 3 if it will be released, will be heavily breaking.
+requests>=2.27.0,<3
+python-on-whales>=0.70.0
diff --git a/docker_tests/test_ci_image.py b/docker_tests/test_ci_image.py
index 7ec65c425c..393813ad16 100644
--- a/docker_tests/test_ci_image.py
+++ b/docker_tests/test_ci_image.py
@@ -16,34 +16,22 @@
 # under the License.
 from __future__ import annotations
 
-import subprocess
+from python_on_whales import DockerException
 
-from docker_tests.command_utils import run_command
-from docker_tests.docker_tests_utils import 
display_dependency_conflict_message, docker_image
+from docker_tests.docker_utils import display_dependency_conflict_message, 
run_bash_in_docker
 
 
-def test_pip_dependencies_conflict():
+def test_pip_dependencies_conflict(default_docker_image):
     try:
-        run_command(["docker", "run", "--rm", "--entrypoint", "/bin/bash", 
docker_image, "-c", "pip check"])
-    except subprocess.CalledProcessError as ex:
+        run_bash_in_docker("pip check", image=default_docker_image)
+    except DockerException:
         display_dependency_conflict_message()
-        raise ex
+        raise
 
 
 def test_providers_present():
     try:
-        run_command(
-            [
-                "docker",
-                "run",
-                "--rm",
-                "--entrypoint",
-                "/bin/bash",
-                docker_image,
-                "-c",
-                "airflow providers list",
-            ],
-        )
-    except subprocess.CalledProcessError as ex:
+        run_bash_in_docker("airflow providers list")
+    except DockerException:
         display_dependency_conflict_message()
-        raise ex
+        raise
diff --git a/docker_tests/test_docker_compose_quick_start.py 
b/docker_tests/test_docker_compose_quick_start.py
index 7c70c39f05..eab0a7a62e 100644
--- a/docker_tests/test_docker_compose_quick_start.py
+++ b/docker_tests/test_docker_compose_quick_start.py
@@ -19,18 +19,18 @@ from __future__ import annotations
 import json
 import os
 import shlex
-import subprocess
-import sys
 from pprint import pprint
 from shutil import copyfile
-from time import monotonic, sleep
+from time import sleep
 
+import pytest
 import requests
+from python_on_whales import DockerClient, docker
+from python_on_whales.exceptions import DockerException
 
 # isort:off (needed to workaround isort bug)
 from docker_tests.command_utils import run_command
 from docker_tests.constants import SOURCE_ROOT
-from docker_tests.docker_tests_utils import docker_image
 
 # isort:on (needed to workaround isort bug)
 
@@ -52,56 +52,11 @@ def api_request(method: str, path: str, base_url: str = 
"http://localhost:8080/a
     return response.json()
 
 
-def wait_for_container(container_id: str, timeout: int = 300):
-    container_name = (
-        subprocess.check_output(["docker", "inspect", container_id, 
"--format", "{{ .Name }}"])
-        .decode()
-        .strip()
-    )
-    print(f"Waiting for container: {container_name} [{container_id}] for 
{timeout} more seconds.")
-    waiting_done = False
-    start_time = monotonic()
-    while not waiting_done:
-        container_state = (
-            subprocess.check_output(["docker", "inspect", container_id, 
"--format", "{{ .State.Status }}"])
-            .decode()
-            .strip()
-        )
-        if container_state in ("running", "restarting"):
-            health_status = (
-                subprocess.check_output(
-                    [
-                        "docker",
-                        "inspect",
-                        container_id,
-                        "--format",
-                        "{{ if .State.Health }}{{ .State.Health.Status }}{{ 
else }}no-check{{ end }}",
-                    ]
-                )
-                .decode()
-                .strip()
-            )
-            current_time = monotonic()
-            print(
-                f"{container_name}: container_state={container_state}, 
health_status={health_status}. "
-                f"Waiting for {int(timeout - (current_time - start_time))} 
more seconds"
-            )
-
-            if health_status == "healthy" or health_status == "no-check":
-                waiting_done = True
-        else:
-            print(f"{container_name}: container_state={container_state}")
-            waiting_done = True
-        if timeout != 0 and monotonic() - start_time > timeout:
-            raise Exception(f"Timeout. The operation takes longer than the 
maximum waiting time ({timeout}s)")
-        sleep(1)
-
-
 def wait_for_terminal_dag_state(dag_id, dag_run_id):
     print(f" Simplified representation of DAG {dag_id} ".center(72, "="))
     pprint(api_request("GET", f"dags/{DAG_ID}/details"))
 
-    # Wait 80 seconds
+    # Wait 400 seconds
     for _ in range(400):
         dag_state = api_request("GET", 
f"dags/{dag_id}/dagRuns/{dag_run_id}").get("state")
         print(f"Waiting for DAG Run: dag_state={dag_state}")
@@ -110,11 +65,10 @@ def wait_for_terminal_dag_state(dag_id, dag_run_id):
             break
 
 
-def test_trigger_dag_and_wait_for_result(tmp_path_factory, monkeypatch):
+def test_trigger_dag_and_wait_for_result(default_docker_image, 
tmp_path_factory, monkeypatch):
     """Simple test which reproduce setup docker-compose environment and 
trigger example dag."""
     tmp_dir = tmp_path_factory.mktemp("airflow-quick-start")
-    monkeypatch.chdir(tmp_dir)
-    monkeypatch.setenv("AIRFLOW_IMAGE_NAME", docker_image)
+    monkeypatch.setenv("AIRFLOW_IMAGE_NAME", default_docker_image)
 
     compose_file_path = (
         SOURCE_ROOT / "docs" / "apache-airflow" / "howto" / "docker-compose" / 
"docker-compose.yaml"
@@ -130,19 +84,24 @@ def test_trigger_dag_and_wait_for_result(tmp_path_factory, 
monkeypatch):
     print(" .env file content ".center(72, "="))
     print(dot_env_file.read_text())
 
-    # check if docker-compose is available
-    compose_command = ["docker", "compose"]
-    success = run_command([*compose_command, "version"], check=False)
-    if not success:
-        print("ERROR: `docker compose` not available. Make sure compose plugin 
is installed")
-        sys.exit(1)
-    compose_command.extend(["--project-name", "quick-start"])
-    run_command([*compose_command, "config"])
-    run_command([*compose_command, "down", "--volumes", "--remove-orphans"])
-    run_command([*compose_command, "up", "-d", "--wait"])
-    api_request("PATCH", path=f"dags/{DAG_ID}", json={"is_paused": False})
-    api_request("POST", path=f"dags/{DAG_ID}/dagRuns", json={"dag_run_id": 
DAG_RUN_ID})
+    compose_version = None
     try:
+        compose_version = docker.compose.version()
+    except DockerException:
+        pytest.fail("`docker compose` not available. Make sure compose plugin 
is installed")
+    try:
+        docker_version = docker.version()
+    except NotImplementedError:
+        docker_version = run_command(["docker", "version"], return_output=True)
+
+    compose = DockerClient(compose_project_name="quick-start", 
compose_project_directory=tmp_dir).compose
+    compose.down(remove_orphans=True, volumes=True, quiet=True)
+    try:
+        compose.up(detach=True, wait=True, color=not 
os.environ.get("NO_COLOR"))
+
+        api_request("PATCH", path=f"dags/{DAG_ID}", json={"is_paused": False})
+        api_request("POST", path=f"dags/{DAG_ID}/dagRuns", json={"dag_run_id": 
DAG_RUN_ID})
+
         wait_for_terminal_dag_state(dag_id=DAG_ID, dag_run_id=DAG_RUN_ID)
         dag_state = api_request("GET", 
f"dags/{DAG_ID}/dagRuns/{DAG_RUN_ID}").get("state")
         assert dag_state == "success"
@@ -153,26 +112,29 @@ def 
test_trigger_dag_and_wait_for_result(tmp_path_factory, monkeypatch):
         pprint(api_request("GET", f"dags/{DAG_ID}/dagRuns"))
         print(f"HTTP: GET dags/{DAG_ID}/dagRuns/{DAG_RUN_ID}/taskInstances")
         pprint(api_request("GET", 
f"dags/{DAG_ID}/dagRuns/{DAG_RUN_ID}/taskInstances"))
-        print(f"Current working directory: {os.getcwd()}")
-        run_command(["docker", "version"])
-        run_command([*compose_command, "version"])
-        run_command(["docker", "ps"])
-        run_command([*compose_command, "logs"])
-        ps_output = run_command([*compose_command, "ps", "--format", "json"], 
return_output=True)
-        container_names = [container["Name"] for container in 
json.loads(ps_output)]
-        for container in container_names:
-            print(f"Health check for {container}")
-            result = run_command(
-                ["docker", "inspect", "--format", "{{json .State}}", 
container], return_output=True
-            )
-            pprint(json.loads(result))
+        print(" Docker Version ".center(72, "="))
+        print(docker_version)
+        print(" Docker Compose Version ".center(72, "="))
+        print(compose_version)
+        print(" Compose Config ".center(72, "="))
+        print(json.dumps(compose.config(return_json=True), indent=4))
+
+        for service in compose.ps(all=True):
+            print(f" Service: {service.name} ".center(72, "-"))
+            print(" Service State ".center(72, "."))
+            pprint(service.state)
+            print(" Service Config ".center(72, "."))
+            pprint(service.config)
+            print(" Service Logs ".center(72, "."))
+            print(service.logs())
         raise
     finally:
         if not os.environ.get("SKIP_DOCKER_COMPOSE_DELETION"):
-            run_command([*compose_command, "down", "--volumes"])
+            compose.down(remove_orphans=True, volumes=True, quiet=True)
             print("Docker compose instance deleted")
         else:
             print("Skipping docker-compose deletion")
             print()
            print("You can inspect your docker-compose by running commands 
starting with:")
-            print(" ".join([shlex.quote(arg) for arg in compose_command]))
+            quoted_command = map(shlex.quote, map(str, 
compose.docker_compose_cmd))
+            print(" ".join(quoted_command))
diff --git a/docker_tests/test_examples_of_prod_image_building.py 
b/docker_tests/test_examples_of_prod_image_building.py
index 991f9c8e2b..6931a78b79 100644
--- a/docker_tests/test_examples_of_prod_image_building.py
+++ b/docker_tests/test_examples_of_prod_image_building.py
@@ -24,6 +24,7 @@ from pathlib import Path
 
 import pytest
 import requests
+from python_on_whales import docker
 
 # isort:off (needed to workaround isort bug)
 from docker_tests.command_utils import run_command
@@ -52,17 +53,21 @@ def test_shell_script_example(script_file):
 
 
 @pytest.mark.parametrize("dockerfile", 
glob.glob(f"{DOCKER_EXAMPLES_DIR}/**/Dockerfile", recursive=True))
-def test_dockerfile_example(dockerfile):
+def test_dockerfile_example(dockerfile, tmp_path):
     rel_dockerfile_path = Path(dockerfile).relative_to(DOCKER_EXAMPLES_DIR)
     image_name = str(rel_dockerfile_path).lower().replace("/", "-")
     content = Path(dockerfile).read_text()
     test_image = os.environ.get("TEST_IMAGE", get_latest_airflow_image())
-    new_content = re.sub(r"FROM apache/airflow:.*", rf"FROM {test_image}", 
content)
+
+    test_image_file = tmp_path / image_name
+    test_image_file.write_text(re.sub(r"FROM apache/airflow:.*", rf"FROM 
{test_image}", content))
     try:
-        run_command(
-            ["docker", "build", ".", "--tag", image_name, "-f", "-"],
-            cwd=str(Path(dockerfile).parent),
-            input=new_content.encode(),
+        image = docker.build(
+            context_path=Path(dockerfile).parent,
+            tags=image_name,
+            file=test_image_file,
+            load=True,  # Load image to docker daemon
         )
+        assert image
     finally:
-        run_command(["docker", "rmi", "--force", image_name])
+        docker.image.remove(image_name, force=True)
diff --git a/docker_tests/test_prod_image.py b/docker_tests/test_prod_image.py
index ac56cc6eff..3e74dc4836 100644
--- a/docker_tests/test_prod_image.py
+++ b/docker_tests/test_prod_image.py
@@ -18,18 +18,18 @@ from __future__ import annotations
 
 import json
 import os
-import subprocess
 from importlib.util import find_spec
 from pathlib import Path
 
 import pytest
+from python_on_whales import DockerException
 
-from docker_tests.command_utils import run_command
 from docker_tests.constants import SOURCE_ROOT
-from docker_tests.docker_tests_utils import (
+from docker_tests.docker_utils import (
     display_dependency_conflict_message,
-    docker_image,
+    run_airflow_cmd_in_docker,
     run_bash_in_docker,
+    run_cmd_in_docker,
     run_python_in_docker,
 )
 
@@ -49,45 +49,36 @@ REGULAR_IMAGE_PROVIDERS = [
 
 
 class TestCommands:
-    def test_without_command(self):
+    def test_without_command(self, default_docker_image):
         """Checking the image without a command. It should return non-zero 
exit code."""
-        with pytest.raises(subprocess.CalledProcessError) as ctx:
-            run_command(["docker", "run", "--rm", "-e", "COLUMNS=180", 
docker_image])
-        assert 2 == ctx.value.returncode
-
-    def test_airflow_command(self):
-        """Checking 'airflow' command  It should return non-zero exit code."""
-        with pytest.raises(subprocess.CalledProcessError) as ctx:
-            run_command(["docker", "run", "--rm", "-e", "COLUMNS=180", 
docker_image, "airflow"])
-        assert 2 == ctx.value.returncode
-
-    def test_airflow_version(self):
-        """Checking 'airflow version' command  It should return zero exit 
code."""
-        output = run_command(
-            ["docker", "run", "--rm", "-e", "COLUMNS=180", docker_image, 
"airflow", "version"],
-            return_output=True,
-        )
+        with pytest.raises(DockerException) as ctx:
+            run_cmd_in_docker(image=default_docker_image)
+        assert 2 == ctx.value.return_code
+
+    def test_airflow_command(self, default_docker_image):
+        """Checking 'airflow' command. It should return non-zero exit code."""
+        with pytest.raises(DockerException) as ctx:
+            run_airflow_cmd_in_docker(image=default_docker_image)
+        assert 2 == ctx.value.return_code
+
+    def test_airflow_version(self, default_docker_image):
+        """Checking 'airflow version' command. It should return zero exit 
code."""
+        output = run_airflow_cmd_in_docker(["version"], 
image=default_docker_image)
         assert "2." in output
 
-    def test_python_version(self):
-        """Checking 'python --version' command  It should return zero exit 
code."""
-        output = run_command(
-            ["docker", "run", "--rm", "-e", "COLUMNS=180", docker_image, 
"python", "--version"],
-            return_output=True,
-        )
+    def test_python_version(self, default_docker_image):
+        """Checking 'python --version' command. It should return zero exit 
code."""
+        output = run_cmd_in_docker(cmd=["python", "--version"], 
image=default_docker_image)
         assert "Python 3." in output
 
-    def test_bash_version(self):
+    def test_bash_version(self, default_docker_image):
         """Checking 'bash --version' command  It should return zero exit 
code."""
-        output = run_command(
-            ["docker", "run", "--rm", "-e", "COLUMNS=180", docker_image, 
"bash", "--version"],
-            return_output=True,
-        )
+        output = run_cmd_in_docker(cmd=["bash", "--version"], 
image=default_docker_image)
         assert "GNU bash," in output
 
 
 class TestPythonPackages:
-    def test_required_providers_are_installed(self):
+    def test_required_providers_are_installed(self, default_docker_image):
         if os.environ.get("TEST_SLIM_IMAGE"):
             packages_to_install = set(SLIM_IMAGE_PROVIDERS)
             package_file = AIRFLOW_PRE_INSTALLED_PROVIDERS_FILE_PATH
@@ -95,9 +86,7 @@ class TestPythonPackages:
             packages_to_install = set(REGULAR_IMAGE_PROVIDERS)
             package_file = PROD_IMAGE_PROVIDERS_FILE_PATH
         assert len(packages_to_install) != 0
-        output = run_bash_in_docker(
-            "airflow providers list --output json", stderr=subprocess.DEVNULL, 
return_output=True
-        )
+        output = run_bash_in_docker("airflow providers list --output json", 
image=default_docker_image)
         providers = json.loads(output)
         packages_installed = set(d["package_name"] for d in providers)
         assert len(packages_installed) != 0
@@ -106,12 +95,12 @@ class TestPythonPackages:
             packages_to_install == packages_installed
         ), f"List of expected installed packages and image content mismatch. 
Check {package_file} file."
 
-    def test_pip_dependencies_conflict(self):
+    def test_pip_dependencies_conflict(self, default_docker_image):
         try:
-            run_bash_in_docker("pip check")
-        except subprocess.CalledProcessError as ex:
+            run_bash_in_docker("pip check", image=default_docker_image)
+        except DockerException:
             display_dependency_conflict_message()
-            raise ex
+            raise
 
     PACKAGE_IMPORTS = {
         "amazon": ["boto3", "botocore", "watchtower"],
@@ -179,47 +168,28 @@ class TestPythonPackages:
 
     @pytest.mark.skipif(os.environ.get("TEST_SLIM_IMAGE") == "true", 
reason="Skipped with slim image")
     @pytest.mark.parametrize("package_name,import_names", 
PACKAGE_IMPORTS.items())
-    def test_check_dependencies_imports(self, package_name, import_names):
-        run_python_in_docker(f"import {','.join(import_names)}")
+    def test_check_dependencies_imports(self, package_name, import_names, 
default_docker_image):
+        run_python_in_docker(f"import {','.join(import_names)}", 
image=default_docker_image)
 
 
 class TestExecuteAsRoot:
-    def test_execute_airflow_as_root(self):
-        run_command(
-            [
-                "docker",
-                "run",
-                "--rm",
-                "--user",
-                "0",
-                "-e",
-                "PYTHONDONTWRITEBYTECODE=true",
-                docker_image,
-                "airflow",
-                "info",
-            ]
+    def test_execute_airflow_as_root(self, default_docker_image):
+        run_cmd_in_docker(
+            cmd=["airflow", "info"],
+            user=0,
+            envs={"PYTHONDONTWRITEBYTECODE": "true"},
+            image=default_docker_image,
         )
 
-    def test_run_custom_python_packages_as_root(self, tmp_path):
+    def test_run_custom_python_packages_as_root(self, tmp_path, 
default_docker_image):
         (tmp_path / "__init__.py").write_text("")
         (tmp_path / "awesome.py").write_text('print("Awesome")')
 
-        run_command(
-            [
-                "docker",
-                "run",
-                "--rm",
-                "-e",
-                f"PYTHONPATH={tmp_path}",
-                "-e",
-                "PYTHONDONTWRITEBYTECODE=true",
-                "-v",
-                f"{tmp_path}:{tmp_path}",
-                "--user",
-                "0",
-                docker_image,
-                "python",
-                "-c",
-                "import awesome",
-            ]
+        output = run_cmd_in_docker(
+            envs={"PYTHONPATH": "/custom/mount", "PYTHONDONTWRITEBYTECODE": 
"true"},
+            volumes=[(tmp_path.as_posix(), "/custom/mount")],
+            user=0,
+            cmd=["python", "-c", "import awesome"],
+            image=default_docker_image,
         )
+        assert output.strip() == "Awesome"


Reply via email to