This is an automated email from the ASF dual-hosted git repository.
taragolis pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new ae56caea92 Use tmp_path in cli/command tests (#33709)
ae56caea92 is described below
commit ae56caea921de486113f13bd9182d854ce0ac023
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Thu Aug 24 22:14:31 2023 +0000
Use tmp_path in cli/command tests (#33709)
---
tests/cli/commands/test_celery_command.py | 25 +++--
tests/cli/commands/test_dag_command.py | 13 ++-
tests/cli/commands/test_internal_api_command.py | 124 ++++++++++++------------
tests/cli/commands/test_kubernetes_command.py | 38 ++++----
tests/cli/commands/test_task_command.py | 90 +++++++++--------
tests/cli/commands/test_variable_command.py | 41 +++-----
tests/cli/commands/test_webserver_command.py | 53 +++++-----
7 files changed, 181 insertions(+), 203 deletions(-)
diff --git a/tests/cli/commands/test_celery_command.py
b/tests/cli/commands/test_celery_command.py
index b97278b17a..ae968f1171 100644
--- a/tests/cli/commands/test_celery_command.py
+++ b/tests/cli/commands/test_celery_command.py
@@ -18,8 +18,8 @@
from __future__ import annotations
import importlib
+import os
from argparse import Namespace
-from tempfile import NamedTemporaryFile
from unittest import mock
import pytest
@@ -72,22 +72,21 @@ class TestCeleryStopCommand:
@mock.patch("airflow.cli.commands.celery_command.setup_locations")
@mock.patch("airflow.cli.commands.celery_command.psutil.Process")
- def test_if_right_pid_is_read(self, mock_process, mock_setup_locations):
+ def test_if_right_pid_is_read(self, mock_process, mock_setup_locations,
tmp_path):
args = self.parser.parse_args(["celery", "stop"])
pid = "123"
+ path = tmp_path / "testfile"
+ # Create pid file
+ path.write_text(pid)
+ # Setup mock
+ mock_setup_locations.return_value = (os.fspath(path), None, None, None)
# Calling stop_worker should delete the temporary pid file
- with pytest.raises(FileNotFoundError):
- with NamedTemporaryFile("w+") as f:
- # Create pid file
- f.write(pid)
- f.flush()
- # Setup mock
- mock_setup_locations.return_value = (f.name, None, None, None)
- # Check if works as expected
- celery_command.stop_worker(args)
- mock_process.assert_called_once_with(int(pid))
- mock_process.return_value.terminate.assert_called_once_with()
+ celery_command.stop_worker(args)
+ # Check if works as expected
+ assert not path.exists()
+ mock_process.assert_called_once_with(int(pid))
+ mock_process.return_value.terminate.assert_called_once_with()
@mock.patch("airflow.cli.commands.celery_command.read_pid_from_pidfile")
@mock.patch("airflow.providers.celery.executors.celery_executor.app")
diff --git a/tests/cli/commands/test_dag_command.py
b/tests/cli/commands/test_dag_command.py
index f45e877840..5cd09cd9c3 100644
--- a/tests/cli/commands/test_dag_command.py
+++ b/tests/cli/commands/test_dag_command.py
@@ -21,7 +21,6 @@ import contextlib
import io
import json
import os
-import tempfile
from datetime import datetime, timedelta
from unittest import mock
from unittest.mock import MagicMock
@@ -701,17 +700,17 @@ class TestCliDags:
self.parser.parse_args(["dags", "delete",
"does_not_exist_dag", "--yes"]),
)
- def test_delete_dag_existing_file(self):
+ def test_delete_dag_existing_file(self, tmp_path):
# Test to check that the DAG should be deleted even if
# the file containing it is not deleted
+ path = tmp_path / "testfile"
DM = DagModel
key = "my_dag_id"
session = settings.Session()
- with tempfile.NamedTemporaryFile() as f:
- session.add(DM(dag_id=key, fileloc=f.name))
- session.commit()
- dag_command.dag_delete(self.parser.parse_args(["dags", "delete",
key, "--yes"]))
- assert session.query(DM).filter_by(dag_id=key).count() == 0
+ session.add(DM(dag_id=key, fileloc=os.fspath(path)))
+ session.commit()
+ dag_command.dag_delete(self.parser.parse_args(["dags", "delete", key,
"--yes"]))
+ assert session.query(DM).filter_by(dag_id=key).count() == 0
def test_cli_list_jobs(self):
args = self.parser.parse_args(["dags", "list-jobs"])
diff --git a/tests/cli/commands/test_internal_api_command.py
b/tests/cli/commands/test_internal_api_command.py
index 472aab5b1f..58ebfe72b7 100644
--- a/tests/cli/commands/test_internal_api_command.py
+++ b/tests/cli/commands/test_internal_api_command.py
@@ -16,11 +16,10 @@
# under the License.
from __future__ import annotations
+import os
import subprocess
import sys
-import tempfile
import time
-from pathlib import Path
from unittest import mock
import psutil
@@ -89,66 +88,67 @@ class TestCliInternalAPI(_ComonCLIGunicornTestClass):
main_process_regexp = r"airflow internal-api"
@pytest.mark.execution_timeout(210)
- def test_cli_internal_api_background(self):
- with tempfile.TemporaryDirectory(prefix="gunicorn") as tmpdir:
- pidfile_internal_api = f"{tmpdir}/pidflow-internal-api.pid"
- pidfile_monitor = f"{tmpdir}/pidflow-internal-api-monitor.pid"
- stdout = f"{tmpdir}/airflow-internal-api.out"
- stderr = f"{tmpdir}/airflow-internal-api.err"
- logfile = f"{tmpdir}/airflow-internal-api.log"
- try:
- # Run internal-api as daemon in background. Note that the wait
method is not called.
- console.print("[magenta]Starting airflow internal-api
--daemon")
- proc = subprocess.Popen(
- [
- "airflow",
- "internal-api",
- "--daemon",
- "--pid",
- pidfile_internal_api,
- "--stdout",
- stdout,
- "--stderr",
- stderr,
- "--log-file",
- logfile,
- ]
- )
- assert proc.poll() is None
-
- pid_monitor = self._wait_pidfile(pidfile_monitor)
- console.print(f"[blue]Monitor started at {pid_monitor}")
- pid_internal_api = self._wait_pidfile(pidfile_internal_api)
- console.print(f"[blue]Internal API started at
{pid_internal_api}")
- console.print("[blue]Running airflow internal-api process:")
- # Assert that the internal-api and gunicorn processes are
running (by name rather than pid).
- assert self._find_process(r"airflow internal-api --daemon",
print_found_process=True)
- console.print("[blue]Waiting for gunicorn processes:")
- # wait for gunicorn to start
- for i in range(30):
- if self._find_process(r"^gunicorn"):
- break
- console.print("[blue]Waiting for gunicorn to start ...")
- time.sleep(1)
- console.print("[blue]Running gunicorn processes:")
- assert self._find_all_processes("^gunicorn",
print_found_process=True)
- console.print("[magenta]Internal-api process started
successfully.")
- console.print(
- "[magenta]Terminating monitor process and expect "
- "internal-api and gunicorn processes to terminate as well"
- )
- proc = psutil.Process(pid_monitor)
- proc.terminate()
- assert proc.wait(120) in (0, None)
- self._check_processes(ignore_running=False)
- console.print("[magenta]All internal-api and gunicorn
processes are terminated.")
- except Exception:
- console.print("[red]Exception occurred. Dumping all logs.")
- # Dump all logs
- for file in Path(tmpdir).glob("*"):
- console.print(f"Dumping {file} (size:
{file.stat().st_size})")
- console.print(file.read_text())
- raise
+ def test_cli_internal_api_background(self, tmp_path):
+ parent_path = tmp_path / "gunicorn"
+ parent_path.mkdir()
+ pidfile_internal_api = parent_path / "pidflow-internal-api.pid"
+ pidfile_monitor = parent_path / "pidflow-internal-api-monitor.pid"
+ stdout = parent_path / "airflow-internal-api.out"
+ stderr = parent_path / "airflow-internal-api.err"
+ logfile = parent_path / "airflow-internal-api.log"
+ try:
+ # Run internal-api as daemon in background. Note that the wait
method is not called.
+ console.print("[magenta]Starting airflow internal-api --daemon")
+ proc = subprocess.Popen(
+ [
+ "airflow",
+ "internal-api",
+ "--daemon",
+ "--pid",
+ os.fspath(pidfile_internal_api),
+ "--stdout",
+ os.fspath(stdout),
+ "--stderr",
+ os.fspath(stderr),
+ "--log-file",
+ os.fspath(logfile),
+ ]
+ )
+ assert proc.poll() is None
+
+ pid_monitor = self._wait_pidfile(pidfile_monitor)
+ console.print(f"[blue]Monitor started at {pid_monitor}")
+ pid_internal_api = self._wait_pidfile(pidfile_internal_api)
+ console.print(f"[blue]Internal API started at {pid_internal_api}")
+ console.print("[blue]Running airflow internal-api process:")
+ # Assert that the internal-api and gunicorn processes are running
(by name rather than pid).
+ assert self._find_process(r"airflow internal-api --daemon",
print_found_process=True)
+ console.print("[blue]Waiting for gunicorn processes:")
+ # wait for gunicorn to start
+ for i in range(30):
+ if self._find_process(r"^gunicorn"):
+ break
+ console.print("[blue]Waiting for gunicorn to start ...")
+ time.sleep(1)
+ console.print("[blue]Running gunicorn processes:")
+ assert self._find_all_processes("^gunicorn",
print_found_process=True)
+ console.print("[magenta]Internal-api process started
successfully.")
+ console.print(
+ "[magenta]Terminating monitor process and expect "
+ "internal-api and gunicorn processes to terminate as well"
+ )
+ proc = psutil.Process(pid_monitor)
+ proc.terminate()
+ assert proc.wait(120) in (0, None)
+ self._check_processes(ignore_running=False)
+ console.print("[magenta]All internal-api and gunicorn processes
are terminated.")
+ except Exception:
+ console.print("[red]Exception occurred. Dumping all logs.")
+ # Dump all logs
+ for file in parent_path.glob("*"):
+ console.print(f"Dumping {file} (size: {file.stat().st_size})")
+ console.print(file.read_text())
+ raise
def test_cli_internal_api_debug(self, app):
with mock.patch(
diff --git a/tests/cli/commands/test_kubernetes_command.py
b/tests/cli/commands/test_kubernetes_command.py
index 3957790fc9..3d610b3519 100644
--- a/tests/cli/commands/test_kubernetes_command.py
+++ b/tests/cli/commands/test_kubernetes_command.py
@@ -18,7 +18,6 @@ from __future__ import annotations
import importlib
import os
-import tempfile
from unittest import mock
from unittest.mock import MagicMock, call
@@ -37,26 +36,25 @@ class TestGenerateDagYamlCommand:
importlib.reload(cli_parser)
cls.parser = cli_parser.get_parser()
- def test_generate_dag_yaml(self):
- with tempfile.TemporaryDirectory("airflow_dry_run_test/") as directory:
- file_name =
"miscellaneous_test_dag_run_after_loop_2020-11-03T00_00_00_plus_00_00.yml"
- kubernetes_command.generate_pod_yaml(
- self.parser.parse_args(
- [
- "kubernetes",
- "generate-dag-yaml",
- "miscellaneous_test_dag",
- "2020-11-03",
- "--output-path",
- directory,
- ]
- )
+ def test_generate_dag_yaml(self, tmp_path):
+ path = tmp_path /
"miscellaneous_test_dag_run_after_loop_2020-11-03T00_00_00_plus_00_00.yml"
+ kubernetes_command.generate_pod_yaml(
+ self.parser.parse_args(
+ [
+ "kubernetes",
+ "generate-dag-yaml",
+ "miscellaneous_test_dag",
+ "2020-11-03",
+ "--output-path",
+ os.fspath(path.parent),
+ ]
)
- assert len(os.listdir(directory)) == 1
- out_dir = directory + "/airflow_yaml_output/"
- assert len(os.listdir(out_dir)) == 6
- assert os.path.isfile(out_dir + file_name)
- assert os.stat(out_dir + file_name).st_size > 0
+ )
+ assert sum(1 for _ in path.parent.iterdir()) == 1
+ output_path = path.parent / "airflow_yaml_output"
+ assert sum(1 for _ in output_path.iterdir()) == 6
+ assert os.path.isfile(output_path / path.name)
+ assert (output_path / path.name).stat().st_size > 0
class TestCleanUpPodsCommand:
diff --git a/tests/cli/commands/test_task_command.py
b/tests/cli/commands/test_task_command.py
index eac5fe6802..e785b8ef46 100644
--- a/tests/cli/commands/test_task_command.py
+++ b/tests/cli/commands/test_task_command.py
@@ -24,7 +24,6 @@ import os
import re
import shutil
import sys
-import tempfile
import unittest
from argparse import ArgumentParser
from contextlib import contextmanager, redirect_stdout
@@ -171,12 +170,12 @@ class TestCliTasks:
task_command.task_test(args)
assert capsys.readouterr().out.endswith(f"{not_password}\n")
- def test_cli_test_different_path(self, session):
+ def test_cli_test_different_path(self, session, tmp_path):
"""
When the dag processor has a different dags folder
from the worker, ``airflow tasks run --local`` should still work.
"""
- repo_root = Path(__file__).parent.parent.parent.parent
+ repo_root = Path(__file__).parents[3]
orig_file_path = repo_root / "tests/dags/test_dags_folder.py"
orig_dags_folder = orig_file_path.parent
@@ -202,51 +201,50 @@ class TestCliTasks:
# additionally let's update the dags folder to be the new path
# ideally since dags_folder points correctly to the file, airflow
# should be able to find the dag.
- with tempfile.TemporaryDirectory() as td:
- new_file_path = Path(td) / Path(orig_file_path).name
- new_dags_folder = new_file_path.parent
- with move_back(orig_file_path, new_file_path), conf_vars(
- {("core", "dags_folder"): new_dags_folder.as_posix()}
- ):
- ser_dag = (
- session.query(SerializedDagModel)
- .filter(SerializedDagModel.dag_id == "test_dags_folder")
- .one()
- )
- # confirm that the serialized dag location has not been updated
- assert ser_dag.fileloc == orig_file_path.as_posix()
- assert ser_dag.data["dag"]["_processor_dags_folder"] ==
orig_dags_folder.as_posix()
- assert ser_dag.data["dag"]["fileloc"] ==
orig_file_path.as_posix()
- assert ser_dag.dag._processor_dags_folder ==
orig_dags_folder.as_posix()
- from airflow.settings import DAGS_FOLDER
-
- assert DAGS_FOLDER == new_dags_folder.as_posix() !=
orig_dags_folder.as_posix()
- task_command.task_run(
- self.parser.parse_args(
- [
- "tasks",
- "run",
- "--ignore-all-dependencies",
- "--local",
- "test_dags_folder",
- "task",
- "abc123",
- ]
- )
- )
- ti = (
- session.query(TaskInstance)
- .filter(
- TaskInstance.task_id == "task",
- TaskInstance.dag_id == "test_dags_folder",
- TaskInstance.run_id == "abc123",
- TaskInstance.map_index == -1,
- )
+ new_file_path = tmp_path / orig_file_path.name
+ new_dags_folder = new_file_path.parent
+ with move_back(orig_file_path, new_file_path), conf_vars(
+ {("core", "dags_folder"): new_dags_folder.as_posix()}
+ ):
+ ser_dag = (
+ session.query(SerializedDagModel)
+ .filter(SerializedDagModel.dag_id == "test_dags_folder")
.one()
)
- assert ti.state == "success"
- # verify that the file was in different location when run
- assert ti.xcom_pull(ti.task_id) == new_file_path.as_posix()
+ # confirm that the serialized dag location has not been updated
+ assert ser_dag.fileloc == orig_file_path.as_posix()
+ assert ser_dag.data["dag"]["_processor_dags_folder"] ==
orig_dags_folder.as_posix()
+ assert ser_dag.data["dag"]["fileloc"] == orig_file_path.as_posix()
+ assert ser_dag.dag._processor_dags_folder ==
orig_dags_folder.as_posix()
+ from airflow.settings import DAGS_FOLDER
+
+ assert DAGS_FOLDER == new_dags_folder.as_posix() !=
orig_dags_folder.as_posix()
+ task_command.task_run(
+ self.parser.parse_args(
+ [
+ "tasks",
+ "run",
+ "--ignore-all-dependencies",
+ "--local",
+ "test_dags_folder",
+ "task",
+ "abc123",
+ ]
+ )
+ )
+ ti = (
+ session.query(TaskInstance)
+ .filter(
+ TaskInstance.task_id == "task",
+ TaskInstance.dag_id == "test_dags_folder",
+ TaskInstance.run_id == "abc123",
+ TaskInstance.map_index == -1,
+ )
+ .one()
+ )
+ assert ti.state == "success"
+ # verify that the file was in different location when run
+ assert ti.xcom_pull(ti.task_id) == new_file_path.as_posix()
@mock.patch("airflow.cli.commands.task_command.LocalTaskJobRunner")
def test_run_with_existing_dag_run_id(self, mock_local_job_runner):
diff --git a/tests/cli/commands/test_variable_command.py
b/tests/cli/commands/test_variable_command.py
index e48f6bfb7c..b07cfbba8c 100644
--- a/tests/cli/commands/test_variable_command.py
+++ b/tests/cli/commands/test_variable_command.py
@@ -19,7 +19,6 @@ from __future__ import annotations
import io
import os
-import tempfile
from contextlib import redirect_stdout
import pytest
@@ -130,33 +129,25 @@ class TestCliVariables:
"""Test variables_export command"""
variable_command.variables_export(self.parser.parse_args(["variables",
"export", os.devnull]))
- def test_variables_isolation(self):
+ def test_variables_isolation(self, tmp_path):
"""Test isolation of variables"""
- with tempfile.NamedTemporaryFile(delete=True) as tmp1,
tempfile.NamedTemporaryFile(
- delete=True
- ) as tmp2:
+ path1 = tmp_path / "testfile1"
+ path2 = tmp_path / "testfile2"
- # First export
- variable_command.variables_set(
- self.parser.parse_args(["variables", "set", "foo",
'{"foo":"bar"}'])
- )
-
variable_command.variables_set(self.parser.parse_args(["variables", "set",
"bar", "original"]))
-
variable_command.variables_export(self.parser.parse_args(["variables",
"export", tmp1.name]))
-
- with open(tmp1.name) as first_exp:
+ # First export
+ variable_command.variables_set(self.parser.parse_args(["variables",
"set", "foo", '{"foo":"bar"}']))
+ variable_command.variables_set(self.parser.parse_args(["variables",
"set", "bar", "original"]))
+ variable_command.variables_export(self.parser.parse_args(["variables",
"export", os.fspath(path1)]))
-
variable_command.variables_set(self.parser.parse_args(["variables", "set",
"bar", "updated"]))
- variable_command.variables_set(
- self.parser.parse_args(["variables", "set", "foo",
'{"foo":"oops"}'])
- )
-
variable_command.variables_delete(self.parser.parse_args(["variables",
"delete", "foo"]))
-
variable_command.variables_import(self.parser.parse_args(["variables",
"import", tmp1.name]))
+ variable_command.variables_set(self.parser.parse_args(["variables",
"set", "bar", "updated"]))
+ variable_command.variables_set(self.parser.parse_args(["variables",
"set", "foo", '{"foo":"oops"}']))
+ variable_command.variables_delete(self.parser.parse_args(["variables",
"delete", "foo"]))
+ variable_command.variables_import(self.parser.parse_args(["variables",
"import", os.fspath(path1)]))
- assert "original" == Variable.get("bar")
- assert '{\n "foo": "bar"\n}' == Variable.get("foo")
+ assert "original" == Variable.get("bar")
+ assert '{\n "foo": "bar"\n}' == Variable.get("foo")
- # Second export
-
variable_command.variables_export(self.parser.parse_args(["variables",
"export", tmp2.name]))
+ # Second export
+ variable_command.variables_export(self.parser.parse_args(["variables",
"export", os.fspath(path2)]))
- with open(tmp2.name) as second_exp:
- assert first_exp.read() == second_exp.read()
+ assert path1.read_text() == path2.read_text()
diff --git a/tests/cli/commands/test_webserver_command.py
b/tests/cli/commands/test_webserver_command.py
index e7c7656835..f325924bcf 100644
--- a/tests/cli/commands/test_webserver_command.py
+++ b/tests/cli/commands/test_webserver_command.py
@@ -19,9 +19,7 @@ from __future__ import annotations
import os
import subprocess
import sys
-import tempfile
import time
-from pathlib import Path
from unittest import mock
import psutil
@@ -131,20 +129,15 @@ class TestGunicornMonitor:
class TestGunicornMonitorGeneratePluginState:
- @staticmethod
- def _prepare_test_file(filepath: str, size: int):
- os.makedirs(os.path.dirname(filepath), exist_ok=True)
- with open(filepath, "w") as file:
- file.write("A" * size)
- file.flush()
-
- def test_should_detect_changes_in_directory(self):
- with tempfile.TemporaryDirectory() as tempdir, mock.patch(
- "airflow.cli.commands.webserver_command.settings.PLUGINS_FOLDER",
tempdir
+ def test_should_detect_changes_in_directory(self, tmp_path):
+ with mock.patch(
+ "airflow.cli.commands.webserver_command.settings.PLUGINS_FOLDER",
os.fspath(tmp_path)
):
- self._prepare_test_file(f"{tempdir}/file1.txt", 100)
-
self._prepare_test_file(f"{tempdir}/nested/nested/nested/nested/file2.txt", 200)
- self._prepare_test_file(f"{tempdir}/file3.txt", 300)
+ (tmp_path / "file1.txt").write_text("A" * 100)
+ path2 = tmp_path / "nested/nested/nested/nested/file2.txt"
+ path2.parent.mkdir(parents=True)
+ path2.write_text("A" * 200)
+ (tmp_path / "file3.txt").write_text("A" * 300)
monitor = GunicornMonitor(
gunicorn_master_pid=1,
@@ -163,7 +156,7 @@ class TestGunicornMonitorGeneratePluginState:
assert 3 == len(state_a)
# Should detect new file
- self._prepare_test_file(f"{tempdir}/file4.txt", 400)
+ (tmp_path / "file4.txt").write_text("A" * 400)
state_c = monitor._generate_plugin_state()
@@ -171,7 +164,7 @@ class TestGunicornMonitorGeneratePluginState:
assert 4 == len(state_c)
# Should detect changes in files
- self._prepare_test_file(f"{tempdir}/file4.txt", 450)
+ (tmp_path / "file4.txt").write_text("A" * 450)
state_d = monitor._generate_plugin_state()
@@ -179,7 +172,7 @@ class TestGunicornMonitorGeneratePluginState:
assert 4 == len(state_d)
# Should support large files
- self._prepare_test_file(f"{tempdir}/file4.txt", 4000000)
+ (tmp_path / "file4.txt").write_text("A" * 4_000_000)
state_d = monitor._generate_plugin_state()
@@ -238,18 +231,18 @@ class TestCliWebServer(_ComonCLIGunicornTestClass):
main_process_regexp = r"airflow webserver"
@pytest.mark.execution_timeout(210)
- def test_cli_webserver_background(self):
- with tempfile.TemporaryDirectory(prefix="gunicorn") as tmpdir,
mock.patch.dict(
+ def test_cli_webserver_background(self, tmp_path):
+ with mock.patch.dict(
"os.environ",
AIRFLOW__CORE__DAGS_FOLDER="/dev/null",
AIRFLOW__CORE__LOAD_EXAMPLES="False",
AIRFLOW__WEBSERVER__WORKERS="1",
):
- pidfile_webserver = f"{tmpdir}/pidflow-webserver.pid"
- pidfile_monitor = f"{tmpdir}/pidflow-webserver-monitor.pid"
- stdout = f"{tmpdir}/airflow-webserver.out"
- stderr = f"{tmpdir}/airflow-webserver.err"
- logfile = f"{tmpdir}/airflow-webserver.log"
+ pidfile_webserver = tmp_path / "pidflow-webserver.pid"
+ pidfile_monitor = tmp_path / "pidflow-webserver-monitor.pid"
+ stdout = tmp_path / "airflow-webserver.out"
+ stderr = tmp_path / "airflow-webserver.err"
+ logfile = tmp_path / "airflow-webserver.log"
try:
# Run webserver as daemon in background. Note that the wait
method is not called.
@@ -259,13 +252,13 @@ class TestCliWebServer(_ComonCLIGunicornTestClass):
"webserver",
"--daemon",
"--pid",
- pidfile_webserver,
+ os.fspath(pidfile_webserver),
"--stdout",
- stdout,
+ os.fspath(stdout),
"--stderr",
- stderr,
+ os.fspath(stderr),
"--log-file",
- logfile,
+ os.fspath(logfile),
]
)
assert proc.poll() is None
@@ -299,7 +292,7 @@ class TestCliWebServer(_ComonCLIGunicornTestClass):
except Exception:
console.print("[red]Exception occurred. Dumping all logs.")
# Dump all logs
- for file in Path(tmpdir).glob("*"):
+ for file in tmp_path.glob("*"):
console.print(f"Dumping {file} (size:
{file.stat().st_size})")
console.print(file.read_text())
raise