This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 3f984edd00 Refactor: Use Path.unlink(missing_ok=True) (#33666)
3f984edd00 is described below
commit 3f984edd0009ad4e3177a3c95351c563a6ac00da
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Thu Aug 24 23:09:46 2023 +0000
Refactor: Use Path.unlink(missing_ok=True) (#33666)
---
.../commands/production_image_commands.py | 10 ++--
.../src/airflow_breeze/utils/kubernetes_utils.py | 6 +-
dev/breeze/src/airflow_breeze/utils/parallel.py | 5 +-
dev/breeze/src/airflow_breeze/utils/run_utils.py | 10 +---
.../pre_commit_compile_www_assets_dev.py | 5 +-
.../pre_commit_update_common_sql_api_stubs.py | 6 +-
.../task/task_runner/test_standard_task_runner.py | 65 +++++++---------------
7 files changed, 32 insertions(+), 75 deletions(-)
diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
index ba13509b58..7cbede2041 100644
--- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
@@ -16,7 +16,6 @@
# under the License.
from __future__ import annotations
-import contextlib
import os
import sys
from typing import Any
@@ -419,11 +418,10 @@ def clean_docker_context_files():
get_console().print("[info]Cleaning docker-context-files[/]")
if get_dry_run():
return
- with contextlib.suppress(FileNotFoundError):
- context_files_to_delete = DOCKER_CONTEXT_DIR.glob("**/*")
- for file_to_delete in context_files_to_delete:
- if file_to_delete.name != ".README.md":
- file_to_delete.unlink()
+ context_files_to_delete = DOCKER_CONTEXT_DIR.rglob("*")
+ for file_to_delete in context_files_to_delete:
+ if file_to_delete.name != ".README.md":
+ file_to_delete.unlink(missing_ok=True)
def check_docker_context_files(install_packages_from_context: bool):
diff --git a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py
index 65458e6208..559d84f137 100644
--- a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py
@@ -162,11 +162,7 @@ def _download_tool_if_needed(
f"[info]Error when running `{tool}`: {e}. "
f"Removing and downloading {expected_version} version."
)
- try:
- # We can add missing=ok when we go to python 3.8+
- path.unlink()
- except FileNotFoundError:
- pass
+ path.unlink(missing_ok=True)
get_console().print(f"[info]Downloading from:[/] {url}")
if get_dry_run():
return
diff --git a/dev/breeze/src/airflow_breeze/utils/parallel.py b/dev/breeze/src/airflow_breeze/utils/parallel.py
index 9e2ffb6495..7386366381 100644
--- a/dev/breeze/src/airflow_breeze/utils/parallel.py
+++ b/dev/breeze/src/airflow_breeze/utils/parallel.py
@@ -449,10 +449,7 @@ def check_async_run_results(
finally:
if not skip_cleanup:
for output in outputs:
- try:
- os.unlink(output.file_name)
- except FileNotFoundError:
- pass
+ Path(output.file_name).unlink(missing_ok=True)
@contextmanager
diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py
index 4517c6f5c4..7a705d2dc5 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py
@@ -410,10 +410,7 @@ def _run_compile_internally(command_to_execute: list[str], dev: bool) -> RunComm
)
else:
WWW_ASSET_COMPILE_LOCK.parent.mkdir(parents=True, exist_ok=True)
- try:
- WWW_ASSET_COMPILE_LOCK.unlink()
- except FileNotFoundError:
- pass
+ WWW_ASSET_COMPILE_LOCK.unlink(missing_ok=True)
try:
with SoftFileLock(WWW_ASSET_COMPILE_LOCK, timeout=5):
with open(WWW_ASSET_OUT_FILE, "w") as output_file:
@@ -427,10 +424,7 @@ def _run_compile_internally(command_to_execute: list[str], dev: bool) -> RunComm
stdout=output_file,
)
if result.returncode == 0:
- try:
- WWW_ASSET_OUT_FILE.unlink()
- except FileNotFoundError:
- pass
+ WWW_ASSET_OUT_FILE.unlink(missing_ok=True)
return result
except Timeout:
get_console().print("[error]Another asset compilation is running. Exiting[/]\n")
diff --git a/scripts/ci/pre_commit/pre_commit_compile_www_assets_dev.py b/scripts/ci/pre_commit/pre_commit_compile_www_assets_dev.py
index 1b90c42ba6..91a27f41c7 100755
--- a/scripts/ci/pre_commit/pre_commit_compile_www_assets_dev.py
+++ b/scripts/ci/pre_commit/pre_commit_compile_www_assets_dev.py
@@ -41,10 +41,7 @@ if __name__ == "__main__":
WWW_HASH_FILE.unlink()
env = os.environ.copy()
env["FORCE_COLOR"] = "true"
- try:
- WWW_ASSET_OUT_FILE.unlink()
- except FileNotFoundError:
- pass
+ WWW_ASSET_OUT_FILE.unlink(missing_ok=True)
with open(WWW_ASSET_OUT_DEV_MODE_FILE, "w") as f:
subprocess.run(
["yarn", "install", "--frozen-lockfile"],
diff --git a/scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py b/scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py
index b8fead2861..aa6cf82821 100755
--- a/scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py
+++ b/scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py
@@ -212,7 +212,7 @@ def compare_stub_files(generated_stub_path: Path, force_override: bool) -> tuple
module_name, generated_stub_path, patch_generated_files=True
)
if generated_pyi_content is None:
- os.unlink(generated_stub_path)
+ generated_stub_path.unlink()
if stub_file_target_path.exists():
console.print(
f"[red]The {stub_file_target_path} file is missing in generated files: "
@@ -223,7 +223,7 @@ def compare_stub_files(generated_stub_path: Path, force_override: bool) -> tuple
f"[yellow]The file {stub_file_target_path} has been removed "
"as changes are force-overridden"
)
- os.unlink(stub_file_target_path)
+ stub_file_target_path.unlink()
return 1, 0
else:
console.print(
@@ -345,7 +345,7 @@ if __name__ == "__main__":
console.print(
f"[yellow]The file {target_path} has been removed as changes are force-overridden"
)
- os.unlink(target_path)
+ target_path.unlink()
if not total_removals and not total_additions:
console.print("\n[green]All OK. The common.sql APIs did not change[/]")
sys.exit(0)
diff --git a/tests/task/task_runner/test_standard_task_runner.py b/tests/task/task_runner/test_standard_task_runner.py
index 52ac864a41..6f1f60d77c 100644
--- a/tests/task/task_runner/test_standard_task_runner.py
+++ b/tests/task/task_runner/test_standard_task_runner.py
@@ -132,15 +132,11 @@ class TestStandardTaskRunner:
assert task_runner.return_code() is not None
- def test_notifies_about_start_and_stop(self):
- path_listener_writer = "/tmp/test_notifies_about_start_and_stop"
- try:
- os.unlink(path_listener_writer)
- except OSError:
- pass
+ def test_notifies_about_start_and_stop(self, tmp_path):
+ path_listener_writer = tmp_path / "test_notifies_about_start_and_stop"
lm = get_listener_manager()
- lm.add_listener(FileWriteListener(path_listener_writer))
+ lm.add_listener(FileWriteListener(os.fspath(path_listener_writer)))
dagbag = DagBag(
dag_folder=TEST_DAG_FOLDER,
@@ -170,21 +166,17 @@ class TestStandardTaskRunner:
# Wait till process finishes
assert task_runner.return_code(timeout=10) is not None
- with open(path_listener_writer) as f:
+ with path_listener_writer.open() as f:
assert f.readline() == "on_starting\n"
assert f.readline() == "on_task_instance_running\n"
assert f.readline() == "on_task_instance_success\n"
assert f.readline() == "before_stopping\n"
- def test_notifies_about_fail(self):
- path_listener_writer = "/tmp/test_notifies_about_fail"
- try:
- os.unlink(path_listener_writer)
- except OSError:
- pass
+ def test_notifies_about_fail(self, tmp_path):
+ path_listener_writer = tmp_path / "test_notifies_about_fail"
lm = get_listener_manager()
- lm.add_listener(FileWriteListener(path_listener_writer))
+ lm.add_listener(FileWriteListener(os.fspath(path_listener_writer)))
dagbag = DagBag(
dag_folder=TEST_DAG_FOLDER,
@@ -214,24 +206,20 @@ class TestStandardTaskRunner:
# Wait till process finishes
assert task_runner.return_code(timeout=10) is not None
- with open(path_listener_writer) as f:
+ with path_listener_writer.open() as f:
assert f.readline() == "on_starting\n"
assert f.readline() == "on_task_instance_running\n"
assert f.readline() == "on_task_instance_failed\n"
assert f.readline() == "before_stopping\n"
- def test_ol_does_not_block_xcoms(self):
+ def test_ol_does_not_block_xcoms(self, tmp_path):
"""
Test that ensures that pushing and pulling xcoms both in listener and
task does not collide
"""
- path_listener_writer = "/tmp/test_ol_does_not_block_xcoms"
- try:
- os.unlink(path_listener_writer)
- except OSError:
- pass
+ path_listener_writer = tmp_path / "test_ol_does_not_block_xcoms"
- listener = xcom_listener.XComListener(path_listener_writer, "push_and_pull")
+ listener = xcom_listener.XComListener(os.fspath(path_listener_writer), "push_and_pull")
get_listener_manager().add_listener(listener)
dagbag = DagBag(
@@ -264,7 +252,7 @@ class TestStandardTaskRunner:
# Wait till process finishes
assert task_runner.return_code(timeout=10) is not None
- with open(path_listener_writer) as f:
+ with path_listener_writer.open() as f:
assert f.readline() == "on_task_instance_running\n"
assert f.readline() == "on_task_instance_success\n"
assert f.readline() == "listener\n"
@@ -354,16 +342,10 @@ class TestStandardTaskRunner:
Test that ensures that clearing in the UI SIGTERMS
the task
"""
- path_on_kill_running = "/tmp/airflow_on_kill_running"
- path_on_kill_killed = "/tmp/airflow_on_kill_killed"
- try:
- os.unlink(path_on_kill_running)
- except OSError:
- pass
- try:
- os.unlink(path_on_kill_killed)
- except OSError:
- pass
+ path_on_kill_running = Path("/tmp/airflow_on_kill_running")
+ path_on_kill_killed = Path("/tmp/airflow_on_kill_killed")
+ path_on_kill_running.unlink(missing_ok=True)
+ path_on_kill_killed.unlink(missing_ok=True)
dagbag = DagBag(
dag_folder=TEST_DAG_FOLDER,
@@ -394,9 +376,7 @@ class TestStandardTaskRunner:
logging.info("Waiting for the task to start")
with timeout(seconds=20):
- while True:
- if os.path.exists(path_on_kill_running):
- break
+ while not path_on_kill_running.exists():
time.sleep(0.01)
logging.info("Task started. Give the task some time to settle")
time.sleep(3)
@@ -405,13 +385,11 @@ class TestStandardTaskRunner:
logging.info("Waiting for the on kill killed file to appear")
with timeout(seconds=4):
- while True:
- if os.path.exists(path_on_kill_killed):
- break
+ while not path_on_kill_killed.exists():
time.sleep(0.01)
logging.info("The file appeared")
- with open(path_on_kill_killed) as f:
+ with path_on_kill_killed.open() as f:
assert "ON_KILL_TEST" == f.readline()
for process in processes:
@@ -419,10 +397,7 @@ class TestStandardTaskRunner:
def test_parsing_context(self):
context_file = Path("/tmp/airflow_parsing_context")
- try:
- context_file.unlink()
- except FileNotFoundError:
- pass
+ context_file.unlink(missing_ok=True)
dagbag = DagBag(
dag_folder=TEST_DAG_FOLDER,
include_examples=False,