This is an automated email from the ASF dual-hosted git repository.

ephraimanierobi pushed a commit to branch v3-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 4a1817a062866a935f1b66691b490b11944eeefa
Author: Jens Scheffler <[email protected]>
AuthorDate: Sat Nov 1 17:07:31 2025 +0100

    [v3-1-test] Enable ruff PLW1510 rule (#57660) (#57674)
    
    * Enable ruff PLW1510 rule
    
    * Exclude prek check for run_command
    (cherry picked from commit a79d8db)
---
 airflow-core/tests/unit/charts/helm_template_generator.py        | 2 +-
 dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py        | 5 ++++-
 devel-common/src/sphinx_exts/docs_build/docs_builder.py          | 2 ++
 helm-tests/tests/chart_utils/helm_template_generator.py          | 2 +-
 .../providers/amazon/aws/executors/aws_lambda/docker/app.py      | 6 +++++-
 .../airflow/providers/celery/executors/celery_executor_utils.py  | 9 ++++++++-
 .../google/src/airflow/providers/google/cloud/hooks/cloud_sql.py | 2 +-
 .../google/src/airflow/providers/google/cloud/hooks/dataflow.py  | 2 +-
 .../google/src/airflow/providers/google/cloud/hooks/dataproc.py  | 2 +-
 pyproject.toml                                                   | 1 +
 scripts/in_container/in_container_utils.py                       | 2 +-
 scripts/in_container/run_check_default_configuration.py          | 4 ++--
 scripts/in_container/run_prepare_airflow_distributions.py        | 2 ++
 scripts/tools/initialize_virtualenv.py                           | 8 ++++----
 14 files changed, 34 insertions(+), 15 deletions(-)
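
For context, ruff's PLW1510 (subprocess-run-without-check) flags any
subprocess.run call that omits an explicit check argument, because the
implicit default check=False silently swallows non-zero exit codes. A
minimal sketch of the before/after pattern applied throughout this
commit (the helm command is illustrative only):

    import subprocess

    # Flagged by PLW1510: `check` is implicit, so a failing command
    # passes silently unless the caller inspects returncode.
    result = subprocess.run(["helm", "version"], capture_output=True)

    # Compliant: manual return-code handling is now explicit.
    result = subprocess.run(["helm", "version"], check=False, capture_output=True)
    if result.returncode != 0:
        raise RuntimeError(result.stderr)

    # Compliant alternative: check=True raises CalledProcessError on failure.
    subprocess.run(["helm", "version"], check=True, capture_output=True)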

diff --git a/airflow-core/tests/unit/charts/helm_template_generator.py b/airflow-core/tests/unit/charts/helm_template_generator.py
index 954ac98d92e..04e6828e877 100644
--- a/airflow-core/tests/unit/charts/helm_template_generator.py
+++ b/airflow-core/tests/unit/charts/helm_template_generator.py
@@ -141,7 +141,7 @@ def render_chart(
         if show_only:
             for i in show_only:
                 command.extend(["--show-only", i])
-        result = subprocess.run(command, capture_output=True, cwd=chart_dir)
+        result = subprocess.run(command, check=False, capture_output=True, cwd=chart_dir)
         if result.returncode:
            raise HelmFailedError(result.returncode, result.args, result.stdout, result.stderr)
         templates = result.stdout
diff --git a/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py b/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py
index ef2459ff342..07e3e760d81 100644
--- a/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py
+++ b/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py
@@ -123,7 +123,10 @@ class S3DocsPublish:
             return (0, "")
         get_console().print(f"[info]Syncing {source} to {destination}\n")
         result = subprocess.run(
-            ["aws", "s3", "sync", "--delete", source, destination], 
capture_output=True, text=True
+            ["aws", "s3", "sync", "--delete", source, destination],
+            check=False,
+            capture_output=True,
+            text=True,
         )
         return (result.returncode, result.stderr)
 
diff --git a/devel-common/src/sphinx_exts/docs_build/docs_builder.py b/devel-common/src/sphinx_exts/docs_build/docs_builder.py
index 1f410fdfe7b..147215a7353 100644
--- a/devel-common/src/sphinx_exts/docs_build/docs_builder.py
+++ b/devel-common/src/sphinx_exts/docs_build/docs_builder.py
@@ -192,6 +192,7 @@ class AirflowDocsBuilder:
         with open(self.log_spelling_filename, "w") as output:
             completed_proc = run(
                 build_cmd,
+                check=False,
                 cwd=AIRFLOW_CONTENT_ROOT_PATH,
                 env=env,
                 stdout=output if not verbose else None,
@@ -274,6 +275,7 @@ class AirflowDocsBuilder:
         with open(self.log_build_filename, "w") as output:
             completed_proc = run(
                 build_cmd,
+                check=False,
                 cwd=AIRFLOW_CONTENT_ROOT_PATH,
                 env=env,
                 stdout=output if not verbose else None,
diff --git a/helm-tests/tests/chart_utils/helm_template_generator.py b/helm-tests/tests/chart_utils/helm_template_generator.py
index 3cd93aea09c..0eca0a9f1a2 100644
--- a/helm-tests/tests/chart_utils/helm_template_generator.py
+++ b/helm-tests/tests/chart_utils/helm_template_generator.py
@@ -203,7 +203,7 @@ def render_chart(
         if show_only:
             for i in show_only:
                 command.extend(["--show-only", i])
-        result = subprocess.run(command, capture_output=True, cwd=chart_dir)
+        result = subprocess.run(command, check=False, capture_output=True, cwd=chart_dir)
         if result.returncode:
            raise HelmFailedError(result.returncode, result.args, result.stdout, result.stderr)
         templates = result.stdout
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py
index 4f351fba738..702e9ee1348 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py
@@ -66,7 +66,11 @@ def run_and_report(command, task_key):
     try:
         log.info("Starting execution for task: %s", task_key)
         result = subprocess.run(
-            command, shell=isinstance(command, str), stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+            command,
+            check=False,
+            shell=isinstance(command, str),
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
         )
         return_code = result.returncode
         log.info("Execution completed for task %s with return code %s", 
task_key, return_code)
diff --git a/providers/celery/src/airflow/providers/celery/executors/celery_executor_utils.py b/providers/celery/src/airflow/providers/celery/executors/celery_executor_utils.py
index fe9323fedbf..94bf74f4875 100644
--- a/providers/celery/src/airflow/providers/celery/executors/celery_executor_utils.py
+++ b/providers/celery/src/airflow/providers/celery/executors/celery_executor_utils.py
@@ -247,7 +247,14 @@ def _execute_in_subprocess(command_to_exec: CommandType, celery_task_id: str | N
     if celery_task_id:
         env["external_executor_id"] = celery_task_id
     try:
-        subprocess.run(command_to_exec, stderr=sys.__stderr__, stdout=sys.__stdout__, close_fds=True, env=env)
+        subprocess.run(
+            command_to_exec,
+            check=False,
+            stderr=sys.__stderr__,
+            stdout=sys.__stdout__,
+            close_fds=True,
+            env=env,
+        )
     except subprocess.CalledProcessError as e:
         log.exception("[%s] execute_command encountered a CalledProcessError", 
celery_task_id)
         log.error(e.output)
diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/cloud_sql.py b/providers/google/src/airflow/providers/google/cloud/hooks/cloud_sql.py
index 409bb2cce30..0313aa67561 100644
--- a/providers/google/src/airflow/providers/google/cloud/hooks/cloud_sql.py
+++ b/providers/google/src/airflow/providers/google/cloud/hooks/cloud_sql.py
@@ -1212,7 +1212,7 @@ class CloudSQLDatabaseHook(BaseHook):
        cloud_sql_hook = CloudSQLHook(api_version="v1", gcp_conn_id=self.gcp_conn_id)
 
         with cloud_sql_hook.provide_authorized_gcloud():
-            proc = subprocess.run(cmd, capture_output=True)
+            proc = subprocess.run(cmd, check=False, capture_output=True)
 
         if proc.returncode != 0:
            stderr_last_20_lines = "\n".join(proc.stderr.decode().strip().splitlines()[-20:])
diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/dataflow.py b/providers/google/src/airflow/providers/google/cloud/hooks/dataflow.py
index d52c77ecb8c..0b75a4e9b32 100644
--- a/providers/google/src/airflow/providers/google/cloud/hooks/dataflow.py
+++ b/providers/google/src/airflow/providers/google/cloud/hooks/dataflow.py
@@ -1005,7 +1005,7 @@ class DataflowHook(GoogleBaseHook):
         success_code = 0
 
         with self.provide_authorized_gcloud():
-            proc = subprocess.run(cmd, capture_output=True)
+            proc = subprocess.run(cmd, check=False, capture_output=True)
 
         if proc.returncode != success_code:
            stderr_last_20_lines = "\n".join(proc.stderr.decode().strip().splitlines()[-20:])
diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/dataproc.py b/providers/google/src/airflow/providers/google/cloud/hooks/dataproc.py
index 872673aae6b..a3d17db447f 100644
--- a/providers/google/src/airflow/providers/google/cloud/hooks/dataproc.py
+++ b/providers/google/src/airflow/providers/google/cloud/hooks/dataproc.py
@@ -298,7 +298,7 @@ class DataprocHook(GoogleBaseHook):
         success_code = 0
 
         with self.provide_authorized_gcloud():
-            proc = subprocess.run(cmd, capture_output=True)
+            proc = subprocess.run(cmd, check=False, capture_output=True)
 
         if proc.returncode != success_code:
            stderr_last_20_lines = "\n".join(proc.stderr.decode().strip().splitlines()[-20:])
diff --git a/pyproject.toml b/pyproject.toml
index ab54f43e231..4965c934a65 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -601,6 +601,7 @@ extend-select = [
     "PLW1501", # {mode} is not a valid mode for open
     "PLW1507", # Shallow copy of os.environ via copy.copy(os.environ)
     "PLW1508", # Invalid type for environment variable default; expected str 
or None
+    "PLW1510", # subprocess.run without explicit check argument
     # Per rule enables
     "RUF006", # Checks for asyncio dangling task
     "RUF015", # Checks for unnecessary iterable allocation for first element
diff --git a/scripts/in_container/in_container_utils.py b/scripts/in_container/in_container_utils.py
index b6f5bd4d95f..1368f0817c0 100644
--- a/scripts/in_container/in_container_utils.py
+++ b/scripts/in_container/in_container_utils.py
@@ -57,7 +57,7 @@ def run_command(cmd: list[str], github_actions: bool, **kwargs) -> subprocess.Co
     with ci_group(
         f"Running command: {' '.join([shlex.quote(arg) for arg in cmd])}", 
github_actions=github_actions
     ):
-        result = subprocess.run(cmd, **kwargs)
+        result = subprocess.run(cmd, **kwargs)  # noqa: PLW1510 - check is handled below and added by callers
    if result.returncode != 0 and github_actions and kwargs.get("check", False):
         console.print(f"[red]Command failed: {' '.join([shlex.quote(entry) for 
entry in cmd])}[/]")
         console.print("[red]Please unfold the above group and to investigate 
the issue[/]")
diff --git a/scripts/in_container/run_check_default_configuration.py b/scripts/in_container/run_check_default_configuration.py
index 7510da30689..b32917f2ec7 100755
--- a/scripts/in_container/run_check_default_configuration.py
+++ b/scripts/in_container/run_check_default_configuration.py
@@ -39,7 +39,7 @@ if __name__ == "__main__":
         # Write default config cmd output to a temporary file
         default_config_file = os.path.join(tmp_dir, "airflow.cfg")
         with open(default_config_file, "w") as f:
-            result = subprocess.run(list_default_config_cmd, stdout=f)
+            result = subprocess.run(list_default_config_cmd, check=False, stdout=f)
         if result.returncode != 0:
             print(f"\033[0;31mERROR: when running `{' 
'.join(list_default_config_cmd)}`\033[0m\n")
             exit(1)
@@ -47,7 +47,7 @@ if __name__ == "__main__":
         env = os.environ.copy()
         env["AIRFLOW_HOME"] = tmp_dir
         env["AIRFLOW_CONFIG"] = default_config_file
-        result = subprocess.run(lint_config_cmd, capture_output=True, env=env)
+        result = subprocess.run(lint_config_cmd, check=False, capture_output=True, env=env)
 
     output: str = result.stdout.decode().strip()
     if result.returncode != 0 or expected_output not in output:
diff --git a/scripts/in_container/run_prepare_airflow_distributions.py b/scripts/in_container/run_prepare_airflow_distributions.py
index 2c9a6ad6479..6dfc64a8d38 100755
--- a/scripts/in_container/run_prepare_airflow_distributions.py
+++ b/scripts/in_container/run_prepare_airflow_distributions.py
@@ -93,6 +93,7 @@ def build_airflow_packages(distribution_format: str):
     console.print(f"[bright_blue]Building apache-airflow-core distributions: 
{distribution_format}\n")
     build_process = subprocess.run(
         airflow_core_build_command,
+        check=False,
         capture_output=False,
         cwd=AIRFLOW_CORE_ROOT_PATH,
         env=envcopy,
@@ -105,6 +106,7 @@ def build_airflow_packages(distribution_format: str):
     console.print(f"[bright_blue]Building apache-airflow distributions: 
{distribution_format}\n")
     build_process = subprocess.run(
         airflow_build_command,
+        check=False,
         capture_output=False,
         cwd=AIRFLOW_ROOT_PATH,
         env=envcopy,
diff --git a/scripts/tools/initialize_virtualenv.py b/scripts/tools/initialize_virtualenv.py
index f78a2dab40b..96dd60fd28f 100755
--- a/scripts/tools/initialize_virtualenv.py
+++ b/scripts/tools/initialize_virtualenv.py
@@ -93,7 +93,7 @@ system packages. It's easier to install extras one-by-one as needed.
     quoted_command = " ".join([shlex.quote(parameter) for parameter in 
uv_install_command])
     print()
     print(f"Running command: \n   {quoted_command}\n")
-    e = subprocess.run(uv_install_command)
+    e = subprocess.run(uv_install_command, check=False)
     return e.returncode
 
 
@@ -116,7 +116,7 @@ def main():
 
     if not check_if_in_virtualenv():
         version = get_python_version()
-        e = subprocess.run(["uv", "venv", "--python", version])
+        e = subprocess.run(["uv", "venv", "--python", version], check=False)
         if e.returncode != 0:
             print(f"There was a problem with 'uv venv'. Error code: 
{e.returncode}")
 
@@ -168,7 +168,7 @@ def main():
     env["AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_ENABLED"] = "False"
     env["AIRFLOW__CORE__DAGS_FOLDER"] = f"{airflow_sources}/empty"
     env["AIRFLOW__CORE__PLUGINS_FOLDER"] = f"{airflow_sources}/empty"
-    subprocess.run(["uv", "run", "airflow", "db", "reset", "--yes"], env=env)
+    subprocess.run(["uv", "run", "airflow", "db", "reset", "--yes"], 
check=False, env=env)
 
     print("\nResetting AIRFLOW sqlite unit test database...")
     env = os.environ.copy()
@@ -177,7 +177,7 @@ def main():
     env["AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_ENABLED"] = "False"
     env["AIRFLOW__CORE__DAGS_FOLDER"] = f"{airflow_sources}/empty"
     env["AIRFLOW__CORE__PLUGINS_FOLDER"] = f"{airflow_sources}/empty"
-    subprocess.run(["uv", "run", "airflow", "db", "reset", "--yes"], env=env)
+    subprocess.run(["uv", "run", "airflow", "db", "reset", "--yes"], 
check=False, env=env)
 
     print("\nInitialization of environment complete! Go ahead and develop 
Airflow!")
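
One call site deliberately keeps the lint suppressed: run_command in
scripts/in_container/in_container_utils.py forwards **kwargs straight to
subprocess.run, so passing check is the caller's decision (see the
in_container_utils.py hunk above). A minimal sketch of that wrapper
pattern, with hypothetical callers for illustration:

    import subprocess

    def run_command(cmd: list[str], **kwargs) -> subprocess.CompletedProcess:
        # `check` is intentionally left to **kwargs so each caller picks the
        # failure behaviour; the rule is silenced at this one call site.
        return subprocess.run(cmd, **kwargs)  # noqa: PLW1510

    # Hypothetical callers opting in explicitly:
    run_command(["true"], check=True)            # raises CalledProcessError on failure
    proc = run_command(["false"], check=False)   # caller inspects proc.returncode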
 
