This is an automated email from the ASF dual-hosted git repository.
jscheffl pushed a commit to branch v3-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/v3-1-test by this push:
new a4251da0d2d [v3-1-test] Enable ruff PLW1508 rule (#57653) (#57673)
a4251da0d2d is described below
commit a4251da0d2db8feaef6a0435e8f23776901489f3
Author: Jens Scheffler <[email protected]>
AuthorDate: Sat Nov 1 15:02:05 2025 +0100
[v3-1-test] Enable ruff PLW1508 rule (#57653) (#57673)
(cherry picked from commit c846f6f27fe97ebd00cc0a13cf7ee59f1020877b)
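PLW1508 flags calls to os.environ.get / os.getenv whose default value is not
a str (or None): environment variables are always strings, so a bool, int or
float default gives the expression a misleading type. The recurring fix in
this change, shown here as a minimal before/after sketch:

    import os

    # Flagged: default is a bool, so the expression mixes bool and str
    dev_mode = os.environ.get("DEV_MODE", False) == "true"

    # Fixed: default is a str, matching what os.environ.get returns
    dev_mode = os.environ.get("DEV_MODE", str(False)) == "true"

When DEV_MODE is unset, both variants compare unequal to "true", so runtime
behavior is unchanged; only the default's type is corrected.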
---
.../api_fastapi/auth/managers/simple/simple_auth_manager.py | 2 +-
airflow-core/src/airflow/api_fastapi/core_api/app.py | 2 +-
dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py | 2 +-
docker-tests/tests/docker_tests/test_prod_image.py | 2 +-
providers/amazon/tests/system/amazon/aws/example_bedrock.py | 4 ++--
.../providers/databricks/plugins/databricks_workflow.py | 5 -----
.../tests/system/databricks/example_databricks_workflow.py | 2 +-
.../edge3/src/airflow/providers/edge3/cli/api_client.py | 12 +++++++++---
.../example_cloud_storage_transfer_service_aws.py | 2 +-
.../system/microsoft/azure/example_azure_service_bus.py | 2 +-
.../tests/system/microsoft/azure/example_azure_synapse.py | 6 +++---
pyproject.toml | 6 ++++++
scripts/in_container/update_quarantined_test_status.py | 4 ++--
13 files changed, 29 insertions(+), 22 deletions(-)
diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/simple_auth_manager.py b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/simple_auth_manager.py
index b3ce04a94b8..28e3c172f89 100644
--- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/simple_auth_manager.py
+++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/simple_auth_manager.py
@@ -280,7 +280,7 @@ class SimpleAuthManager(BaseAuthManager[SimpleAuthManagerUser]):
"""
from airflow.api_fastapi.auth.managers.simple.routes.login import login_router
- dev_mode = os.environ.get("DEV_MODE", False) == "true"
+ dev_mode = os.environ.get("DEV_MODE", str(False)) == "true"
directory = Path(__file__).parent.joinpath("ui", "dev" if dev_mode else "dist")
directory.mkdir(exist_ok=True)
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/app.py b/airflow-core/src/airflow/api_fastapi/core_api/app.py
index 5cb7c6570cd..cd34b28f3cb 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/app.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/app.py
@@ -47,7 +47,7 @@ def init_views(app: FastAPI) -> None:
app.include_router(ui_router)
app.include_router(public_router)
- dev_mode = os.environ.get("DEV_MODE", False) == "true"
+ dev_mode = os.environ.get("DEV_MODE", str(False)) == "true"
directory = Path(AIRFLOW_PATH) / ("airflow/ui/dev" if dev_mode else "airflow/ui/dist")
diff --git a/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py b/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py
index 26664ccbb1c..ef2459ff342 100644
--- a/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py
+++ b/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py
@@ -292,7 +292,7 @@ class S3DocsPublish:
"Quantity": 1,
"Items": ["/*"],
},
- "CallerReference": str(int(os.environ.get("GITHUB_RUN_ID",
0))),
+ "CallerReference": str(int(os.environ.get("GITHUB_RUN_ID",
str(0)))),
},
)
get_console().print(
diff --git a/docker-tests/tests/docker_tests/test_prod_image.py b/docker-tests/tests/docker_tests/test_prod_image.py
index 984ab7d0e90..69777c426ae 100644
--- a/docker-tests/tests/docker_tests/test_prod_image.py
+++ b/docker-tests/tests/docker_tests/test_prod_image.py
@@ -54,7 +54,7 @@ REGULAR_IMAGE_PROVIDERS = [
if not provider_id.startswith("#")
]
-testing_slim_image = os.environ.get("TEST_SLIM_IMAGE", False)
+testing_slim_image = os.environ.get("TEST_SLIM_IMAGE", str(False)).lower() in ("true", "1", "yes")
class TestCommands:
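Unlike the purely type-level fixes above, the test_prod_image.py change also
tightens how the flag is parsed: the old line bound testing_slim_image to the
raw string, so any non-empty value (even "false") was truthy, while the new
line parses the value explicitly. A quick sketch of the difference:

    import os

    os.environ["TEST_SLIM_IMAGE"] = "false"
    old = bool(os.environ.get("TEST_SLIM_IMAGE", False))  # True, surprisingly
    new = os.environ.get("TEST_SLIM_IMAGE", str(False)).lower() in ("true", "1", "yes")  # False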
diff --git a/providers/amazon/tests/system/amazon/aws/example_bedrock.py b/providers/amazon/tests/system/amazon/aws/example_bedrock.py
index 79311a27abd..54fa46dcca0 100644
--- a/providers/amazon/tests/system/amazon/aws/example_bedrock.py
+++ b/providers/amazon/tests/system/amazon/aws/example_bedrock.py
@@ -65,12 +65,12 @@ DAG_ID = "example_bedrock"
# Creating a custom model takes nearly two hours. If SKIP_LONG_TASKS
# is True then these tasks will be skipped. This way we can still have
# the code snippets for docs, and we can manually run the full tests.
-SKIP_LONG_TASKS = environ.get("SKIP_LONG_SYSTEM_TEST_TASKS", default=True)
+SKIP_LONG_TASKS = environ.get("SKIP_LONG_SYSTEM_TEST_TASKS", str(True))
# No-commitment Provisioned Throughput is currently restricted to external
# customers only and will fail with a ServiceQuotaExceededException if run
# on the AWS System Test stack.
-SKIP_PROVISION_THROUGHPUT = environ.get("SKIP_RESTRICTED_SYSTEM_TEST_TASKS", default=True)
+SKIP_PROVISION_THROUGHPUT = environ.get("SKIP_RESTRICTED_SYSTEM_TEST_TASKS", str(True))
LLAMA_SHORT_MODEL_ID = "meta.llama3-8b-instruct-v1:0"
TITAN_MODEL_ID = "amazon.titan-text-express-v1:0:8k"
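The Bedrock skip flags are presumably consumed as truthy values; since
str(True) == "True" is a non-empty string, the unset-variable behavior stays
the same after the fix. A one-line check of that assumption:

    from os import environ

    environ.pop("SKIP_LONG_SYSTEM_TEST_TASKS", None)
    assert bool(environ.get("SKIP_LONG_SYSTEM_TEST_TASKS", str(True))) is True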
diff --git a/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py b/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
index 3fd7bfb0daf..7670fb0c172 100644
--- a/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
+++ b/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
@@ -17,7 +17,6 @@
from __future__ import annotations
-import os
from typing import TYPE_CHECKING, Any
from urllib.parse import unquote
@@ -48,10 +47,6 @@ if TYPE_CHECKING:
from airflow.utils.context import Context
-REPAIR_WAIT_ATTEMPTS = os.getenv("DATABRICKS_REPAIR_WAIT_ATTEMPTS", 20)
-REPAIR_WAIT_DELAY = os.getenv("DATABRICKS_REPAIR_WAIT_DELAY", 0.5)
-
-
def get_auth_decorator():
if AIRFLOW_V_3_0_PLUS:
from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity
diff --git a/providers/databricks/tests/system/databricks/example_databricks_workflow.py b/providers/databricks/tests/system/databricks/example_databricks_workflow.py
index 5921f109d08..39d7db6b6c7 100644
--- a/providers/databricks/tests/system/databricks/example_databricks_workflow.py
+++ b/providers/databricks/tests/system/databricks/example_databricks_workflow.py
@@ -30,7 +30,7 @@ from airflow.providers.databricks.operators.databricks import (
from airflow.providers.databricks.operators.databricks_workflow import DatabricksWorkflowTaskGroup
from airflow.utils.timezone import datetime
-EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
+EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", str(6)))
DATABRICKS_CONN_ID = os.getenv("DATABRICKS_CONN_ID", "databricks_default")
DATABRICKS_NOTIFICATION_EMAIL = os.getenv("DATABRICKS_NOTIFICATION_EMAIL", "[email protected]")
diff --git a/providers/edge3/src/airflow/providers/edge3/cli/api_client.py b/providers/edge3/src/airflow/providers/edge3/cli/api_client.py
index c9402ea4541..756e6e5132c 100644
--- a/providers/edge3/src/airflow/providers/edge3/cli/api_client.py
+++ b/providers/edge3/src/airflow/providers/edge3/cli/api_client.py
@@ -53,12 +53,18 @@ logger = logging.getLogger(__name__)
# Note: Given defaults make attempts after 1, 3, 7, 15, 31 seconds, 1:03, 2:07, 3:37 and fail after 5:07 min
# So far there is no other config facility in the Task SDK, so we use ENV for the moment
# TODO: Consider these env variables jointly in task sdk together with task_sdk/src/airflow/sdk/api/client.py
-API_RETRIES = int(os.getenv("AIRFLOW__EDGE__API_RETRIES", os.getenv("AIRFLOW__WORKERS__API_RETRIES", 10)))
+API_RETRIES = int(
+ os.getenv("AIRFLOW__EDGE__API_RETRIES", os.getenv("AIRFLOW__WORKERS__API_RETRIES", str(10)))
+)
API_RETRY_WAIT_MIN = float(
- os.getenv("AIRFLOW__EDGE__API_RETRY_WAIT_MIN", os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MIN", 1.0))
+ os.getenv(
+ "AIRFLOW__EDGE__API_RETRY_WAIT_MIN",
os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MIN", str(1.0))
+ )
)
API_RETRY_WAIT_MAX = float(
- os.getenv("AIRFLOW__EDGE__API_RETRY_WAIT_MAX", os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MAX", 90.0))
+ os.getenv(
+ "AIRFLOW__EDGE__API_RETRY_WAIT_MAX",
os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MAX", str(90.0))
+ )
)
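The retry timing quoted in the comment above follows from exponential backoff
that doubles from API_RETRY_WAIT_MIN and is capped at API_RETRY_WAIT_MAX. A
small sketch (assuming tenacity-style doubling; not part of the patch)
reproduces the cumulative schedule for the defaults 10 / 1.0 / 90.0:

    wait, elapsed = 1.0, 0.0
    for attempt in range(1, 10):  # 9 retries after the first attempt
        elapsed += min(wait, 90.0)
        print(f"retry {attempt} at {int(elapsed) // 60}:{int(elapsed) % 60:02d}")
        wait *= 2
    # retries at 0:01, 0:03, 0:07, 0:15, 0:31, 1:03, 2:07, 3:37, 5:07

which matches the 1, 3, 7, 15, 31 seconds, 1:03, 2:07, 3:37 and 5:07 noted in
the source comment.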
diff --git a/providers/google/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py b/providers/google/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
index 443acfd5081..b6c6277932f 100644
--- a/providers/google/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
+++ b/providers/google/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
@@ -79,7 +79,7 @@ EXAMPLE_BUCKET = "airflow-system-tests-resources"
EXAMPLE_FILE = "storage-transfer/big_file.dat"
BUCKET_SOURCE_AWS = f"bucket-aws-{DAG_ID}-{ENV_ID}".replace("_", "-")
BUCKET_TARGET_GCS = f"bucket-gcs-{DAG_ID}-{ENV_ID}".replace("_", "-")
-WAIT_FOR_OPERATION_POKE_INTERVAL = int(os.environ.get("WAIT_FOR_OPERATION_POKE_INTERVAL", 5))
+WAIT_FOR_OPERATION_POKE_INTERVAL = int(os.environ.get("WAIT_FOR_OPERATION_POKE_INTERVAL", str(5)))
GCP_DESCRIPTION = "description"
GCP_TRANSFER_JOB_NAME = f"transferJobs/sampleJob-{DAG_ID}-{ENV_ID}".replace("-", "_")
diff --git a/providers/microsoft/azure/tests/system/microsoft/azure/example_azure_service_bus.py b/providers/microsoft/azure/tests/system/microsoft/azure/example_azure_service_bus.py
index 091fdff3c2c..b6bd6c67d85 100644
--- a/providers/microsoft/azure/tests/system/microsoft/azure/example_azure_service_bus.py
+++ b/providers/microsoft/azure/tests/system/microsoft/azure/example_azure_service_bus.py
@@ -40,7 +40,7 @@ try:
except ImportError:
pytest.skip("Azure Service Bus not available", allow_module_level=True)
-EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
+EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", str(6)))
CLIENT_ID = os.getenv("CLIENT_ID", "")
QUEUE_NAME = "sb_mgmt_queue_test"
diff --git a/providers/microsoft/azure/tests/system/microsoft/azure/example_azure_synapse.py b/providers/microsoft/azure/tests/system/microsoft/azure/example_azure_synapse.py
index 571cd5b46ff..1559734bec2 100644
--- a/providers/microsoft/azure/tests/system/microsoft/azure/example_azure_synapse.py
+++ b/providers/microsoft/azure/tests/system/microsoft/azure/example_azure_synapse.py
@@ -23,12 +23,12 @@ from airflow import DAG
from airflow.providers.microsoft.azure.operators.synapse import AzureSynapseRunSparkBatchOperator
AIRFLOW_HOME = os.getenv("AIRFLOW_HOME", "/usr/local/airflow")
-EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
+EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", str(6)))
default_args = {
"execution_timeout": timedelta(hours=EXECUTION_TIMEOUT),
- "retries": int(os.getenv("DEFAULT_TASK_RETRIES", 2)),
- "retry_delay":
timedelta(seconds=int(os.getenv("DEFAULT_RETRY_DELAY_SECONDS", 60))),
+ "retries": int(os.getenv("DEFAULT_TASK_RETRIES", str(2))),
+ "retry_delay":
timedelta(seconds=int(os.getenv("DEFAULT_RETRY_DELAY_SECONDS", str(60)))),
}
SPARK_JOB_PAYLOAD = {
diff --git a/pyproject.toml b/pyproject.toml
index 5f4c6d9bfbf..ab54f43e231 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -595,6 +595,12 @@ extend-select = [
"PLW0245", # super call is missing parentheses
"PLW0406", # Module {name} imports itself
"PLW0602", # Using global for {name} but no assignment is done
+ "PLW0604", # global at module level is redundant
+ "PLW0642", # Reassigned {} variable in {method_type} method
+ "PLW0711", # Exception to catch is the result of a binary and operation
+ "PLW1501", # {mode} is not a valid mode for open
+ "PLW1507", # Shallow copy of os.environ via copy.copy(os.environ)
+ "PLW1508", # Invalid type for environment variable default; expected str
or None
# Per rule enables
"RUF006", # Checks for asyncio dangling task
"RUF015", # Checks for unnecessary iterable allocation for first element
diff --git a/scripts/in_container/update_quarantined_test_status.py
b/scripts/in_container/update_quarantined_test_status.py
index 427abadf379..d983dbfc0d7 100755
--- a/scripts/in_container/update_quarantined_test_status.py
+++ b/scripts/in_container/update_quarantined_test_status.py
@@ -213,8 +213,8 @@ if __name__ == "__main__":
raise RuntimeError("GitHub Repository must be defined!")
user, repo = github_repository.split("/")
print(f"User: {user}, Repo: {repo}")
- issue_id = int(os.environ.get("ISSUE_ID", 0))
- num_runs = int(os.environ.get("NUM_RUNS", 10))
+ issue_id = int(os.environ.get("ISSUE_ID", str(0)))
+ num_runs = int(os.environ.get("NUM_RUNS", str(10)))
if issue_id == 0:
raise RuntimeError("You need to define ISSUE_ID as environment variable")