This is an automated email from the ASF dual-hosted git repository.
husseinawala pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 8e7b1add43 Remove unused loop variable from airflow package (#38308)
8e7b1add43 is described below
commit 8e7b1add435164112b80b9319ec0b2f2df94fa12
Author: Hussein Awala <[email protected]>
AuthorDate: Wed Mar 20 22:55:54 2024 +0100
Remove unused loop variable from airflow package (#38308)
---
airflow/dag_processing/manager.py | 2 +-
airflow/jobs/scheduler_job_runner.py | 2 +-
airflow/providers/google/cloud/hooks/bigquery.py | 2 +-
airflow/providers/google/cloud/transfers/gcs_to_bigquery.py | 2 +-
airflow/providers/weaviate/hooks/weaviate.py | 2 +-
airflow/www/utils.py | 2 +-
airflow/www/views.py | 4 ++--
7 files changed, 8 insertions(+), 8 deletions(-)
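For context, the commit applies a common cleanup pattern: when a loop body does not use everything it unpacks, iterate over .values() (or the keys directly) instead of .items(), and name any remaining unused slot "_". A minimal sketch of the pattern, using hypothetical names and data rather than code from the diff:

# Illustrative sketch only -- names and data are hypothetical, not from the commit.
processors = {"/dags/a.py": "processor_a", "/dags/b.py": "processor_b"}

# Before: the key is bound but never used inside the loop body.
for file_path, processor in processors.items():
    print(processor)

# After: iterate only over the values the body actually needs.
for processor in processors.values():
    print(processor)

# When unpacking tuples, the conventional placeholder for an unused slot is "_".
for dag_id, _, task_id in [("dag", "run", "task")]:
    print(dag_id, task_id)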
diff --git a/airflow/dag_processing/manager.py b/airflow/dag_processing/manager.py
index 71f25b16ac..ef9ac5c44a 100644
--- a/airflow/dag_processing/manager.py
+++ b/airflow/dag_processing/manager.py
@@ -1177,7 +1177,7 @@ class DagFileProcessorManager(LoggingMixin):
]
if self.log.isEnabledFor(logging.DEBUG):
- for file_path, processor in self._processors.items():
+ for processor in self._processors.values():
self.log.debug(
"File path %s is still being processed (started: %s)",
processor.file_path,
diff --git a/airflow/jobs/scheduler_job_runner.py b/airflow/jobs/scheduler_job_runner.py
index 9a5ba78b6f..c62ebc53b5 100644
--- a/airflow/jobs/scheduler_job_runner.py
+++ b/airflow/jobs/scheduler_job_runner.py
@@ -110,7 +110,7 @@ class ConcurrencyMap:
@classmethod
def from_concurrency_map(cls, mapping: dict[tuple[str, str, str], int]) -> ConcurrencyMap:
instance = cls(Counter(), Counter(), Counter(mapping))
- for (d, r, t), c in mapping.items():
+ for (d, _, t), c in mapping.items():
instance.dag_active_tasks_map[d] += c
instance.task_concurrency_map[(d, t)] += c
return instance
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py
index 20b1ac10c8..eed3b8ed48 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -3185,7 +3185,7 @@ def _validate_src_fmt_configs(
if k not in src_fmt_configs and k in valid_configs:
src_fmt_configs[k] = v
- for k, v in src_fmt_configs.items():
+ for k in src_fmt_configs:
if k not in valid_configs:
raise ValueError(f"{k} is not a valid src_fmt_configs for type
{source_format}.")
diff --git a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
index 0632e54b79..03aefcb8ad 100644
--- a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
+++ b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
@@ -714,7 +714,7 @@ class GCSToBigQueryOperator(BaseOperator):
if k not in src_fmt_configs and k in valid_configs:
src_fmt_configs[k] = v
- for k, v in src_fmt_configs.items():
+ for k in src_fmt_configs:
if k not in valid_configs:
raise ValueError(f"{k} is not a valid src_fmt_configs for type
{source_format}.")
diff --git a/airflow/providers/weaviate/hooks/weaviate.py b/airflow/providers/weaviate/hooks/weaviate.py
index 649cacfc12..b0c8b8793c 100644
--- a/airflow/providers/weaviate/hooks/weaviate.py
+++ b/airflow/providers/weaviate/hooks/weaviate.py
@@ -459,7 +459,7 @@ class WeaviateHook(BaseHook):
client.batch.configure(**batch_config_params)
with client.batch as batch:
# Batch import all data
- for index, data_obj in enumerate(converted_data):
+ for data_obj in converted_data:
for attempt in Retrying(
stop=stop_after_attempt(retry_attempts_per_object),
retry=(
diff --git a/airflow/www/utils.py b/airflow/www/utils.py
index 68704b2aff..c7e64fdba5 100644
--- a/airflow/www/utils.py
+++ b/airflow/www/utils.py
@@ -790,7 +790,7 @@ class AirflowFilterConverter(fab_sqlafilters.SQLAFilterConverter):
def __init__(self, datamodel):
super().__init__(datamodel)
- for method, filters in self.conversion_table:
+ for _, filters in self.conversion_table:
if FilterIsNull not in filters:
filters.append(FilterIsNull)
if FilterIsNotNull not in filters:
diff --git a/airflow/www/views.py b/airflow/www/views.py
index ebd9db40d9..00361f397e 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -2147,7 +2147,7 @@ class Airflow(AirflowBaseView):
flash(f"{ve}", "error")
form = DateTimeForm(data={"execution_date": execution_date})
# Take over "bad" submitted fields for new form display
- for k, v in form_fields.items():
+ for k in form_fields:
if k in run_conf:
form_fields[k]["value"] = run_conf[k]
return self.render_template(
@@ -4579,7 +4579,7 @@ class ConnectionModelView(AirflowModelView):
)
del form.extra
del extra_json
- for key, field_name, is_sensitive in self._iter_extra_field_names_and_sensitivity():
+ for key, field_name, _ in self._iter_extra_field_names_and_sensitivity():
if key in form.data and key.startswith("extra__"):
conn_type_from_extra_field = key.split("__")[1]
if conn_type_from_extra_field == conn_type: