This is an automated email from the ASF dual-hosted git repository.
uranusjr pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 33e5d0351d Replace sequence concatenation with unpacking in Airflow core (#33934)
33e5d0351d is described below
commit 33e5d0351d7d09033c57a4a0b851b9e1b50bfb01
Author: Hussein Awala <[email protected]>
AuthorDate: Tue Sep 5 11:30:14 2023 +0200
Replace sequence concatenation with unpacking in Airflow core (#33934)
---
 airflow/api/common/trigger_dag.py                           | 2 +-
 airflow/api_connexion/schemas/common_schema.py              | 4 ++--
 airflow/api_connexion/schemas/enum_schemas.py               | 4 ++--
 airflow/cli/cli_config.py                                   | 2 +-
 airflow/cli/commands/standalone_command.py                  | 2 +-
 airflow/configuration.py                                    | 2 +-
 airflow/jobs/backfill_job_runner.py                         | 2 +-
 airflow/kubernetes/pre_7_4_0_compatibility/pod_generator.py | 7 ++++---
 airflow/models/dag.py                                       | 2 +-
 airflow/utils/python_virtualenv.py                          | 6 ++----
 airflow/utils/state.py                                      | 2 +-
 airflow/www/utils.py                                        | 6 ++++--
12 files changed, 21 insertions(+), 20 deletions(-)
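
All twelve files apply the same refactor: a sequence built with "+" concatenation becomes a single literal that splices its parts in with the "*" unpacking operator. A minimal sketch of the idiom, reusing the names from the first hunk below (illustrative only, not a line from the Airflow source):

    # Before: "+" needs both operands to be the same sequence type,
    # so iterables often have to be wrapped in list()/tuple() first.
    dags_to_run = [dag] + list(dag.subdags)

    # After: "*" splices any iterable straight into the literal,
    # building the result in one pass with no temporary sequences.
    dags_to_run = [dag, *dag.subdags]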
diff --git a/airflow/api/common/trigger_dag.py b/airflow/api/common/trigger_dag.py
index a522b938df..a7b39cb5a6 100644
--- a/airflow/api/common/trigger_dag.py
+++ b/airflow/api/common/trigger_dag.py
@@ -85,7 +85,7 @@ def _trigger_dag(
     run_conf = conf if isinstance(conf, dict) else json.loads(conf)

     dag_runs = []
-    dags_to_run = [dag] + dag.subdags
+    dags_to_run = [dag, *dag.subdags]
     for _dag in dags_to_run:
         dag_run = _dag.create_dagrun(
             run_id=run_id,
diff --git a/airflow/api_connexion/schemas/common_schema.py b/airflow/api_connexion/schemas/common_schema.py
index cf51013762..a470e6b1c0 100644
--- a/airflow/api_connexion/schemas/common_schema.py
+++ b/airflow/api_connexion/schemas/common_schema.py
@@ -135,7 +135,7 @@ class ColorField(fields.String):

     def __init__(self, **metadata):
         super().__init__(**metadata)
-        self.validators = [validate.Regexp("^#[a-fA-F0-9]{3,6}$")] + list(self.validators)
+        self.validators = [validate.Regexp("^#[a-fA-F0-9]{3,6}$"), *self.validators]


 class WeightRuleField(fields.String):
@@ -143,7 +143,7 @@ class WeightRuleField(fields.String):

     def __init__(self, **metadata):
         super().__init__(**metadata)
-        self.validators = [validate.OneOf(WeightRule.all_weight_rules())] + list(self.validators)
+        self.validators = [validate.OneOf(WeightRule.all_weight_rules()), *self.validators]


 class TimezoneField(fields.String):
diff --git a/airflow/api_connexion/schemas/enum_schemas.py b/airflow/api_connexion/schemas/enum_schemas.py
index 981a3669b1..ba82010783 100644
--- a/airflow/api_connexion/schemas/enum_schemas.py
+++ b/airflow/api_connexion/schemas/enum_schemas.py
@@ -26,7 +26,7 @@ class DagStateField(fields.String):

     def __init__(self, **metadata):
         super().__init__(**metadata)
-        self.validators = [validate.OneOf(State.dag_states)] + list(self.validators)
+        self.validators = [validate.OneOf(State.dag_states), *self.validators]


 class TaskInstanceStateField(fields.String):
@@ -34,4 +34,4 @@ class TaskInstanceStateField(fields.String):

     def __init__(self, **metadata):
         super().__init__(**metadata)
-        self.validators = [validate.OneOf(State.task_states)] + list(self.validators)
+        self.validators = [validate.OneOf(State.task_states), *self.validators]
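
The four schema fields above all prepend a validator to whatever the parent marshmallow String field already registered. Unpacking also drops the list(self.validators) wrapper, since "*" accepts any iterable while "+" required a list on both sides. A hedged standalone sketch of the pattern (SimpleField and the length check are hypothetical, not from the Airflow source):

    from marshmallow import fields, validate

    class SimpleField(fields.String):
        def __init__(self, **metadata):
            super().__init__(**metadata)
            # Prepend our validator; whatever validators the parent
            # registered keep running after it.
            self.validators = [validate.Length(min=1), *self.validators]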
diff --git a/airflow/cli/cli_config.py b/airflow/cli/cli_config.py
index 01dbe7080c..1e3432180e 100644
--- a/airflow/cli/cli_config.py
+++ b/airflow/cli/cli_config.py
@@ -1621,7 +1621,7 @@ CONNECTIONS_COMMANDS = (
         name="add",
         help="Add a connection",
         func=lazy_load_command("airflow.cli.commands.connection_command.connections_add"),
-        args=(ARG_CONN_ID, ARG_CONN_URI, ARG_CONN_JSON, ARG_CONN_EXTRA) + tuple(ALTERNATIVE_CONN_SPECS_ARGS),
+        args=(ARG_CONN_ID, ARG_CONN_URI, ARG_CONN_JSON, ARG_CONN_EXTRA, *ALTERNATIVE_CONN_SPECS_ARGS),
     ),
     ActionCommand(
         name="delete",
diff --git a/airflow/cli/commands/standalone_command.py b/airflow/cli/commands/standalone_command.py
index eb5c2af0f5..7821d1d8c1 100644
--- a/airflow/cli/commands/standalone_command.py
+++ b/airflow/cli/commands/standalone_command.py
@@ -291,7 +291,7 @@ class SubCommand(threading.Thread):
     def run(self):
         """Run the actual process and captures it output to a queue."""
         self.process = subprocess.Popen(
-            ["airflow"] + self.command,
+            ["airflow", *self.command],
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
             env=self.env,
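
The same splice works when assembling a subprocess argv, as in the standalone_command hunk above. A runnable sketch (the "version" subcommand is just an illustrative stand-in for self.command):

    import subprocess

    command = ["version"]  # stand-in for the self.command list in the diff
    process = subprocess.Popen(
        ["airflow", *command],  # argv: executable name, then spliced args
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    print(process.communicate()[0].decode())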
diff --git a/airflow/configuration.py b/airflow/configuration.py
index 25d1f7b741..68e9a41f22 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -468,7 +468,7 @@ class AirflowConfigParser(ConfigParser):
         ("logging", "logging_level"): _available_logging_levels,
         ("logging", "fab_logging_level"): _available_logging_levels,
         # celery_logging_level can be empty, which uses logging_level as fallback
-        ("logging", "celery_logging_level"): _available_logging_levels + [""],
+        ("logging", "celery_logging_level"): [*_available_logging_levels, ""],
         ("webserver", "analytical_tool"): ["google_analytics", "metarouter", "segment", ""],
     }
diff --git a/airflow/jobs/backfill_job_runner.py b/airflow/jobs/backfill_job_runner.py
index a1fd7a9173..daa549dbe2 100644
--- a/airflow/jobs/backfill_job_runner.py
+++ b/airflow/jobs/backfill_job_runner.py
@@ -789,7 +789,7 @@ class BackfillJobRunner(BaseJobRunner[Job], LoggingMixin):
             yield tabulate_ti_keys_set([ti.key for ti in ti_status.deadlocked])

     def _get_dag_with_subdags(self) -> list[DAG]:
-        return [self.dag] + self.dag.subdags
+        return [self.dag, *self.dag.subdags]

     @provide_session
     def _execute_dagruns(
diff --git a/airflow/kubernetes/pre_7_4_0_compatibility/pod_generator.py b/airflow/kubernetes/pre_7_4_0_compatibility/pod_generator.py
index 4576197863..d61a3bc6ce 100644
--- a/airflow/kubernetes/pre_7_4_0_compatibility/pod_generator.py
+++ b/airflow/kubernetes/pre_7_4_0_compatibility/pod_generator.py
@@ -366,9 +366,10 @@ class PodGenerator:
         client_container = extend_object_field(base_container, client_container, "volume_devices")
         client_container = merge_objects(base_container, client_container)
-        return [client_container] + PodGenerator.reconcile_containers(
-            base_containers[1:], client_containers[1:]
-        )
+        return [
+            client_container,
+            *PodGenerator.reconcile_containers(base_containers[1:], client_containers[1:]),
+        ]

     @classmethod
     def construct_pod(
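
In the pod_generator hunk the spliced value is the result of a recursive call, so the merged head and the reconciled tail land in one list literal instead of a concatenation. A minimal sketch of that shape (merge is a hypothetical stand-in for the container-merging logic, not the Airflow helper):

    def reconcile(base: list, client: list) -> list:
        if not base:
            return client
        if not client:
            return base
        # Merge the heads, then splice in the recursively reconciled tails.
        return [merge(base[0], client[0]), *reconcile(base[1:], client[1:])]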
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index 6be920f432..642d617804 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -1708,7 +1708,7 @@ class DAG(LoggingMixin):
         if include_subdags:
             # Crafting the right filter for dag_id and task_ids combo
             conditions = []
-            for dag in self.subdags + [self]:
+            for dag in [*self.subdags, self]:
                 conditions.append(
                     (TaskInstance.dag_id == dag.dag_id) & TaskInstance.task_id.in_(dag.task_ids)
                 )
diff --git a/airflow/utils/python_virtualenv.py b/airflow/utils/python_virtualenv.py
index e957cce837..d613782f32 100644
--- a/airflow/utils/python_virtualenv.py
+++ b/airflow/utils/python_virtualenv.py
@@ -41,15 +41,13 @@ def _generate_virtualenv_cmd(tmp_dir: str, python_bin: str, system_site_packages
 def _generate_pip_install_cmd_from_file(
     tmp_dir: str, requirements_file_path: str, pip_install_options: list[str]
 ) -> list[str]:
-    cmd = [f"{tmp_dir}/bin/pip", "install"] + pip_install_options + ["-r"]
-    return cmd + [requirements_file_path]
+    return [f"{tmp_dir}/bin/pip", "install", *pip_install_options, "-r", requirements_file_path]


 def _generate_pip_install_cmd_from_list(
     tmp_dir: str, requirements: list[str], pip_install_options: list[str]
 ) -> list[str]:
-    cmd = [f"{tmp_dir}/bin/pip", "install"] + pip_install_options
-    return cmd + requirements
+    return [f"{tmp_dir}/bin/pip", "install", *pip_install_options, *requirements]


 def _generate_pip_conf(conf_file: Path, index_urls: list[str]) -> None:
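
After the rewrite each helper collapses to a single list literal. A hedged usage sketch (the argument values are illustrative, not from the Airflow tests):

    cmd = _generate_pip_install_cmd_from_list(
        tmp_dir="/tmp/venv",                # hypothetical virtualenv dir
        requirements=["requests==2.31.0"],  # hypothetical pin
        pip_install_options=["--no-cache-dir"],
    )
    # cmd == ["/tmp/venv/bin/pip", "install", "--no-cache-dir", "requests==2.31.0"]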
diff --git a/airflow/utils/state.py b/airflow/utils/state.py
index 10355dcaf0..1061970c46 100644
--- a/airflow/utils/state.py
+++ b/airflow/utils/state.py
@@ -119,7 +119,7 @@ class State:

     finished_dr_states: frozenset[DagRunState] = frozenset([DagRunState.SUCCESS, DagRunState.FAILED])
     unfinished_dr_states: frozenset[DagRunState] = frozenset([DagRunState.QUEUED, DagRunState.RUNNING])
-    task_states: tuple[TaskInstanceState | None, ...] = (None,) + tuple(TaskInstanceState)
+    task_states: tuple[TaskInstanceState | None, ...] = (None, *TaskInstanceState)

     dag_states: tuple[DagRunState, ...] = (
         DagRunState.QUEUED,
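
Unpacking works on Enum classes too, because iterating an Enum yields its members; (None, *TaskInstanceState) is the tuple of None followed by every TaskInstanceState member, with no tuple() wrapper needed. A standalone sketch with a hypothetical enum:

    from enum import Enum

    class Phase(Enum):  # hypothetical stand-in for TaskInstanceState
        QUEUED = "queued"
        RUNNING = "running"

    states = (None, *Phase)
    # states == (None, Phase.QUEUED, Phase.RUNNING)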
diff --git a/airflow/www/utils.py b/airflow/www/utils.py
index f56aca9f37..a415c03911 100644
--- a/airflow/www/utils.py
+++ b/airflow/www/utils.py
@@ -775,7 +775,8 @@ class AirflowFilterConverter(fab_sqlafilters.SQLAFilterConverter):
             "is_extendedjson",
             [],
         ),
-    ) + fab_sqlafilters.SQLAFilterConverter.conversion_table
+        *fab_sqlafilters.SQLAFilterConverter.conversion_table,
+    )

     def __init__(self, datamodel):
         super().__init__(datamodel)
@@ -876,7 +877,8 @@ class DagRunCustomSQLAInterface(CustomSQLAInterface):
 # place
 FieldConverter.conversion_table = (
     ("is_utcdatetime", DateTimeWithTimezoneField, AirflowDateTimePickerWidget),
-) + FieldConverter.conversion_table
+    *FieldConverter.conversion_table,
+)


 class UIAlert:
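
Both www/utils.py hunks prepend entries to an inherited class-level tuple; with unpacking, the new entry and the parent's table fit inside one literal instead of a "+" concatenation outside the closing parenthesis. A minimal sketch of the shape (Base and Child are hypothetical, standing in for the FAB converter classes):

    class Base:
        conversion_table = (("is_text", [], []),)

    class Child(Base):
        conversion_table = (
            ("is_extendedjson", [], []),  # new entry, listed first
            *Base.conversion_table,       # parent entries follow
        )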