This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new e4d44fc60a Refactor: Think positively (#34278)
e4d44fc60a is described below
commit e4d44fc60a0264c97d0253231b9872054865b22a
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Tue Sep 12 23:02:13 2023 +0000
Refactor: Think positively (#34278)
---
airflow/api/common/mark_tasks.py | 2 +-
airflow/api_connexion/endpoints/role_and_permission_endpoint.py | 2 +-
airflow/cli/commands/connection_command.py | 2 +-
airflow/cli/commands/jobs_command.py | 2 +-
airflow/example_dags/example_params_trigger_ui.py | 8 ++++----
airflow/jobs/local_task_job_runner.py | 2 +-
airflow/models/abstractoperator.py | 4 ++--
airflow/models/xcom.py | 2 +-
airflow/serialization/serialized_objects.py | 2 +-
airflow/ti_deps/deps/trigger_rule_dep.py | 2 +-
airflow/utils/db_cleanup.py | 4 ++--
11 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/airflow/api/common/mark_tasks.py b/airflow/api/common/mark_tasks.py
index cfd7471d24..03a008236d 100644
--- a/airflow/api/common/mark_tasks.py
+++ b/airflow/api/common/mark_tasks.py
@@ -541,7 +541,7 @@ def __set_dag_run_state_to_running_or_queued(
"""
res: list[TaskInstance] = []
- if not (execution_date is None) ^ (run_id is None):
+ if not exactly_one(execution_date, run_id):
return res
if not dag:
diff --git a/airflow/api_connexion/endpoints/role_and_permission_endpoint.py b/airflow/api_connexion/endpoints/role_and_permission_endpoint.py
index b8ace0e1de..4b1a310c86 100644
--- a/airflow/api_connexion/endpoints/role_and_permission_endpoint.py
+++ b/airflow/api_connexion/endpoints/role_and_permission_endpoint.py
@@ -133,7 +133,7 @@ def patch_role(*, role_name: str, update_mask: UpdateMask = None) -> APIResponse
update_mask = [i.strip() for i in update_mask]
data_ = {}
for field in update_mask:
- if field in data and not field == "permissions":
+ if field in data and field != "permissions":
data_[field] = data[field]
elif field == "actions":
data_["permissions"] = data["permissions"]
diff --git a/airflow/cli/commands/connection_command.py b/airflow/cli/commands/connection_command.py
index 6990417dc0..dbc6cc583b 100644
--- a/airflow/cli/commands/connection_command.py
+++ b/airflow/cli/commands/connection_command.py
@@ -182,7 +182,7 @@ def connections_export(args):
f"Unsupported file format. The file must have the extension {', '.join(file_formats)}."
)
- if args.serialization_format and not filetype == ".env":
+ if args.serialization_format and filetype != ".env":
raise SystemExit("Option `--serialization-format` may only be used with file type `env`.")
with create_session() as session:
diff --git a/airflow/cli/commands/jobs_command.py b/airflow/cli/commands/jobs_command.py
index 784e21fa79..185f977c3e 100644
--- a/airflow/cli/commands/jobs_command.py
+++ b/airflow/cli/commands/jobs_command.py
@@ -34,7 +34,7 @@ if TYPE_CHECKING:
@provide_session
def check(args, session: Session = NEW_SESSION) -> None:
"""Check if job(s) are still alive."""
- if args.allow_multiple and not args.limit > 1:
+ if args.allow_multiple and args.limit <= 1:
raise SystemExit("To use option --allow-multiple, you must set the limit to a value greater than 1.")
if args.hostname and args.local:
raise SystemExit("You can't use --hostname and --local at the same time")
diff --git a/airflow/example_dags/example_params_trigger_ui.py b/airflow/example_dags/example_params_trigger_ui.py
index 1ca0a19803..f9ab98b233 100644
--- a/airflow/example_dags/example_params_trigger_ui.py
+++ b/airflow/example_dags/example_params_trigger_ui.py
@@ -89,13 +89,13 @@ with DAG(
@task(task_id="print_greetings", trigger_rule=TriggerRule.ALL_DONE)
def print_greetings(greetings1, greetings2, greetings3) -> None:
- for g in greetings1 if greetings1 else []:
+ for g in greetings1 or []:
print(g)
- for g in greetings2 if greetings2 else []:
+ for g in greetings2 or []:
print(g)
- for g in greetings3 if greetings3 else []:
+ for g in greetings3 or []:
print(g)
- if not greetings1 and not greetings2 and not greetings3:
+ if not (greetings1 or greetings2 or greetings3):
print("sad, nobody to greet :-(")
lang_select = select_languages()
diff --git a/airflow/jobs/local_task_job_runner.py b/airflow/jobs/local_task_job_runner.py
index a1382cf1c0..079ad4cbba 100644
--- a/airflow/jobs/local_task_job_runner.py
+++ b/airflow/jobs/local_task_job_runner.py
@@ -233,7 +233,7 @@ class LocalTaskJobRunner(BaseJobRunner["Job | JobPydantic"], LoggingMixin):
else:
self.log.info("Task exited with return code %s", return_code)
- if not self.task_instance.test_mode and not is_deferral:
+ if not (self.task_instance.test_mode or is_deferral):
if conf.getboolean("scheduler", "schedule_after_task_execution", fallback=True):
self.task_instance.schedule_downstream_tasks(max_tis_per_query=self.job.max_tis_per_query)
diff --git a/airflow/models/abstractoperator.py b/airflow/models/abstractoperator.py
index 675550c82c..62d839ac32 100644
--- a/airflow/models/abstractoperator.py
+++ b/airflow/models/abstractoperator.py
@@ -266,7 +266,7 @@ class AbstractOperator(Templater, DAGNode):
yield task
if task.is_setup:
for t in task.downstream_list:
- if t.is_teardown and not t == self:
+ if t.is_teardown and t != self:
yield t
def get_upstreams_only_setups_and_teardowns(self) -> Iterable[Operator]:
@@ -290,7 +290,7 @@ class AbstractOperator(Templater, DAGNode):
if has_no_teardowns or task.downstream_task_ids.intersection(downstream_teardown_ids):
yield task
for t in task.downstream_list:
- if t.is_teardown and not t == self:
+ if t.is_teardown and t != self:
yield t
def get_upstreams_only_setups(self) -> Iterable[Operator]:
diff --git a/airflow/models/xcom.py b/airflow/models/xcom.py
index a02fa19fc2..23d33e268d 100644
--- a/airflow/models/xcom.py
+++ b/airflow/models/xcom.py
@@ -867,7 +867,7 @@ def resolve_xcom_backend() -> type[BaseXCom]:
)
base_xcom_params = _get_function_params(BaseXCom.serialize_value)
xcom_params = _get_function_params(clazz.serialize_value)
- if not set(base_xcom_params) == set(xcom_params):
+ if set(base_xcom_params) != set(xcom_params):
_patch_outdated_serializer(clazz=clazz, params=xcom_params)
return clazz
diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py
index 38f5515111..c93a0c4b5d 100644
--- a/airflow/serialization/serialized_objects.py
+++ b/airflow/serialization/serialized_objects.py
@@ -1526,7 +1526,7 @@ class DagDependency:
def node_id(self):
"""Node ID for graph rendering."""
val = f"{self.dependency_type}"
- if not self.dependency_type == "dataset":
+ if self.dependency_type != "dataset":
val += f":{self.source}:{self.target}"
if self.dependency_id:
val += f":{self.dependency_id}"
diff --git a/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow/ti_deps/deps/trigger_rule_dep.py
index 7bb4bf5213..82035a742d 100644
--- a/airflow/ti_deps/deps/trigger_rule_dep.py
+++ b/airflow/ti_deps/deps/trigger_rule_dep.py
@@ -523,7 +523,7 @@ class TriggerRuleDep(BaseTIDep):
f"upstream_task_ids={task.upstream_task_ids}"
)
)
- elif upstream_setup and not success_setup >= 1:
+ elif upstream_setup and not success_setup:
yield self._failing_status(
reason=(
f"Task's trigger rule '{trigger_rule}' requires at least one upstream setup task "
diff --git a/airflow/utils/db_cleanup.py b/airflow/utils/db_cleanup.py
index b246eb8c40..cbc93c0d44 100644
--- a/airflow/utils/db_cleanup.py
+++ b/airflow/utils/db_cleanup.py
@@ -319,7 +319,7 @@ def _confirm_delete(*, date: DateTime, tables: list[str]):
)
print(question)
answer = input().strip()
- if not answer == "delete rows":
+ if answer != "delete rows":
raise SystemExit("User did not confirm; exiting.")
@@ -339,7 +339,7 @@ def _confirm_drop_archives(*, tables: list[str]):
if show_tables:
print(tables, "\n")
answer = input("Enter 'drop archived tables' (without quotes) to proceed.\n").strip()
- if not answer == "drop archived tables":
+ if answer != "drop archived tables":
raise SystemExit("User did not confirm; exiting.")