This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new f923bd3ecb Refactor unneeded 'continue' jumps in www (#33838)
f923bd3ecb is described below
commit f923bd3ecbe624cbb294e84f7cdb30db05b8f167
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Fri Sep 1 16:09:04 2023 +0000
Refactor unneeded 'continue' jumps in www (#33838)
---
airflow/www/security.py | 42 ++++++++++++-----------------
airflow/www/utils.py | 11 ++++----
airflow/www/views.py | 36 +++++++++++--------------
tests/www/views/test_views_configuration.py | 17 +++++-------
4 files changed, 44 insertions(+), 62 deletions(-)
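The change is mechanical across all four files: loop bodies that used early 'continue' guards are rewritten as positive conditions wrapping the remaining statements. A minimal sketch of the pattern, with illustrative names that are not taken from the patch itself:

    # Before: guard clauses that jump to the next iteration.
    def collect_views_with_continue(obj):
        views = []
        for attr in dir(obj):
            if not attr.endswith("view"):
                continue
            view = getattr(obj, attr, None)
            if view is None:
                continue
            views.append(view)
        return views

    # After: the same logic as nested positive conditions, mirroring the
    # refactor applied in security.py, utils.py and views.py below.
    def collect_views_without_continue(obj):
        views = []
        for attr in dir(obj):
            if attr.endswith("view"):
                view = getattr(obj, attr, None)
                if view is not None:
                    views.append(view)
        return views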
diff --git a/airflow/www/security.py b/airflow/www/security.py
index a558d92b15..7dcf2c30d0 100644
--- a/airflow/www/security.py
+++ b/airflow/www/security.py
@@ -235,12 +235,10 @@ class AirflowSecurityManager(SecurityManagerOverride, SecurityManager, LoggingMi
# This is needed to support the "hack" where we had to edit
# FieldConverter.conversion_table in place in airflow.www.utils
for attr in dir(self):
- if not attr.endswith("view"):
- continue
- view = getattr(self, attr, None)
- if not view or not getattr(view, "datamodel", None):
- continue
- view.datamodel = CustomSQLAInterface(view.datamodel.obj)
+ if attr.endswith("view"):
+ view = getattr(self, attr, None)
+ if view and getattr(view, "datamodel", None):
+ view.datamodel = CustomSQLAInterface(view.datamodel.obj)
self.perms = None
def _get_root_dag_id(self, dag_id: str) -> str:
@@ -374,17 +372,15 @@ class AirflowSecurityManager(SecurityManagerOverride, SecurityManager, LoggingMi
for role in roles:
for permission in role.permissions:
action = permission.action.name
- if action not in user_actions:
- continue
-
- resource = permission.resource.name
- if resource == permissions.RESOURCE_DAG:
- return {dag.dag_id for dag in session.execute(select(DagModel.dag_id))}
-
- if resource.startswith(permissions.RESOURCE_DAG_PREFIX):
- resources.add(resource[len(permissions.RESOURCE_DAG_PREFIX) :])
- else:
- resources.add(resource)
+ if action in user_actions:
+ resource = permission.resource.name
+ if resource == permissions.RESOURCE_DAG:
+ return {dag.dag_id for dag in session.execute(select(DagModel.dag_id))}
+
+ if resource.startswith(permissions.RESOURCE_DAG_PREFIX):
+ resources.add(resource[len(permissions.RESOURCE_DAG_PREFIX) :])
+ else:
+ resources.add(resource)
return {
dag.dag_id
for dag in session.execute(select(DagModel.dag_id).where(DagModel.dag_id.in_(resources)))
@@ -775,14 +771,10 @@ class AirflowSecurityManager(SecurityManagerOverride, SecurityManager, LoggingMi
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG),
):
can_access_all_dags = self.has_access(*perm)
- if can_access_all_dags:
- continue
-
- action = perm[0]
- if self.can_access_some_dags(action, dag_id):
- continue
- return False
-
+ if not can_access_all_dags:
+ action = perm[0]
+ if not self.can_access_some_dags(action, dag_id):
+ return False
elif not self.has_access(*perm):
return False
diff --git a/airflow/www/utils.py b/airflow/www/utils.py
index 5f90b30969..f56aca9f37 100644
--- a/airflow/www/utils.py
+++ b/airflow/www/utils.py
@@ -806,12 +806,11 @@ class CustomSQLAInterface(SQLAInterface):
clean_column_names()
# Support for AssociationProxy in search and list columns
for obj_attr, desc in self.obj.__mapper__.all_orm_descriptors.items():
- if not isinstance(desc, AssociationProxy):
- continue
- proxy_instance = getattr(self.obj, obj_attr)
- if hasattr(proxy_instance.remote_attr.prop, "columns"):
- self.list_columns[obj_attr] = proxy_instance.remote_attr.prop.columns[0]
- self.list_properties[obj_attr] = proxy_instance.remote_attr.prop
+ if isinstance(desc, AssociationProxy):
+ proxy_instance = getattr(self.obj, obj_attr)
+ if hasattr(proxy_instance.remote_attr.prop, "columns"):
+ self.list_columns[obj_attr] = proxy_instance.remote_attr.prop.columns[0]
+ self.list_properties[obj_attr] = proxy_instance.remote_attr.prop
def is_utcdatetime(self, col_name):
"""Check if the datetime is a UTC one."""
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 6b11e99b72..7224b079be 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -971,12 +971,10 @@ class Airflow(AirflowBaseView):
def _iter_parsed_moved_data_table_names():
for table_name in inspect(session.get_bind()).get_table_names():
segments = table_name.split("__", 3)
- if len(segments) < 3:
- continue
- if segments[0] != settings.AIRFLOW_MOVED_TABLE_PREFIX:
- continue
- # Second segment is a version marker that we don't need to show.
- yield segments[-1], table_name
+ if len(segments) >= 3:
+ if segments[0] == settings.AIRFLOW_MOVED_TABLE_PREFIX:
+ # Second segment is a version marker that we don't need to show.
+ yield segments[-1], table_name
if (
permissions.ACTION_CAN_ACCESS_MENU,
@@ -3379,12 +3377,11 @@ class Airflow(AirflowBaseView):
y_points = []
x_points = []
for ti in tis:
- if ti.task_id != task.task_id:
- continue
- dttm = wwwutils.epoch(ti.execution_date)
- x_points.append(dttm)
- # y value should reflect completed tries to have a 0 baseline.
- y_points.append(ti.prev_attempted_tries)
+ if ti.task_id == task.task_id:
+ dttm = wwwutils.epoch(ti.execution_date)
+ x_points.append(dttm)
+ # y value should reflect completed tries to have a 0 baseline.
+ y_points.append(ti.prev_attempted_tries)
if x_points:
chart.add_serie(name=task.task_id, x=x_points, y=y_points)
@@ -3476,14 +3473,13 @@ class Airflow(AirflowBaseView):
for task in dag.tasks:
task_id = task.task_id
for ti in tis:
- if ti.task_id != task.task_id:
- continue
- ts = dag.get_run_data_interval(ti.dag_run).end
- if ti.end_date:
- dttm = wwwutils.epoch(ti.execution_date)
- secs = (ti.end_date - ts).total_seconds()
- x_points[task_id].append(dttm)
- y_points[task_id].append(secs)
+ if ti.task_id == task.task_id:
+ ts = dag.get_run_data_interval(ti.dag_run).end
+ if ti.end_date:
+ dttm = wwwutils.epoch(ti.execution_date)
+ secs = (ti.end_date - ts).total_seconds()
+ x_points[task_id].append(dttm)
+ y_points[task_id].append(secs)
# determine the most relevant time unit for the set of landing times
# for the DAG
diff --git a/tests/www/views/test_views_configuration.py b/tests/www/views/test_views_configuration.py
index a6ea406e54..b5eed6467d 100644
--- a/tests/www/views/test_views_configuration.py
+++ b/tests/www/views/test_views_configuration.py
@@ -38,9 +38,8 @@ def test_user_can_view_configuration(admin_client):
resp = admin_client.get("configuration", follow_redirects=True)
for section, key in conf.sensitive_config_values:
value = conf.get(section, key, fallback="")
- if not value:
- continue
- check_content_in_response(html.escape(value), resp)
+ if value:
+ check_content_in_response(html.escape(value), resp)
@conf_vars({("webserver", "expose_config"): "non-sensitive-only"})
@@ -48,11 +47,8 @@ def test_configuration_redacted(admin_client):
resp = admin_client.get("configuration", follow_redirects=True)
for section, key in conf.sensitive_config_values:
value = conf.get(section, key, fallback="")
- if not value or value == "airflow":
- continue
- if value.startswith("db+postgresql"): # this is in configuration comment
- continue
- check_content_not_in_response(value, resp)
+ if value and value != "airflow" and not value.startswith("db+postgresql"):
+ check_content_not_in_response(value, resp)
@conf_vars({("webserver", "expose_config"): "non-sensitive-only"})
@@ -60,9 +56,8 @@ def test_configuration_redacted_in_running_configuration(admin_client):
resp = admin_client.get("configuration", follow_redirects=True)
for section, key in conf.sensitive_config_values:
value = conf.get(section, key, fallback="")
- if not value or value == "airflow":
- continue
- check_content_not_in_response("<td class='code'>" + html.escape(value) + "</td", resp)
+ if value and value != "airflow":
+ check_content_not_in_response("<td class='code'>" + html.escape(value) + "</td", resp)
@conf_vars({("webserver", "expose_config"): "non-sensitive-only"})
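The test changes follow the same idea, folding several 'continue' guards into one compound condition. A standalone sketch of that transformation, again with illustrative names rather than code from the patch:

    # Before: several early exits per iteration.
    def redactable_values_with_continue(values):
        kept = []
        for value in values:
            if not value:
                continue
            if value == "airflow":
                continue
            if value.startswith("db+postgresql"):
                continue
            kept.append(value)
        return kept

    # After: a single positive compound condition, as in
    # test_configuration_redacted above.
    def redactable_values_without_continue(values):
        kept = []
        for value in values:
            if value and value != "airflow" and not value.startswith("db+postgresql"):
                kept.append(value)
        return kept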