This is an automated email from the ASF dual-hosted git repository.

ephraimanierobi pushed a commit to branch v2-6-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit ddecc0fb9d0e28eff7a5ff0cc4113037718acad1
Author: Pankaj Singh <[email protected]>
AuthorDate: Sat Apr 29 21:37:01 2023 +0530

    Fix `order_by` request in list DAG rest api (#30926)
    
    (cherry picked from commit 1d4b1410b027c667d4e2f51f488f98b166facf71)
---
 airflow/api_connexion/endpoints/dag_endpoint.py    |  8 ++-
 tests/api_connexion/endpoints/test_dag_endpoint.py | 80 ++++++++++++++++++++++
 2 files changed, 85 insertions(+), 3 deletions(-)

diff --git a/airflow/api_connexion/endpoints/dag_endpoint.py b/airflow/api_connexion/endpoints/dag_endpoint.py
index 83d861ba65..18dcb45be6 100644
--- a/airflow/api_connexion/endpoints/dag_endpoint.py
+++ b/airflow/api_connexion/endpoints/dag_endpoint.py
@@ -28,7 +28,7 @@ from sqlalchemy.sql.expression import or_
 from airflow import DAG
 from airflow.api_connexion import security
 from airflow.api_connexion.exceptions import AlreadyExists, BadRequest, NotFound
-from airflow.api_connexion.parameters import check_limit, format_parameters
+from airflow.api_connexion.parameters import apply_sorting, check_limit, format_parameters
 from airflow.api_connexion.schemas.dag_schema import (
     DAGCollection,
     dag_detail_schema,
@@ -75,9 +75,11 @@ def get_dags(
     dag_id_pattern: str | None = None,
     only_active: bool = True,
     paused: bool | None = None,
+    order_by: str = "dag_id",
     session: Session = NEW_SESSION,
 ) -> APIResponse:
     """Get all DAGs."""
+    allowed_attrs = ["dag_id"]
     dags_query = session.query(DagModel).filter(~DagModel.is_subdag)
     if only_active:
         dags_query = dags_query.filter(DagModel.is_active)
@@ -97,8 +99,8 @@ def get_dags(
         dags_query = dags_query.filter(or_(*cond))
 
     total_entries = dags_query.count()
-
-    dags = dags_query.order_by(DagModel.dag_id).offset(offset).limit(limit).all()
+    dags_query = apply_sorting(dags_query, order_by, {}, allowed_attrs)
+    dags = dags_query.offset(offset).limit(limit).all()
 
     return dags_collection_schema.dump(DAGCollection(dags=dags, total_entries=total_entries))
 
diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py
index 2298c58d80..5d0542a2c6 100644
--- a/tests/api_connexion/endpoints/test_dag_endpoint.py
+++ b/tests/api_connexion/endpoints/test_dag_endpoint.py
@@ -1722,6 +1722,86 @@ class TestPatchDags(TestDagEndpoint):
         dags_updated = session.query(DagModel).filter(DagModel.is_paused)
         assert len(dags_updated.all()) == 2
 
+    @provide_session
+    def test_should_respond_200_and_reverse_ordering(self, session, url_safe_serializer):
+        file_token = url_safe_serializer.dumps("/tmp/dag_1.py")
+        self._create_dag_models(2)
+        file_token10 = url_safe_serializer.dumps("/tmp/dag_2.py")
+
+        response = self.client.get(
+            "/api/v1/dags?order_by=-dag_id",
+            environ_overrides={"REMOTE_USER": "test"},
+        )
+
+        assert response.status_code == 200
+        assert {
+            "dags": [
+                {
+                    "dag_id": "TEST_DAG_2",
+                    "description": None,
+                    "fileloc": "/tmp/dag_2.py",
+                    "file_token": file_token10,
+                    "is_paused": False,
+                    "is_active": True,
+                    "is_subdag": False,
+                    "owners": [],
+                    "root_dag_id": None,
+                    "schedule_interval": {
+                        "__type": "CronExpression",
+                        "value": "2 2 * * *",
+                    },
+                    "tags": [],
+                    "next_dagrun": None,
+                    "has_task_concurrency_limits": True,
+                    "next_dagrun_data_interval_start": None,
+                    "next_dagrun_data_interval_end": None,
+                    "max_active_runs": 16,
+                    "next_dagrun_create_after": None,
+                    "last_expired": None,
+                    "max_active_tasks": 16,
+                    "last_pickled": None,
+                    "default_view": None,
+                    "last_parsed_time": None,
+                    "scheduler_lock": None,
+                    "timetable_description": None,
+                    "has_import_errors": False,
+                    "pickle_id": None,
+                },
+                {
+                    "dag_id": "TEST_DAG_1",
+                    "description": None,
+                    "fileloc": "/tmp/dag_1.py",
+                    "file_token": file_token,
+                    "is_paused": False,
+                    "is_active": True,
+                    "is_subdag": False,
+                    "owners": [],
+                    "root_dag_id": None,
+                    "schedule_interval": {
+                        "__type": "CronExpression",
+                        "value": "2 2 * * *",
+                    },
+                    "tags": [],
+                    "next_dagrun": None,
+                    "has_task_concurrency_limits": True,
+                    "next_dagrun_data_interval_start": None,
+                    "next_dagrun_data_interval_end": None,
+                    "max_active_runs": 16,
+                    "next_dagrun_create_after": None,
+                    "last_expired": None,
+                    "max_active_tasks": 16,
+                    "last_pickled": None,
+                    "default_view": None,
+                    "last_parsed_time": None,
+                    "scheduler_lock": None,
+                    "timetable_description": None,
+                    "has_import_errors": False,
+                    "pickle_id": None,
+                },
+            ],
+            "total_entries": 2,
+        } == response.json
+
     def test_should_respons_400_dag_id_pattern_missing(self):
         self._create_dag_models(1)
         response = self.client.patch(

Reply via email to