This is an automated email from the ASF dual-hosted git repository.
kaxilnaik pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 7abc289be1 Revert "Fix flaky `test_get_dags` in FastAPI routes (#43100)" (#43108)
7abc289be1 is described below
commit 7abc289be12df6e4452d78eab9c89cf69ef88867
Author: Pierre Jeambrun <[email protected]>
AuthorDate: Thu Oct 17 18:07:06 2024 +0800
Revert "Fix flaky `test_get_dags` in FastAPI routes (#43100)" (#43108)
---
.../core_api/routes/public/test_dags.py | 44 +++++++++++-----------
1 file changed, 22 insertions(+), 22 deletions(-)
diff --git a/tests/api_fastapi/core_api/routes/public/test_dags.py b/tests/api_fastapi/core_api/routes/public/test_dags.py
index e365c4d5e4..edc350c27b 100644
--- a/tests/api_fastapi/core_api/routes/public/test_dags.py
+++ b/tests/api_fastapi/core_api/routes/public/test_dags.py
@@ -135,37 +135,37 @@ class TestGetDags(TestDagEndpoint):
"query_params, expected_total_entries, expected_ids",
[
# Filters
- ({}, 2, {DAG1_ID, DAG2_ID}),
- ({"limit": 1}, 2, {DAG1_ID}),
- ({"offset": 1}, 2, {DAG2_ID}),
- ({"tags": ["example"]}, 1, {DAG1_ID}),
- ({"only_active": False}, 3, {DAG1_ID, DAG2_ID, DAG3_ID}),
- ({"paused": True, "only_active": False}, 1, {DAG3_ID}),
- ({"paused": False}, 2, {DAG1_ID, DAG2_ID}),
- ({"owners": ["airflow"]}, 2, {DAG1_ID, DAG2_ID}),
- ({"owners": ["test_owner"], "only_active": False}, 1, {DAG3_ID}),
- ({"last_dag_run_state": "success", "only_active": False}, 1, {DAG3_ID}),
- ({"last_dag_run_state": "failed", "only_active": False}, 1, {DAG1_ID}),
+ ({}, 2, [DAG1_ID, DAG2_ID]),
+ ({"limit": 1}, 2, [DAG1_ID]),
+ ({"offset": 1}, 2, [DAG2_ID]),
+ ({"tags": ["example"]}, 1, [DAG1_ID]),
+ ({"only_active": False}, 3, [DAG1_ID, DAG2_ID, DAG3_ID]),
+ ({"paused": True, "only_active": False}, 1, [DAG3_ID]),
+ ({"paused": False}, 2, [DAG1_ID, DAG2_ID]),
+ ({"owners": ["airflow"]}, 2, [DAG1_ID, DAG2_ID]),
+ ({"owners": ["test_owner"], "only_active": False}, 1, [DAG3_ID]),
+ ({"last_dag_run_state": "success", "only_active": False}, 1, [DAG3_ID]),
+ ({"last_dag_run_state": "failed", "only_active": False}, 1, [DAG1_ID]),
# # Sort
- ({"order_by": "-dag_id"}, 2, {DAG2_ID, DAG1_ID}),
- ({"order_by": "-dag_display_name"}, 2, {DAG2_ID, DAG1_ID}),
- ({"order_by": "dag_display_name"}, 2, {DAG1_ID, DAG2_ID}),
- ({"order_by": "next_dagrun", "only_active": False}, 3, {DAG3_ID, DAG1_ID, DAG2_ID}),
- ({"order_by": "last_run_state", "only_active": False}, 3, {DAG1_ID, DAG3_ID, DAG2_ID}),
- ({"order_by": "-last_run_state", "only_active": False}, 3, {DAG3_ID, DAG1_ID, DAG2_ID}),
+ ({"order_by": "-dag_id"}, 2, [DAG2_ID, DAG1_ID]),
+ ({"order_by": "-dag_display_name"}, 2, [DAG2_ID, DAG1_ID]),
+ ({"order_by": "dag_display_name"}, 2, [DAG1_ID, DAG2_ID]),
+ ({"order_by": "next_dagrun", "only_active": False}, 3, [DAG3_ID, DAG1_ID, DAG2_ID]),
+ ({"order_by": "last_run_state", "only_active": False}, 3, [DAG1_ID, DAG3_ID, DAG2_ID]),
+ ({"order_by": "-last_run_state", "only_active": False}, 3, [DAG3_ID, DAG1_ID, DAG2_ID]),
(
{"order_by": "last_run_start_date", "only_active": False},
3,
- {DAG1_ID, DAG3_ID, DAG2_ID},
+ [DAG1_ID, DAG3_ID, DAG2_ID],
),
(
{"order_by": "-last_run_start_date", "only_active": False},
3,
- {DAG3_ID, DAG1_ID, DAG2_ID},
+ [DAG3_ID, DAG1_ID, DAG2_ID],
),
# Search
- ({"dag_id_pattern": "1"}, 1, {DAG1_ID}),
- ({"dag_display_name_pattern": "test_dag2"}, 1, {DAG2_ID}),
+ ({"dag_id_pattern": "1"}, 1, [DAG1_ID]),
+ ({"dag_display_name_pattern": "test_dag2"}, 1, [DAG2_ID]),
],
)
def test_get_dags(self, test_client, query_params, expected_total_entries, expected_ids):
@@ -175,7 +175,7 @@ class TestGetDags(TestDagEndpoint):
body = response.json()
assert body["total_entries"] == expected_total_entries
- assert set(dag["dag_id"] for dag in body["dags"]) == expected_ids
+ assert [dag["dag_id"] for dag in body["dags"]] == expected_ids
class TestPatchDag(TestDagEndpoint):