pierrejeambrun commented on code in PR #62583:
URL: https://github.com/apache/airflow/pull/62583#discussion_r2980862981


##########
airflow-core/src/airflow/api_fastapi/core_api/routes/ui/deadlines.py:
##########
@@ -53,44 +59,135 @@
         ),
     ],
 )
-def get_dag_run_deadlines(
+def get_deadlines(
     dag_id: str,
     dag_run_id: str,
     session: SessionDep,
     limit: QueryLimit,
     offset: QueryOffset,
+    readable_dag_runs_filter: ReadableDagRunsFilterDep,
     order_by: Annotated[
         SortParam,
         Depends(
             SortParam(
-                ["id", "deadline_time", "created_at"],
+                ["id", "deadline_time", "created_at", "missed", "met"],
                 Deadline,
                 to_replace={
+                    "dag_id": DagRun.dag_id,
+                    "dag_run_id": DagRun.run_id,
                     "alert_name": DeadlineAlert.name,
                 },
             ).dynamic_depends(default="deadline_time")
         ),
     ],
-) -> DeadlineCollectionResponse:
-    """Get all deadlines for a specific DAG run."""
-    dag_run = session.scalar(select(DagRun).where(DagRun.dag_id == dag_id, 
DagRun.run_id == dag_run_id))
-
-    if not dag_run:
-        raise HTTPException(
-            status.HTTP_404_NOT_FOUND,
-            f"No DAG run found for dag_id={dag_id} dag_run_id={dag_run_id}",
-        )
+    missed: bool | None = Query(default=None),
+    met: bool | None = Query(default=None),
+    deadline_time_gte: datetime | None = Query(default=None),
+    deadline_time_lte: datetime | None = Query(default=None),

Review Comment:
   Check the other endpoint implementations to see how we do filtering.



##########
airflow-core/src/airflow/api_fastapi/core_api/routes/ui/deadlines.py:
##########
@@ -53,44 +59,135 @@
         ),
     ],
 )
-def get_dag_run_deadlines(
+def get_deadlines(
     dag_id: str,
     dag_run_id: str,
     session: SessionDep,
     limit: QueryLimit,
     offset: QueryOffset,
+    readable_dag_runs_filter: ReadableDagRunsFilterDep,
     order_by: Annotated[
         SortParam,
         Depends(
             SortParam(
-                ["id", "deadline_time", "created_at"],
+                ["id", "deadline_time", "created_at", "missed", "met"],
                 Deadline,
                 to_replace={
+                    "dag_id": DagRun.dag_id,
+                    "dag_run_id": DagRun.run_id,
                     "alert_name": DeadlineAlert.name,
                 },
             ).dynamic_depends(default="deadline_time")
         ),
     ],
-) -> DeadlineCollectionResponse:
-    """Get all deadlines for a specific DAG run."""
-    dag_run = session.scalar(select(DagRun).where(DagRun.dag_id == dag_id, 
DagRun.run_id == dag_run_id))
-
-    if not dag_run:
-        raise HTTPException(
-            status.HTTP_404_NOT_FOUND,
-            f"No DAG run found for dag_id={dag_id} dag_run_id={dag_run_id}",
-        )
+    missed: bool | None = Query(default=None),
+    met: bool | None = Query(default=None),
+    deadline_time_gte: datetime | None = Query(default=None),
+    deadline_time_lte: datetime | None = Query(default=None),
+) -> DeadlineWithDagRunCollectionResponse:
+    """
+    Get deadlines for a DAG run.
 
+    This endpoint allows specifying `~` as the dag_id and dag_run_id to 
retrieve Deadlines for all
+    DAGs and DAG runs.
+    """
     query = (
         select(Deadline)
         .join(Deadline.dagrun)
         .outerjoin(Deadline.deadline_alert)
-        .where(Deadline.dagrun_id == dag_run.id)
-        .where(DagRun.dag_id == dag_id)
-        .options(joinedload(Deadline.deadline_alert))
+        .options(joinedload(Deadline.dagrun), 
joinedload(Deadline.deadline_alert))
     )
 
+    if dag_run_id != "~":
+        if dag_id == "~":
+            raise HTTPException(
+                status.HTTP_400_BAD_REQUEST,
+                "dag_id is required when dag_run_id is specified",
+            )
+        dag_run = session.scalar(select(DagRun).where(DagRun.dag_id == dag_id, 
DagRun.run_id == dag_run_id))
+        if not dag_run:
+            raise HTTPException(
+                status.HTTP_404_NOT_FOUND,
+                f"DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` 
was not found",
+            )
+        query = query.where(Deadline.dagrun_id == dag_run.id)
+    elif dag_id != "~":
+        query = query.where(DagRun.dag_id == dag_id)
+
+    if missed is not None:
+        query = query.where(Deadline.missed == missed)
+
+    if met is not None:
+        query = query.where(Deadline.met == met)
+
+    if deadline_time_gte is not None:
+        query = query.where(Deadline.deadline_time >= deadline_time_gte)
+
+    if deadline_time_lte is not None:
+        query = query.where(Deadline.deadline_time <= deadline_time_lte)

Review Comment:
   We have utility code to handle this; it shouldn't be done manually like this.



##########
airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_deadlines.py:
##########
@@ -262,7 +283,7 @@ def test_deadlines_ordered_by_deadline_time_ascending(self, 
test_client):
     )
     def test_should_response_200_order_by(self, test_client, order_by):
         url = f"/dags/{DAG_ID}/dagRuns/{RUN_MULTI}/deadlines"
-        with assert_queries_count(8):
+        with assert_queries_count(10):

Review Comment:
   That's probably wrong. 10 queries seems like a lot; we might have an N+1 
query problem. This increases with the number of deadlines. We need some loading 
options to prefetch the relationships rather than lazy-loading them at serialization time.



##########
airflow-core/src/airflow/migrations/versions/0110_3_2_0_add_met_to_deadline.py:
##########
@@ -0,0 +1,50 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Add met column to deadline table.
+
+Revision ID: 7f3a2b1e9c4d
+Revises: 1d6611b6ab7c
+Create Date: 2026-03-20 14:23:07.842153
+
+"""
+
+from __future__ import annotations
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "7f3a2b1e9c4d"
+down_revision = "1d6611b6ab7c"
+branch_labels = None
+depends_on = None
+airflow_version = "3.2.0"
+
+
+def upgrade():
+    """Add met boolean column to deadline table."""
+    with op.batch_alter_table("deadline", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("met", sa.Boolean(), nullable=False, 
server_default=sa.false()))
+
+
+def downgrade():
+    """Remove met column from deadline table."""
+    with op.batch_alter_table("deadline", schema=None) as batch_op:
+        batch_op.drop_column("met")

Review Comment:
   This seems unrelated to the current change adding API endpoints. Can we 
split this into a separate PR?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to