ephraimbuddy commented on code in PR #58248:
URL: https://github.com/apache/airflow/pull/58248#discussion_r2563807254


##########
airflow-core/src/airflow/models/serialized_dag.py:
##########
@@ -371,6 +374,111 @@ def _sort_serialized_dag_dict(cls, serialized_dag: Any):
             return [cls._sort_serialized_dag_dict(i) for i in serialized_dag]
         return serialized_dag
 
+    @classmethod
+    def _process_deadline_alerts(
+        cls,
+        serialized_dag_id: str,
+        dag_data: dict[str, Any],
+        session: Session,
+    ) -> bool:
+        """
+        Process DeadlineAlerts for a Dag during serialization.
+
+        Creates or finds deadline_alert records in the database and replaces
+        the deadline field in dag_data with UUID references.
+
+        :param serialized_dag_id: The serialized_dag id
+        :param dag_data: The serialized Dag data dictionary (will be modified 
in place)
+        :param session: Database session
+        """
+        dag_deadline_data = dag_data.get("dag", {}).get("deadline")
+        if not dag_deadline_data:
+            return False
+
+        log.debug("Processing DeadlineAlerts for Dag: %s", serialized_dag_id)
+
+        deadline_alerts = dag_deadline_data if isinstance(dag_deadline_data, 
list) else [dag_deadline_data]
+        deadline_alert_ids = []
+        new_alerts = []
+
+        for deadline_alert in deadline_alerts:
+            deadline_data = deadline_alert.get(Encoding.VAR, deadline_alert)
+
+            reference = deadline_data[DeadlineAlertFields.REFERENCE]
+            interval = deadline_data[DeadlineAlertFields.INTERVAL]
+            callback = deadline_data[DeadlineAlertFields.CALLBACK]
+
+            # This looks odd, but I had issues comparing the serialized data 
directly
+            # while doing manual testing. To avoid them, we fetch by dag_id 
and interval,
+            # then use python's dict comparison instead of trying to match 
strings in SQL.
+            candidates = (
+                session.execute(
+                    select(DeadlineAlertModel).filter(
+                        DeadlineAlertModel.serialized_dag_id == 
serialized_dag_id,
+                        DeadlineAlertModel.interval == interval,
+                    )
+                )
+                .scalars()
+                .all()

Review Comment:
   You might want to remove `all()` and iterate over the `ScalarsResult` generator instead



##########
airflow-core/src/airflow/models/serialized_dag.py:
##########
@@ -371,6 +374,111 @@ def _sort_serialized_dag_dict(cls, serialized_dag: Any):
             return [cls._sort_serialized_dag_dict(i) for i in serialized_dag]
         return serialized_dag
 
+    @classmethod
+    def _process_deadline_alerts(
+        cls,
+        serialized_dag_id: str,
+        dag_data: dict[str, Any],
+        session: Session,
+    ) -> bool:
+        """
+        Process DeadlineAlerts for a Dag during serialization.
+
+        Creates or finds deadline_alert records in the database and replaces
+        the deadline field in dag_data with UUID references.
+
+        :param serialized_dag_id: The serialized_dag id
+        :param dag_data: The serialized Dag data dictionary (will be modified 
in place)
+        :param session: Database session
+        """
+        dag_deadline_data = dag_data.get("dag", {}).get("deadline")
+        if not dag_deadline_data:
+            return False
+
+        log.debug("Processing DeadlineAlerts for Dag: %s", serialized_dag_id)
+
+        deadline_alerts = dag_deadline_data if isinstance(dag_deadline_data, 
list) else [dag_deadline_data]
+        deadline_alert_ids = []
+        new_alerts = []
+
+        for deadline_alert in deadline_alerts:
+            deadline_data = deadline_alert.get(Encoding.VAR, deadline_alert)
+
+            reference = deadline_data[DeadlineAlertFields.REFERENCE]
+            interval = deadline_data[DeadlineAlertFields.INTERVAL]
+            callback = deadline_data[DeadlineAlertFields.CALLBACK]
+
+            # This looks odd, but I had issues comparing the serialized data 
directly
+            # while doing manual testing. To avoid them, we fetch by dag_id 
and interval,
+            # then use python's dict comparison instead of trying to match 
strings in SQL.
+            candidates = (
+                session.execute(
+                    select(DeadlineAlertModel).filter(
+                        DeadlineAlertModel.serialized_dag_id == 
serialized_dag_id,
+                        DeadlineAlertModel.interval == interval,
+                    )
+                )
+                .scalars()

Review Comment:
   ```suggestion
                   session.scalars(
                       select(DeadlineAlertModel).where(
                           DeadlineAlertModel.serialized_dag_id == 
serialized_dag_id,
                           DeadlineAlertModel.interval == interval,
                       )
                   )
   ```



##########
airflow-core/src/airflow/models/serialized_dag.py:
##########
@@ -563,6 +680,65 @@ def data(self) -> dict | None:
 
         return self.__data_cache
 
+    @classmethod
+    def _reconstruct_deadline_alerts(
+        cls,
+        dag_data: dict[str, Any],
+        serialized_dag_id: str,
+        session: Session,
+    ) -> dict[str, Any]:
+        """
+        Reconstruct DeadlineAlert objects from UUID references during 
deserialization.
+
+        Queries the deadline_alert table to fetch full DeadlineAlert 
definitions
+        and reconstructs them in the serialized format expected by 
SerializedDAG.
+
+        :param dag_data: The serialized Dag data dictionary
+        :param serialized_dag_id: The serialized_dag ID
+        """
+        dag_deadline_data = dag_data.get("dag", {}).get("deadline")
+        if not dag_deadline_data:
+            return dag_data
+
+        deadline_list = dag_deadline_data if isinstance(dag_deadline_data, 
list) else [dag_deadline_data]
+        deadline_alerts_by_id = {
+            str(alert.id): alert
+            for alert in session.execute(
+                select(DeadlineAlertModel).filter(
+                    DeadlineAlertModel.id.in_(deadline_list),
+                    DeadlineAlertModel.serialized_dag_id == serialized_dag_id,
+                )
+            )
+            .scalars()

Review Comment:
   ```suggestion
               for alert in session.scalars(
                   select(DeadlineAlertModel).where(
                       DeadlineAlertModel.id.in_(deadline_list),
                       DeadlineAlertModel.serialized_dag_id == 
serialized_dag_id,
                   )
               )
   ```
   `execute` is used where we want row tuples; using `scalars` directly is better here



##########
airflow-core/src/airflow/models/serialized_dag.py:
##########
@@ -470,6 +578,15 @@ def write_dag(
         log.debug("Writing Serialized DAG: %s to the DB", dag.dag_id)
         new_serialized_dag.dag_version = dagv
         session.add(new_serialized_dag)
+        session.flush()
+
+        if new_serialized_dag.data:

Review Comment:
   If compressed data is in use, this would be `None`



##########
airflow-core/src/airflow/models/serialized_dag.py:
##########
@@ -470,6 +578,15 @@ def write_dag(
         log.debug("Writing Serialized DAG: %s to the DB", dag.dag_id)
         new_serialized_dag.dag_version = dagv
         session.add(new_serialized_dag)
+        session.flush()

Review Comment:
   We cannot flush here. All of the operations in `write_dag` should be atomic — 
no commits, no flushing. 



##########
airflow-core/src/airflow/models/serialized_dag.py:
##########
@@ -371,6 +374,111 @@ def _sort_serialized_dag_dict(cls, serialized_dag: Any):
             return [cls._sort_serialized_dag_dict(i) for i in serialized_dag]
         return serialized_dag
 
+    @classmethod
+    def _process_deadline_alerts(
+        cls,
+        serialized_dag_id: str,
+        dag_data: dict[str, Any],
+        session: Session,
+    ) -> bool:
+        """
+        Process DeadlineAlerts for a Dag during serialization.
+
+        Creates or finds deadline_alert records in the database and replaces
+        the deadline field in dag_data with UUID references.
+
+        :param serialized_dag_id: The serialized_dag id
+        :param dag_data: The serialized Dag data dictionary (will be modified 
in place)
+        :param session: Database session
+        """
+        dag_deadline_data = dag_data.get("dag", {}).get("deadline")
+        if not dag_deadline_data:
+            return False
+
+        log.debug("Processing DeadlineAlerts for Dag: %s", serialized_dag_id)
+
+        deadline_alerts = dag_deadline_data if isinstance(dag_deadline_data, 
list) else [dag_deadline_data]
+        deadline_alert_ids = []
+        new_alerts = []
+
+        for deadline_alert in deadline_alerts:
+            deadline_data = deadline_alert.get(Encoding.VAR, deadline_alert)
+
+            reference = deadline_data[DeadlineAlertFields.REFERENCE]
+            interval = deadline_data[DeadlineAlertFields.INTERVAL]
+            callback = deadline_data[DeadlineAlertFields.CALLBACK]
+
+            # This looks odd, but I had issues comparing the serialized data 
directly
+            # while doing manual testing. To avoid them, we fetch by dag_id 
and interval,
+            # then use python's dict comparison instead of trying to match 
strings in SQL.
+            candidates = (
+                session.execute(
+                    select(DeadlineAlertModel).filter(
+                        DeadlineAlertModel.serialized_dag_id == 
serialized_dag_id,
+                        DeadlineAlertModel.interval == interval,
+                    )
+                )
+                .scalars()
+                .all()
+            )
+
+            existing_alert = None
+            for alert in candidates:
+                if alert.reference == reference and alert.callback_def == 
callback:
+                    existing_alert = alert
+                    break
+
+            if existing_alert:
+                log.debug("Found existing DeadlineAlert: %s", 
existing_alert.id)
+                deadline_alert_ids.append(str(existing_alert.id))
+            else:
+                log.warning("No existing alert found, creating... ")

Review Comment:
   ```suggestion
                   log.warning("Creating deadline alert...")
   ```
   I think there's no need to inform users that there was no existing alert



##########
airflow-core/src/airflow/models/deadline_alert.py:
##########
@@ -0,0 +1,101 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from datetime import datetime
+from typing import TYPE_CHECKING
+
+import uuid6
+from sqlalchemy import JSON, Float, ForeignKey, String, Text, select
+from sqlalchemy.orm import Mapped
+from sqlalchemy_utils import UUIDType
+
+from airflow._shared.timezones import timezone
+from airflow.models import Base
+from airflow.models.deadline import ReferenceModels
+from airflow.utils.session import NEW_SESSION, provide_session
+from airflow.utils.sqlalchemy import UtcDateTime, mapped_column
+
+if TYPE_CHECKING:
+    from sqlalchemy.orm import Session
+
+
+class DeadlineAlert(Base):
+    """Table containing DeadlineAlert properties."""
+
+    __tablename__ = "deadline_alert"
+
+    id: Mapped[str] = mapped_column(UUIDType(binary=False), primary_key=True, 
default=uuid6.uuid7)
+    created_at: Mapped[datetime] = mapped_column(UtcDateTime, nullable=False, 
default=timezone.utcnow)
+
+    serialized_dag_id: Mapped[str] = mapped_column(
+        UUIDType(binary=False), ForeignKey("serialized_dag.id"), nullable=False
+    )
+
+    name: Mapped[str | None] = mapped_column(String(250), nullable=True)
+    description: Mapped[str | None] = mapped_column(Text, nullable=True)
+    reference: Mapped[dict] = mapped_column(JSON, nullable=False)
+    interval: Mapped[float] = mapped_column(Float, nullable=False)
+    callback_def: Mapped[dict] = mapped_column(JSON, nullable=False)
+
+    def __repr__(self):
+        interval_seconds = int(self.interval)
+
+        if interval_seconds >= 3600:
+            interval_display = f"{interval_seconds // 3600}h"
+        elif interval_seconds >= 60:
+            interval_display = f"{interval_seconds // 60}m"
+        else:
+            interval_display = f"{interval_seconds}s"
+
+        return (
+            f"[DeadlineAlert] "
+            f"id={str(self.id)[:8]}, "
+            f"created_at={self.created_at}, "
+            f"name={self.name or 'Unnamed'}, "
+            f"reference={self.reference}, "
+            f"interval={interval_display}, "
+            f"callback={self.callback_def}"
+        )
+
+    def __eq__(self, other):
+        if not isinstance(other, DeadlineAlert):
+            return False
+        return (
+            self.reference == other.reference
+            and self.interval == other.interval
+            and self.callback_def == other.callback_def
+        )
+
+    def __hash__(self):
+        return hash((str(self.reference), self.interval, 
str(self.callback_def)))
+
+    @property
+    def reference_class(self) -> type[ReferenceModels.BaseDeadlineReference]:
+        """Return the deserialized reference object."""
+        return 
ReferenceModels.get_reference_class(self.reference[ReferenceModels.REFERENCE_TYPE_FIELD])
+
+    @classmethod
+    @provide_session
+    def get_by_id(cls, deadline_alert_id: str, session: Session = NEW_SESSION) 
-> DeadlineAlert:
+        """
+        Retrieve a DeadlineAlert record by its UUID.
+
+        :param deadline_alert_id: The UUID of the DeadlineAlert to retrieve
+        :param session: Database session
+        """
+        return 
session.execute(select(cls).filter_by(id=deadline_alert_id)).scalar_one()

Review Comment:
   ```suggestion
           return session.scalar(select(cls).where(cls.id == deadline_alert_id))
   ```



##########
airflow-core/src/airflow/serialization/serialized_objects.py:
##########
@@ -3361,27 +3356,77 @@ def create_dagrun(
         )
 
         if self.deadline:
-            for deadline in cast("list", self.deadline):
-                if isinstance(deadline.reference, 
DeadlineReference.TYPES.DAGRUN):
-                    deadline_time = deadline.reference.evaluate_with(
-                        session=session,
-                        interval=deadline.interval,
-                        dag_id=self.dag_id,
-                        run_id=run_id,
-                    )
-                    if deadline_time is not None:
-                        session.add(
-                            Deadline(
-                                deadline_time=deadline_time,
-                                callback=deadline.callback,
-                                dagrun_id=orm_dagrun.id,
-                                dag_id=orm_dagrun.dag_id,
-                            )
-                        )
-                        Stats.incr("deadline_alerts.deadline_created", 
tags={"dag_id": self.dag_id})
+            self._process_dagrun_deadline_alerts(orm_dagrun, session)
 
         return orm_dagrun
 
+    def _process_dagrun_deadline_alerts(
+        self,
+        orm_dagrun: DagRun,
+        session: Session,
+    ) -> None:
+        """
+        Process deadline alerts for a newly created DagRun.
+
+        Creates Deadline records for any DeadlineAlerts that reference DAGRUN.
+
+        :param orm_dagrun: The newly created DagRun
+        :param session: Database session
+        """
+        # Import here to avoid circular dependency
+        from airflow.models.serialized_dag import SerializedDagModel
+
+        # Get the serialized_dag ID for this DAG
+        serialized_dag_id = session.scalar(
+            select(SerializedDagModel.id).where(SerializedDagModel.dag_id == 
self.dag_id).limit(1)
+        )

Review Comment:
   I think the approach here is wrong in the dag-versioning context, because you 
might end up getting the wrong version of the serialized dag. 



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to