ashb commented on a change in pull request #16352:
URL: https://github.com/apache/airflow/pull/16352#discussion_r649234817



##########
File path: airflow/migrations/versions/142555e44c17_add_data_interval_start_end_to_dagrun.py
##########
@@ -0,0 +1,100 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Add data_interval_[start|end] to DagRun.
+
+Revision ID: 142555e44c17
+Revises: e9304a3141f0
+Create Date: 2021-06-09 08:28:02.089817
+
+"""
+
+import alembic
+import sqlalchemy
+
+from airflow.models import DagModel, DagRun
+from airflow.models.serialized_dag import SerializedDagModel
+from airflow.utils.session import create_session
+from airflow.utils.sqlalchemy import UtcDateTime
+from airflow.utils.timezone import utcnow
+
+# revision identifiers, used by Alembic.
+revision = "142555e44c17"
+down_revision = "e9304a3141f0"
+branch_labels = None
+depends_on = None
+
+
+# None and "@once" schedule intervals don't have a data interval and can be
+# bulk-updated with one SQL call, so we do them separately. Other "real"
+# schedule intervals are too complicated and need to be populated manually.
+NO_SCHEDULE_FILTER = DagModel.schedule_interval.in_([None, "@once"])
+
+
+def _populate_simple_dagrun_intervals(session):
+    """Handle DAG runs with simple schedule intervals."""
+    updates = {
+        DagRun.data_interval_start: DagRun.execution_date,
+        DagRun.data_interval_end: DagRun.execution_date,
+    }
+    joined = session.query(DagRun).join(DagModel, DagModel.dag_id == DagRun.dag_id)
+    joined.filter(NO_SCHEDULE_FILTER).update(updates)
+
+
+def _populate_complex_dagrun_intervals(session):
+    """Handle DAG runs with "real" schedule intervals."""
+    joined = session.query(DagRun, SerializedDagModel).join(
+        SerializedDagModel,
+        DagRun.dag_id == SerializedDagModel.dag_id,
+    )
+    for dag_run, serialized in joined.filter(~NO_SCHEDULE_FILTER):
+        dag = serialized.dag
+        data_interval_start = dag_run.execution_date
+        dag_run.data_interval_start = data_interval_start
+        dag_run.data_interval_end = dag.following_schedule(data_interval_start)

Review comment:
       Is this at all possible without requiring the serialized dag table? -- inflating every dag, and walking over every dag run is not going to be quick.
   
   There is _a_ `schedule_interval` column on DagModel currently, which I _think_ gives us enough info? Not sure it saves us much run time though.
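
   For what it's worth, something along these lines might be enough -- purely a sketch, assuming the column holds either a `timedelta` or a cron string/preset; `_following_schedule` is a made-up name here, and this glosses over the timezone handling that `DAG.following_schedule` does:

```python
# Rough sketch only -- assumes DagModel.schedule_interval holds either a
# datetime.timedelta or a cron string/preset. The helper name
# _following_schedule is made up for illustration.
from datetime import datetime, timedelta

from croniter import croniter

from airflow.utils.dates import cron_presets


def _following_schedule(schedule_interval, start: datetime) -> datetime:
    """Compute the next schedule point without deserializing the DAG."""
    if isinstance(schedule_interval, timedelta):
        return start + schedule_interval
    # Expand presets such as "@daily" into plain cron expressions first.
    cron = cron_presets.get(schedule_interval, schedule_interval)
    return croniter(cron, start).get_next(datetime)
```

   That would avoid inflating the serialized dags, though we'd still loop over every dag run to compute the interval end, so the per-row cost doesn't go away entirely.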



