jedcunningham commented on code in PR #27828:
URL: https://github.com/apache/airflow/pull/27828#discussion_r1031991662
##########
airflow/jobs/scheduler_job.py:
##########
@@ -1574,3 +1585,33 @@ def _cleanup_stale_dags(self, session: Session = NEW_SESSION) -> None:
dag.is_active = False
SerializedDagModel.remove_dag(dag_id=dag.dag_id, session=session)
session.flush()
+
+ @provide_session
+    def _orphan_unreferenced_datasets(self, session: Session = NEW_SESSION) -> None:
+ """
+        Detects datasets that are no longer referenced in any DAG schedule parameters or task outlets and
+        sets the dataset is_orphaned flags to True
+ """
+ orphaned_dataset_query = (
+ session.query(DatasetModel)
+ .join(
+ DagScheduleDatasetReference,
+ DagScheduleDatasetReference.dataset_id == DatasetModel.id,
+ isouter=True,
+ )
+ .join(
+ TaskOutletDatasetReference,
+ TaskOutletDatasetReference.dataset_id == DatasetModel.id,
+ isouter=True,
+ )
+ .group_by(DatasetModel.id)
+ .having(
+ and_(
+ func.count(DagScheduleDatasetReference.dag_id) == 0,
+ func.count(TaskOutletDatasetReference.dag_id) == 0,
+ )
+ )
+ )
+ for dataset in orphaned_dataset_query.all():
Review Comment:
```suggestion
for dataset in orphaned_dataset_query:
```
##########
airflow/www/views.py:
##########
@@ -3648,7 +3648,7 @@ def datasets_summary(self):
if has_event_filters:
count_query = count_query.join(DatasetEvent, DatasetEvent.dataset_id == DatasetModel.id)
- filters = []
+ filters = [DatasetModel.is_orphaned.is_(False)]
Review Comment:
```suggestion
filters = [~DatasetModel.is_orphaned]
```
##########
airflow/jobs/scheduler_job.py:
##########
@@ -1574,3 +1585,33 @@ def _cleanup_stale_dags(self, session: Session = NEW_SESSION) -> None:
dag.is_active = False
SerializedDagModel.remove_dag(dag_id=dag.dag_id, session=session)
session.flush()
+
+ @provide_session
+    def _orphan_unreferenced_datasets(self, session: Session = NEW_SESSION) -> None:
+ """
+        Detects datasets that are no longer referenced in any DAG schedule parameters or task outlets and
+        sets the dataset is_orphaned flags to True
+ """
+ orphaned_dataset_query = (
+ session.query(DatasetModel)
+ .join(
+ DagScheduleDatasetReference,
+ DagScheduleDatasetReference.dataset_id == DatasetModel.id,
+ isouter=True,
+ )
+ .join(
+ TaskOutletDatasetReference,
+ TaskOutletDatasetReference.dataset_id == DatasetModel.id,
+ isouter=True,
+ )
+ .group_by(DatasetModel.id)
+ .having(
+ and_(
+ func.count(DagScheduleDatasetReference.dag_id) == 0,
+ func.count(TaskOutletDatasetReference.dag_id) == 0,
+ )
+ )
+ )
+ for dataset in orphaned_dataset_query.all():
+ self.log.info("Orphaning dataset '%s'", dataset.uri)
Review Comment:
```suggestion
self.log.info("Orphaning unreferenced dataset '%s'", dataset.uri)
```
##########
airflow/dag_processing/manager.py:
##########
@@ -433,8 +433,10 @@ def __init__(
self.last_stat_print_time = 0
# Last time we cleaned up DAGs which are no longer in files
self.last_deactivate_stale_dags_time = timezone.make_aware(datetime.fromtimestamp(0))
- # How often to check for DAGs which are no longer in files
-        self.deactivate_stale_dags_interval = conf.getint("scheduler", "deactivate_stale_dags_interval")
+ # How often to clean up:
+ # * DAGs which are no longer in files
+        # * datasets that are no longer referenced by any DAG schedule parameters or task outlets
Review Comment:
```suggestion
# How often to check for DAGs which are no longer in files
```
##########
airflow/models/dag.py:
##########
@@ -2828,6 +2828,7 @@ def bulk_write_to_db(
for dataset in all_datasets:
stored_dataset = session.query(DatasetModel).filter(DatasetModel.uri == dataset.uri).first()
if stored_dataset:
+ stored_dataset.is_orphaned = False
Review Comment:
Test this situation.
##########
airflow/migrations/versions/0122_2_5_0_add_is_orphaned_to_datasetmodel.py:
##########
@@ -0,0 +1,49 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Add is_orphaned to DatasetModel
+
+Revision ID: 290244fb8b83
+Revises: 65a852f26899
+Create Date: 2022-11-22 00:12:53.432961
+
+"""
+
+from __future__ import annotations
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "290244fb8b83"
+down_revision = "65a852f26899"
+branch_labels = None
+depends_on = None
+airflow_version = "2.5.0"
+
+
+def upgrade():
+ """Add is_orphaned to DatasetModel"""
+ with op.batch_alter_table("dataset") as batch_op:
Review Comment:
Do we need to batch here?
##########
airflow/www/views.py:
##########
@@ -3525,7 +3525,7 @@ def next_run_datasets(self, dag_id):
),
isouter=True,
)
- .filter(DagScheduleDatasetReference.dag_id == dag_id)
+            .filter(DagScheduleDatasetReference.dag_id == dag_id, DatasetModel.is_orphaned.is_(False))
Review Comment:
```suggestion
            .filter(DagScheduleDatasetReference.dag_id == dag_id, ~DatasetModel.is_orphaned)
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]