ephraimbuddy commented on a change in pull request #18065:
URL: https://github.com/apache/airflow/pull/18065#discussion_r703709458



##########
File path: airflow/jobs/scheduler_job.py
##########
@@ -823,14 +823,30 @@ def _create_dag_runs(self, dag_models: Iterable[DagModel], session: Session) ->
         existing_dagruns = (
             session.query(DagRun.dag_id, DagRun.execution_date).filter(existing_dagruns_filter).all()
         )
+        max_queued_dagruns = conf.getint('scheduler', 'max_queued_runs_per_dag')
+
+        queued_runs_of_dags = defaultdict(
+            int,
+            session.query(DagRun.dag_id, func.count('*'))
+            .filter(  # We use `list` here because SQLA doesn't accept a set
+                # We use set to avoid duplicate dag_ids
+                DagRun.dag_id.in_(list({dm.dag_id for dm in dag_models})),
+                DagRun.state == State.QUEUED,
+            )
+            .group_by(DagRun.dag_id)
+            .all(),
+        )
 
         for dag_model in dag_models:
+            # Let's quickly check if we have exceeded the number of queued dag runs per dag
+            total_queued = queued_runs_of_dags[dag_model.dag_id]
+            if total_queued >= max_queued_dagruns:
+                continue

Review comment:
       It will get noisy. Imagine the number of possible dag runs to create is up to 2000; this would print ~1984 times.
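
For context, this is the kind of per-dag message I mean (a hypothetical sketch, not code from this PR; `self.log` is just the job's logger). With the check sitting inside the loop over `dag_models`, a statement like this fires once for every dag that is already at the cap:

```python
for dag_model in dag_models:
    total_queued = queued_runs_of_dags[dag_model.dag_id]
    if total_queued >= max_queued_dagruns:
        # Hypothetical log line: with ~2000 candidate dags and most of them
        # at the cap, this one statement emits a near-identical message for
        # almost every dag, on every scheduler loop.
        self.log.info(
            "DAG %s already has %d queued dag runs (limit %d), skipping",
            dag_model.dag_id,
            total_queued,
            max_queued_dagruns,
        )
        continue
    # ... create the dag run as usual ...
```

If we want any message at all, collecting the skipped dag_ids and logging one summary line after the loop would keep the output bounded.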



