This is an automated email from the ASF dual-hosted git repository.

uranusjr pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 6cb9deff97 Fix SQLA deprecations in Airflow core (#39211)
6cb9deff97 is described below

commit 6cb9deff97a3906555f79200911bf7ada949fac6
Author: Sebastian Daum <[email protected]>
AuthorDate: Thu Apr 25 09:07:39 2024 +0200

    Fix SQLA deprecations in Airflow core (#39211)
---
 airflow/utils/db_cleanup.py | 4 ++--
 airflow/www/views.py        | 8 +++-----
 2 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/airflow/utils/db_cleanup.py b/airflow/utils/db_cleanup.py
index 7fe158e605..f1e1770926 100644
--- a/airflow/utils/db_cleanup.py
+++ b/airflow/utils/db_cleanup.py
@@ -196,8 +196,8 @@ def _do_delete(*, query, orm_model, skip_archive, session):
     session.execute(delete)
     session.commit()
     if skip_archive:
-        metadata.bind = session.get_bind()
-        target_table.drop()
+        bind = session.get_bind()
+        target_table.drop(bind=bind)
     session.commit()
     print("Finished Performing Delete")
 
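For context: SQLAlchemy 1.4+ deprecates bound metadata (assigning MetaData.bind), and DDL methods such as Table.drop() take the connectable explicitly instead. A minimal, self-contained sketch of the pattern the hunk above moves to; the engine URL and table name are illustrative stand-ins, not Airflow code:

    # Sketch only: drop a table with an explicit bind, mirroring _do_delete() above.
    # The engine URL and table below are hypothetical examples.
    from sqlalchemy import Column, Integer, MetaData, Table, create_engine
    from sqlalchemy.orm import Session

    engine = create_engine("sqlite://")
    metadata = MetaData()
    archive_table = Table("archived_rows", metadata, Column("id", Integer, primary_key=True))
    metadata.create_all(engine)

    with Session(engine) as session:
        bind = session.get_bind()        # resolve the connectable from the session
        archive_table.drop(bind=bind)    # pass it explicitly instead of setting metadata.bind
        session.commit()
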
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 486b404f57..9907d6d628 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -846,9 +846,7 @@ class Airflow(AirflowBaseView):
 
             is_paused_count = dict(
                 session.execute(
-                    all_dags.with_only_columns([DagModel.is_paused, func.count()]).group_by(
-                        DagModel.is_paused
-                    )
+                    all_dags.with_only_columns(DagModel.is_paused, func.count()).group_by(DagModel.is_paused)
                 ).all()
             )
 
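The hunk above tracks another SQLAlchemy 1.4 deprecation: Select.with_only_columns() now takes the columns as positional arguments rather than a single list. A rough sketch against a throwaway table (names are illustrative, not the real DagModel):

    # Sketch only: positional arguments to with_only_columns(), no list wrapper.
    from sqlalchemy import Boolean, Column, Integer, MetaData, Table, func, select

    metadata = MetaData()
    dag = Table("dag", metadata,
                Column("id", Integer, primary_key=True),
                Column("is_paused", Boolean))

    all_dags = select(dag.c.id)

    # Old (deprecated): all_dags.with_only_columns([dag.c.is_paused, func.count()])
    stmt = all_dags.with_only_columns(dag.c.is_paused, func.count()).group_by(dag.c.is_paused)
    print(stmt)  # prints the compiled SELECT ... GROUP BY statement
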
@@ -3294,7 +3292,7 @@ class Airflow(AirflowBaseView):
             latest_run = dag_model.get_last_dagrun(session=session)
 
             events = [
-                dict(info)
+                dict(info._mapping)
                 for info in session.execute(
                     select(
                         DatasetModel.id,
@@ -3456,7 +3454,7 @@ class Airflow(AirflowBaseView):
             count_query = count_query.where(*filters)
 
             query = session.execute(query)
-            datasets = [dict(dataset) for dataset in query]
+            datasets = [dict(dataset._mapping) for dataset in query]
             data = {"datasets": datasets, "total_entries": session.scalar(count_query)}
 
             return (
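
The two dict(... ._mapping) changes above follow from the Row object in SQLAlchemy 1.4+ no longer being directly convertible to a dict; the mapping view is exposed via Row._mapping instead. A small self-contained sketch (engine and table are illustrative, not Airflow's models):

    # Sketch only: turning result rows into dicts via Row._mapping.
    from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, insert, select

    engine = create_engine("sqlite://")
    metadata = MetaData()
    dataset = Table("dataset", metadata,
                    Column("id", Integer, primary_key=True),
                    Column("uri", String))
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(insert(dataset).values(id=1, uri="s3://bucket/key"))

    with engine.connect() as conn:
        rows = conn.execute(select(dataset.c.id, dataset.c.uri))
        # dict(row) no longer works on the tuple-like Row; use its ._mapping view
        datasets = [dict(row._mapping) for row in rows]
        print(datasets)  # [{'id': 1, 'uri': 's3://bucket/key'}]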
