This is an automated email from the ASF dual-hosted git repository.

kgabryje pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 26a2e127793 perf: fix N+1 query in Slice.datasource property (#37899)
26a2e127793 is described below

commit 26a2e1277935475fd3570389e3b94757ee109036
Author: Kamil Gabryjelski <[email protected]>
AuthorDate: Wed Feb 11 18:57:28 2026 +0100

    perf: fix N+1 query in Slice.datasource property (#37899)
---
 superset/models/dashboard.py           | 32 +++++++-------------------------
 superset/models/slice.py               | 25 +++++--------------------
 tests/integration_tests/model_tests.py |  2 +-
 3 files changed, 13 insertions(+), 46 deletions(-)
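For readers skimming the patch: the old Slice.datasource getter ran a fresh db.session.query on every access, so any code touching the property per chart paid one query per chart; the new property simply returns the mapped "table" relationship. Below is a minimal, self-contained sketch of that query shape and the relationship-based alternative. The Chart/Table models are hypothetical stand-ins, not Superset's real classes; only the pattern is the point.

from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import (
    DeclarativeBase,
    Mapped,
    Session,
    mapped_column,
    relationship,
    selectinload,
)


class Base(DeclarativeBase):
    pass


class Table(Base):
    __tablename__ = "tables"
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]


class Chart(Base):
    __tablename__ = "charts"
    id: Mapped[int] = mapped_column(primary_key=True)
    table_id: Mapped[int] = mapped_column(ForeignKey("tables.id"))
    # Mapped relationship: plain attribute access instead of a hand-rolled
    # query issued from inside a property.
    table: Mapped[Table] = relationship()


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    t1, t2 = Table(name="sales"), Table(name="orders")
    session.add_all([t1, t2, Chart(table=t1), Chart(table=t2)])
    session.commit()

    # N+1 shape: one query for the charts, then a lookup per chart that can
    # each hit the database again.
    charts = session.scalars(select(Chart)).all()
    slow = {session.get(Table, c.table_id) for c in charts}

    # Relationship-based shape: eager-load once, then plain attribute access.
    charts = session.scalars(
        select(Chart).options(selectinload(Chart.table))
    ).all()
    fast = {c.table for c in charts}
    assert slow == fast

With the relationship doing the work, Dashboard.datasources can become the one-line set comprehension in the hunk below instead of grouping ids and querying per model class.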

diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py
index 5c9026635bf..abd477e2045 100644
--- a/superset/models/dashboard.py
+++ b/superset/models/dashboard.py
@@ -206,21 +206,7 @@ class Dashboard(CoreDashboard, AuditMixinNullable, ImportExportMixin):
 
     @property
     def datasources(self) -> set[BaseDatasource]:
-        # Verbose but efficient database enumeration of dashboard datasources.
-        datasources_by_cls_model: dict[type[BaseDatasource], set[int]] = defaultdict(
-            set
-        )
-
-        for slc in self.slices:
-            datasources_by_cls_model[slc.cls_model].add(slc.datasource_id)
-
-        return {
-            datasource
-            for cls_model, datasource_ids in datasources_by_cls_model.items()
-            for datasource in db.session.query(cls_model)
-            .filter(cls_model.id.in_(datasource_ids))
-            .all()
-        }
+        return {slc.datasource for slc in self.slices if slc.datasource}
 
     @property
     def charts(self) -> list[str]:
@@ -279,24 +265,20 @@ class Dashboard(CoreDashboard, AuditMixinNullable, ImportExportMixin):
         }
 
     def datasets_trimmed_for_slices(self) -> list[dict[str, Any]]:
-        # Verbose but efficient database enumeration of dashboard datasources.
-        slices_by_datasource: dict[tuple[type[BaseDatasource], int], set[Slice]] = (
-            defaultdict(set)
-        )
+        slices_by_datasource: dict[int, set[Slice]] = defaultdict(set)
 
         for slc in self.slices:
-            slices_by_datasource[(slc.cls_model, slc.datasource_id)].add(slc)
+            slices_by_datasource[slc.datasource_id].add(slc)
 
         result: list[dict[str, Any]] = []
 
-        for (cls_model, datasource_id), slices in slices_by_datasource.items():
-            datasource = (
-                db.session.query(cls_model).filter_by(id=datasource_id).one_or_none()
-            )
+        for _, slices in slices_by_datasource.items():
+            # Use the eagerly-loaded datasource from any slice in the group
+            datasource = next(iter(slices)).datasource
 
             if datasource:
                 # Filter out unneeded fields from the datasource payload
-                result.append(datasource.data_for_slices(slices))
+                result.append(datasource.data_for_slices(list(slices)))
 
         return result
 
diff --git a/superset/models/slice.py b/superset/models/slice.py
index db58c6c8e50..dc2450df311 100644
--- a/superset/models/slice.py
+++ b/superset/models/slice.py
@@ -141,16 +141,9 @@ class Slice(  # pylint: disable=too-many-public-methods
     def __repr__(self) -> str:
         return self.slice_name or str(self.id)
 
-    @property
-    def cls_model(self) -> type[SqlaTable]:
-        # pylint: disable=import-outside-toplevel
-        from superset.daos.datasource import DatasourceDAO
-
-        return DatasourceDAO.sources[self.datasource_type]
-
     @property
     def datasource(self) -> SqlaTable | None:
-        return self.get_datasource
+        return self.table
 
     def clone(self) -> Slice:
         return Slice(
@@ -164,15 +157,6 @@ class Slice(  # pylint: disable=too-many-public-methods
             cache_timeout=self.cache_timeout,
         )
 
-    # pylint: disable=using-constant-test
-    @datasource.getter  # type: ignore
-    def get_datasource(self) -> SqlaTable | None:
-        return (
-            db.session.query(self.cls_model)
-            .filter_by(id=self.datasource_id)
-            .one_or_none()
-        )
-
     @renders("datasource_name")
     def datasource_link(self) -> Markup | None:
         datasource = self.datasource
@@ -201,8 +185,6 @@ class Slice(  # pylint: disable=too-many-public-methods
         datasource = self.datasource
         return datasource.url if datasource else None
 
-    # pylint: enable=using-constant-test
-
     @property
     def viz(self) -> BaseViz | None:
         form_data = json.loads(self.params)
@@ -377,7 +359,10 @@ def id_or_uuid_filter(id_or_uuid: str | int) -> BinaryExpression:
 
 
 def set_related_perm(_mapper: Mapper, _connection: Connection, target: Slice) -> None:
-    src_class = target.cls_model
+    # pylint: disable=import-outside-toplevel
+    from superset.daos.datasource import DatasourceDAO
+
+    src_class = DatasourceDAO.sources[target.datasource_type]
     if id_ := target.datasource_id:
         ds = db.session.query(src_class).filter_by(id=int(id_)).first()
         if ds:
diff --git a/tests/integration_tests/model_tests.py b/tests/integration_tests/model_tests.py
index e80fea7db88..1da5e3d0210 100644
--- a/tests/integration_tests/model_tests.py
+++ b/tests/integration_tests/model_tests.py
@@ -678,7 +678,7 @@ class TestSqlaTableModel(SupersetTestCase):
             datasource_id=tbl.id,
         )
         dashboard.slices.append(slc)
-        datasource_info = slc.datasource.data_for_slices([slc])
+        datasource_info = tbl.data_for_slices([slc])
         assert "database" in datasource_info
 
         # clean up and auto commit

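One practical follow-on (not part of this commit): since Slice.datasource now just reads the mapped table relationship, callers that load many dashboards or charts at once can keep the fix effective by eager-loading that chain up front. A hedged sketch, assuming the Dashboard.slices and Slice.table relationship names visible in the diff above and the usual "from superset import db" session handle:

from sqlalchemy.orm import selectinload

from superset import db
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice

# Load dashboards, their slices, and each slice's table in a few round trips
# instead of one query per slice. Relationship names are taken from the diff
# above; adapt if they differ in your version.
dashboards = (
    db.session.query(Dashboard)
    .options(selectinload(Dashboard.slices).selectinload(Slice.table))
    .all()
)
datasources = {ds for d in dashboards for ds in d.datasources}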