This is an automated email from the ASF dual-hosted git repository.
suddjian pushed a commit to branch dashboard-spa-caching
in repository https://gitbox.apache.org/repos/asf/superset.git
The following commit(s) were added to refs/heads/dashboard-spa-caching by this push:
     new efb835c  fix(dashboard): [WIP] add caching back in to the dashboard dataset api
efb835c is described below
commit efb835c46800ac7ce62adc381cacd4c8ac1f4c20
Author: David Aaron Suddjian <[email protected]>
AuthorDate: Thu Apr 22 09:43:58 2021 -0700
fix(dashboard): [WIP] add caching back in to the dashboard dataset api
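
This change replaces the inline dataset assembly in DashboardDAO.get_datasets_for_dashboard and Dashboard.full_data with a single memoized Dashboard.datasets_trimmed_for_slices method, gated on the DASHBOARD_CACHE feature flag and versioned by hand via make_name. For context, a minimal, self-contained sketch of how Flask-Caching's memoize parameters behave; the app setup, FEATURE_FLAGS dict, and expensive_lookup function below are illustrative stand-ins, not Superset code:

from flask import Flask
from flask_caching import Cache

app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "SimpleCache"})

# Stand-in for Superset's feature-flag lookup; purely illustrative.
FEATURE_FLAGS = {"DASHBOARD_CACHE": True}

@cache.memoize(
    # make_name rewrites the cache key prefix, so bumping "v1.0" by hand
    # invalidates every entry stored under the old version.
    make_name=lambda fname: f"{fname}-v1.0",
    # unless returning True bypasses the cache, so entries are only written
    # and read while the feature flag is enabled.
    unless=lambda: not FEATURE_FLAGS["DASHBOARD_CACHE"],
)
def expensive_lookup(dashboard_id: int) -> dict:
    print(f"computing payload for dashboard {dashboard_id}")
    return {"id": dashboard_id, "datasets": []}

with app.app_context():
    expensive_lookup(1)  # computes and stores the payload
    expensive_lookup(1)  # served from the cache; no second print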
---
 superset/dashboards/dao.py   |  8 +-------
 superset/models/dashboard.py | 24 +++++++++++++++++-------
 2 files changed, 18 insertions(+), 14 deletions(-)
diff --git a/superset/dashboards/dao.py b/superset/dashboards/dao.py
index 707da7b..4edf8d3 100644
--- a/superset/dashboards/dao.py
+++ b/superset/dashboards/dao.py
@@ -49,13 +49,7 @@ class DashboardDAO(BaseDAO):
     @staticmethod
     def get_datasets_for_dashboard(id_or_slug: str) -> List[Any]:
         dashboard = DashboardDAO.get_by_id_or_slug(id_or_slug)
-        datasource_slices = core.indexed(dashboard.slices, "datasource")
-        data = [
-            datasource.data_for_slices(slices)
-            for datasource, slices in datasource_slices.items()
-            if datasource
-        ]
-        return data
+        return dashboard.datasets_trimmed_for_slices()
 
     @staticmethod
     def get_charts_for_dashboard(id_or_slug: str) -> List[Slice]:
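
The lines removed above group the dashboard's slices by datasource before trimming each datasource's payload; that work now lives behind the memoized model method added in the next file. For reference, a generic sketch of what an indexed-style grouping helper does; the types and function body here are illustrative, not Superset's actual utility:

from collections import defaultdict
from typing import Any, Dict, Hashable, Iterable, List

def indexed(items: Iterable[Any], attr: str) -> Dict[Hashable, List[Any]]:
    """Group items into lists keyed by one of their attributes."""
    grouped: Dict[Hashable, List[Any]] = defaultdict(list)
    for item in items:
        grouped[getattr(item, attr)].append(item)
    return dict(grouped)

# e.g. indexed(dashboard.slices, "datasource") yields
# {<datasource>: [<slice>, ...], ...}, which is then trimmed per datasource.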
diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py
index 774cf62..c29ae57 100644
--- a/superset/models/dashboard.py
+++ b/superset/models/dashboard.py
@@ -248,14 +248,8 @@ class Dashboard(  # pylint: disable=too-many-instance-attributes
     def full_data(self) -> Dict[str, Any]:
         """Bootstrap data for rendering the dashboard page."""
         slices = self.slices
-        datasource_slices = utils.indexed(slices, "datasource")
         try:
-            datasources = {
-                # Filter out unneeded fields from the datasource payload
-                datasource.uid: datasource.data_for_slices(slices)
-                for datasource, slices in datasource_slices.items()
-                if datasource
-            }
+            datasources = self.datasets_trimmed_for_slices()
         except (SupersetException, SQLAlchemyError):
             datasources = {}
         return {
@@ -267,6 +261,21 @@ class Dashboard(  # pylint: disable=too-many-instance-attributes
"datasources": datasources,
}
+ @cache_manager.cache.memoize(
+ # manage cache version manually
+ make_name=lambda fname: f"{fname}-v1.0",
+ unless=lambda: not is_feature_enabled("DASHBOARD_CACHE"),
+ )
+ def datasets_trimmed_for_slices(self) -> Dict[str, Any]:
+ logger.info(f"called datasets_trimmed_for_slices for {self.slug}")
+ datasource_slices = utils.indexed(self.slices, "datasource")
+ return {
+ # Filter out unneeded fields from the datasource payload
+ datasource.uid: datasource.data_for_slices(slices)
+ for datasource, slices in datasource_slices.items()
+ if datasource
+ }
+
@property # type: ignore
def params(self) -> str: # type: ignore
return self.json_metadata
@@ -288,6 +297,7 @@ class Dashboard(  # pylint: disable=too-many-instance-attributes
     @debounce(0.1)
     def clear_cache(self) -> None:
         cache_manager.cache.delete_memoized(Dashboard.full_data, self)
+        cache_manager.cache.delete_memoized(Dashboard.datasets_trimmed_for_slices, self)
 
     @classmethod
     @debounce(0.1)
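
The clear_cache hunk above invalidates the new memoized entry alongside full_data: passing the class-level function plus the instance to delete_memoized clears only that dashboard's cached payload. A self-contained sketch of that invalidation pattern; the Widget class and cache setup are illustrative, not Superset code:

from flask import Flask
from flask_caching import Cache

app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "SimpleCache"})

class Widget:
    def __init__(self, uid: int) -> None:
        self.uid = uid

    @cache.memoize()
    def payload(self) -> dict:
        print(f"computing payload for widget {self.uid}")
        return {"uid": self.uid}

    def clear_cache(self) -> None:
        # Passing the class-level function plus `self` removes only this
        # instance's memoized entry; other Widget instances keep theirs.
        cache.delete_memoized(Widget.payload, self)

with app.app_context():
    w = Widget(1)
    w.payload()      # computed and cached
    w.payload()      # cache hit, no recompute
    w.clear_cache()  # drops the cached entry for this widget only
    w.payload()      # computed again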