This is an automated email from the ASF dual-hosted git repository.
suddjian pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git
The following commit(s) were added to refs/heads/master by this push:
new 91ba897 fix(dashboard): Add caching for dashboard datasets (#14306)
91ba897 is described below
commit 91ba897f8e7b8007ce236ac5eae0b47c0b4e200f
Author: David Aaron Suddjian <[email protected]>
AuthorDate: Fri Apr 23 21:04:45 2021 -0700
fix(dashboard): Add caching for dashboard datasets (#14306)
* fix(dashboard): [WIP] add caching back in to the dashboard dataset api
* caching works! remove log message
* remove unused full_data method
* add caching to the charts endpoint as well
* spread the cache love
* lint
* Revert "spread the cache love"
This reverts commit ef322a3b0df7d35446a60141ad98288102547a52.
* Revert "add caching to the charts endpoint as well"
This reverts commit d3d1584989324efe56d08c081570149454ddf212.
* it's a list
---
superset/dashboards/dao.py | 9 +--------
superset/models/dashboard.py | 35 ++++++++++-------------------------
2 files changed, 11 insertions(+), 33 deletions(-)
diff --git a/superset/dashboards/dao.py b/superset/dashboards/dao.py
index 707da7b..4a8a314 100644
--- a/superset/dashboards/dao.py
+++ b/superset/dashboards/dao.py
@@ -28,7 +28,6 @@ from superset.extensions import db
from superset.models.core import FavStar, FavStarClassName
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
-from superset.utils import core
from superset.utils.dashboard_filter_scopes_converter import copy_filter_scopes
logger = logging.getLogger(__name__)
@@ -49,13 +48,7 @@ class DashboardDAO(BaseDAO):
@staticmethod
def get_datasets_for_dashboard(id_or_slug: str) -> List[Any]:
dashboard = DashboardDAO.get_by_id_or_slug(id_or_slug)
- datasource_slices = core.indexed(dashboard.slices, "datasource")
- data = [
- datasource.data_for_slices(slices)
- for datasource, slices in datasource_slices.items()
- if datasource
- ]
- return data
+ return dashboard.datasets_trimmed_for_slices()
@staticmethod
def get_charts_for_dashboard(id_or_slug: str) -> List[Slice]:
diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py
index 774cf62..61607d1 100644
--- a/superset/models/dashboard.py
+++ b/superset/models/dashboard.py
@@ -38,7 +38,6 @@ from sqlalchemy import (
UniqueConstraint,
)
from sqlalchemy.engine.base import Connection
-from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import relationship, sessionmaker, subqueryload
from sqlalchemy.orm.mapper import Mapper
from sqlalchemy.orm.session import object_session
@@ -49,7 +48,6 @@ from superset import app, ConnectorRegistry, db, is_feature_enabled, security_manager
from superset.connectors.base.models import BaseDatasource
from superset.connectors.druid.models import DruidColumn, DruidMetric
from superset.connectors.sqla.models import SqlMetric, TableColumn
-from superset.exceptions import SupersetException
from superset.extensions import cache_manager
from superset.models.helpers import AuditMixinNullable, ImportExportMixin
from superset.models.slice import Slice
@@ -242,30 +240,17 @@ class Dashboard(  # pylint: disable=too-many-instance-attributes
@cache_manager.cache.memoize(
# manage cache version manually
- make_name=lambda fname: f"{fname}-v2.1",
+ make_name=lambda fname: f"{fname}-v1.0",
unless=lambda: not is_feature_enabled("DASHBOARD_CACHE"),
)
- def full_data(self) -> Dict[str, Any]:
- """Bootstrap data for rendering the dashboard page."""
- slices = self.slices
- datasource_slices = utils.indexed(slices, "datasource")
- try:
- datasources = {
- # Filter out unneeded fields from the datasource payload
- datasource.uid: datasource.data_for_slices(slices)
- for datasource, slices in datasource_slices.items()
- if datasource
- }
- except (SupersetException, SQLAlchemyError):
- datasources = {}
- return {
- # dashboard metadata
- "dashboard": self.data,
- # slices metadata
- "slices": [slc.data for slc in slices],
- # datasource metadata
- "datasources": datasources,
- }
+ def datasets_trimmed_for_slices(self) -> List[Dict[str, Any]]:
+ datasource_slices = utils.indexed(self.slices, "datasource")
+ return [
+ # Filter out unneeded fields from the datasource payload
+ datasource.data_for_slices(slices)
+ for datasource, slices in datasource_slices.items()
+ if datasource
+ ]
@property # type: ignore
def params(self) -> str: # type: ignore
@@ -287,7 +272,7 @@ class Dashboard(  # pylint: disable=too-many-instance-attributes
@debounce(0.1)
def clear_cache(self) -> None:
- cache_manager.cache.delete_memoized(Dashboard.full_data, self)
+ cache_manager.cache.delete_memoized(Dashboard.datasets_trimmed_for_slices, self)
@classmethod
@debounce(0.1)