This is an automated email from the ASF dual-hosted git repository. johnbodley pushed a commit to branch feature--embeddable-charts-pilot in repository https://gitbox.apache.org/repos/asf/incubator-superset.git
commit 53f1ff3f364311b2ee609741fd983909daedb877 Author: Conglei Shi <[email protected]> AuthorDate: Tue Nov 13 10:21:55 2018 -0800 added cache key logic --- superset/common/query_context.py | 4 ++-- superset/common/query_object.py | 20 ++++++++++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/superset/common/query_context.py b/superset/common/query_context.py index ce90a75..2415ca3 100644 --- a/superset/common/query_context.py +++ b/superset/common/query_context.py @@ -150,7 +150,7 @@ class QueryContext: def get_df_payload(self, query_obj): """Handles caching around the df paylod retrieval""" - cache_key = query_obj.cache_key() if query_obj else None + cache_key = query_obj.cache_key(datasource=self.datasource.uid) if query_obj else None logging.info('Cache key: {}'.format(cache_key)) is_loaded = False stacktrace = None @@ -223,7 +223,7 @@ class QueryContext: cache.delete(cache_key) return { 'cache_key': cache_key, - 'cached_dttm': self._any_cached_dttm, + 'cached_dttm': cache_value['dttm'] if cache_value is not None else None, 'cache_timeout': self.cache_timeout, 'df': df, 'error': error_message, diff --git a/superset/common/query_object.py b/superset/common/query_object.py index dfac02c..0d831f6 100644 --- a/superset/common/query_object.py +++ b/superset/common/query_object.py @@ -51,5 +51,21 @@ class QueryObject: raise NotImplementedError() - def cache_key(self): - raise NotImplementedError() \ No newline at end of file + def cache_key(self, **extra): + """ + The cache key is made out of the key/values in `query_obj`, plus any + other key/values in `extra` + + We remove datetime bounds that are hard values, and replace them with + the user-provided inputs to bounds, which may be time-relative (as in + "5 days ago" or "now"). 
+ """ + cache_dict = self.to_dict() + cache_dict.update(extra) + + for k in ['from_dttm', 'to_dttm']: + del cache_dict[k] + + cache_dict['time_range'] = self.form_data.get('time_range') + json_data = self.json_dumps(cache_dict, sort_keys=True) + return hashlib.md5(json_data.encode('utf-8')).hexdigest() \ No newline at end of file
