xrmx commented on a change in pull request #4421: Update core.py
URL: 
https://github.com/apache/incubator-superset/pull/4421#discussion_r168113337
 
 

 ##########
 File path: superset/views/core.py
 ##########
 @@ -1275,13 +1276,44 @@ def filter(self, datasource_type, datasource_id, 
column):
             return json_error_response(DATASOURCE_MISSING_ERR)
         if not self.datasource_access(datasource):
             return json_error_response(DATASOURCE_ACCESS_ERR)
-
-        payload = json.dumps(
-            datasource.values_for_column(
-                column,
-                config.get('FILTER_SELECT_ROW_LIMIT', 10000),
-            ),
-            default=utils.json_int_dttm_ser)
+        # Implement: Cache endpoint by datasource and column
+        cache_key = hashlib.md5((datasource_id + 
column).encode('utf-8')).hexdigest()
+        if cache_key and cache:
+            if (hasattr(datasource, 'database') and 
datasource.database.cache_timeout):
+                cache_timeout = datasource.database.cache_timeout
+            elif (hasattr(datasource, 'database') and 
datasource.cache_timeout):
+                cache_timeout = datasource.database.cache_timeout
+            else:
+                cache_timeout = config.get('CACHE_DEFAULT_TIMEOUT')
+            cache_value = cache.get(cache_key)
+            if cache_value:
+                logging.info('Loading filter values from cache')
+                try:
+                    payload = json.dumps(cache_value, 
default=utils.json_int_dttm_ser)
+                except Exception as e:
+                    logging.exception(e)
+                    logging.error('Error reading cache:' +
+                                  utils.error_msg_from_exception(e))
+                logging.info('Serving filter values from cache')
+            else:
+                cache_value = datasource.values_for_column(
+                    column,
+                    config.get('FILTER_SELECT_ROW_LIMIT', 10000),
+                )
+                try:
+                    payload = json.dumps(cache_value, 
default=utils.json_int_dttm_ser)
+                    stats_logger.incr('set_cache_key')
+                    cache.set(cache_key, cache_value, cache_timeout)
+                except Exception as e:
 
 Review comment:
  You can't tell whether it was writing the cache (`cache.set`) or the serialization (`json.dumps`) that failed — both are inside the same `try` block. Consider splitting them into separate `try` blocks, or at least logging a message that doesn't attribute the failure to one specific step.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

Reply via email to