This is an automated email from the ASF dual-hosted git repository.

villebro pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 72b9a7f  feat(key-value): add superset metastore cache (#19232)
72b9a7f is described below

commit 72b9a7fa5b1a0c1d5f9769778280b32501356570
Author: Ville Brofeldt <[email protected]>
AuthorDate: Mon Mar 21 19:46:56 2022 +0200

    feat(key-value): add superset metastore cache (#19232)
---
 docs/docs/installation/cache.mdx                   |  17 ++-
 superset/dashboards/permalink/commands/create.py   |   5 +-
 superset/dashboards/permalink/commands/get.py      |   2 +-
 superset/explore/permalink/commands/create.py      |   5 +-
 superset/explore/permalink/commands/get.py         |   2 +-
 superset/{extensions.py => extensions/__init__.py} |   3 +-
 superset/extensions/metastore_cache.py             | 117 +++++++++++++++++++++
 superset/key_value/commands/create.py              |  25 ++++-
 superset/key_value/commands/delete.py              |   6 +-
 .../commands/{delete.py => delete_expired.py}      |  42 +++-----
 superset/key_value/commands/update.py              |  11 +-
 .../key_value/commands/{update.py => upsert.py}    |  29 +++--
 superset/utils/cache_manager.py                    |  29 +++--
 tests/integration_tests/extensions/__init__.py     |  16 +++
 .../extensions/metastore_cache_test.py             |  76 +++++++++++++
 .../key_value/commands/delete_test.py              |  12 +--
 .../commands/{delete_test.py => upsert_test.py}    |  79 +++++++-------
 17 files changed, 354 insertions(+), 122 deletions(-)

diff --git a/docs/docs/installation/cache.mdx b/docs/docs/installation/cache.mdx
index e86382b..2cf56c1 100644
--- a/docs/docs/installation/cache.mdx
+++ b/docs/docs/installation/cache.mdx
@@ -7,7 +7,7 @@ version: 1
 
 ## Caching
 
-Superset uses [Flask-Caching](https://flask-caching.readthedocs.io/) for 
caching purpose. Configuring caching is as easy as providing a custom cache 
config in your
+Superset uses [Flask-Caching](https://flask-caching.readthedocs.io/) for 
caching purposes. Configuring caching is as easy as providing a custom cache 
config in your
 `superset_config.py` that complies with [the Flask-Caching 
specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).
 Flask-Caching supports various caching backends, including Redis, Memcached, 
SimpleCache (in-memory), or the
 local filesystem. Custom cache backends are also supported. See 
[here](https://flask-caching.readthedocs.io/en/latest/#custom-cache-backends) 
for specifics.
@@ -18,10 +18,17 @@ The following cache configurations can be customized:
 - Dashboard filter state (required): `FILTER_STATE_CACHE_CONFIG`.
 - Explore chart form data (required): `EXPLORE_FORM_DATA_CACHE_CONFIG`
 
-Please note, that Dashboard and Explore caching is required. When running 
Superset in debug mode, both Explore and Dashboard caches will default to 
`SimpleCache`;
-However, trying to run Superset in non-debug mode without defining a cache for 
these will cause the application to fail on startup. When running
-superset in single-worker mode, any cache backend is supported. However, when 
running Superset in on a multi-worker setup, a dedicated cache is required. For 
this
-we recommend using either Redis or Memcached:
+Please note that Dashboard and Explore caching is required. If these caches 
are undefined, Superset falls back to using a built-in cache that stores data
+in the metadata database. While it is recommended to use a dedicated cache, 
the built-in cache can also be used to cache other data.
+For example, to use the built-in cache to store chart data, use the following 
config:
+
+```python
+DATA_CACHE_CONFIG = {
+    "CACHE_TYPE": "SupersetMetastoreCache",
+    "CACHE_KEY_PREFIX": "superset_results",  # make sure this string is unique 
to avoid collisions
+    "CACHE_DEFAULT_TIMEOUT": 86400,  # 60 seconds * 60 minutes * 24 hours
+}
+```
 
 - Redis (recommended): we recommend the 
[redis](https://pypi.python.org/pypi/redis) Python package
 - Memcached: we recommend using [pylibmc](https://pypi.org/project/pylibmc/) 
client library as
diff --git a/superset/dashboards/permalink/commands/create.py 
b/superset/dashboards/permalink/commands/create.py
index 954c08a..a97f228 100644
--- a/superset/dashboards/permalink/commands/create.py
+++ b/superset/dashboards/permalink/commands/create.py
@@ -51,7 +51,10 @@ class 
CreateDashboardPermalinkCommand(BaseDashboardPermalinkCommand):
                 "state": self.state,
             }
             return CreateKeyValueCommand(
-                self.actor, self.resource, value, self.key_type
+                actor=self.actor,
+                resource=self.resource,
+                value=value,
+                key_type=self.key_type,
             ).run()
         except SQLAlchemyError as ex:
             logger.exception("Error running create command")
diff --git a/superset/dashboards/permalink/commands/get.py 
b/superset/dashboards/permalink/commands/get.py
index c82ade6..6cb2749 100644
--- a/superset/dashboards/permalink/commands/get.py
+++ b/superset/dashboards/permalink/commands/get.py
@@ -44,7 +44,7 @@ class 
GetDashboardPermalinkCommand(BaseDashboardPermalinkCommand):
         self.validate()
         try:
             command = GetKeyValueCommand(
-                self.resource, self.key, key_type=self.key_type
+                resource=self.resource, key=self.key, key_type=self.key_type
             )
             value: Optional[DashboardPermalinkValue] = command.run()
             if value:
diff --git a/superset/explore/permalink/commands/create.py 
b/superset/explore/permalink/commands/create.py
index 177aa7a..936f200 100644
--- a/superset/explore/permalink/commands/create.py
+++ b/superset/explore/permalink/commands/create.py
@@ -49,7 +49,10 @@ class 
CreateExplorePermalinkCommand(BaseExplorePermalinkCommand):
                 "state": self.state,
             }
             command = CreateKeyValueCommand(
-                self.actor, self.resource, value, self.key_type
+                actor=self.actor,
+                resource=self.resource,
+                value=value,
+                key_type=self.key_type,
             )
             return command.run()
         except SQLAlchemyError as ex:
diff --git a/superset/explore/permalink/commands/get.py 
b/superset/explore/permalink/commands/get.py
index 0db9da1..e22ab83 100644
--- a/superset/explore/permalink/commands/get.py
+++ b/superset/explore/permalink/commands/get.py
@@ -44,7 +44,7 @@ class GetExplorePermalinkCommand(BaseExplorePermalinkCommand):
         self.validate()
         try:
             value: Optional[ExplorePermalinkValue] = GetKeyValueCommand(
-                self.resource, self.key, key_type=self.key_type
+                resource=self.resource, key=self.key, key_type=self.key_type
             ).run()
             if value:
                 chart_id: Optional[int] = value.get("chartId")
diff --git a/superset/extensions.py b/superset/extensions/__init__.py
similarity index 98%
rename from superset/extensions.py
rename to superset/extensions/__init__.py
index 742182b..1f5882f 100644
--- a/superset/extensions.py
+++ b/superset/extensions/__init__.py
@@ -16,6 +16,7 @@
 # under the License.
 import json
 import os
+from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional
 
 import celery
@@ -108,7 +109,7 @@ class ProfilingExtension:  # pylint: 
disable=too-few-public-methods
         app.wsgi_app = SupersetProfiler(app.wsgi_app, self.interval)  # type: 
ignore
 
 
-APP_DIR = os.path.dirname(__file__)
+APP_DIR = os.path.join(os.path.dirname(__file__), os.path.pardir)
 appbuilder = AppBuilder(update_perms=False)
 async_query_manager = AsyncQueryManager()
 cache_manager = CacheManager()
diff --git a/superset/extensions/metastore_cache.py 
b/superset/extensions/metastore_cache.py
new file mode 100644
index 0000000..156f777
--- /dev/null
+++ b/superset/extensions/metastore_cache.py
@@ -0,0 +1,117 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from datetime import datetime, timedelta
+from hashlib import md5
+from typing import Any, Dict, List, Optional
+from uuid import UUID, uuid3
+
+from flask import Flask
+from flask_caching import BaseCache
+
+from superset.key_value.exceptions import KeyValueCreateFailedError
+from superset.key_value.types import KeyType
+
+RESOURCE = "superset_metastore_cache"
+KEY_TYPE: KeyType = "uuid"
+
+
+class SupersetMetastoreCache(BaseCache):
+    def __init__(self, namespace: UUID, default_timeout: int = 300) -> None:
+        super().__init__(default_timeout)
+        self.namespace = namespace
+
+    @classmethod
+    def factory(
+        cls, app: Flask, config: Dict[str, Any], args: List[Any], kwargs: 
Dict[str, Any]
+    ) -> BaseCache:
+        # base namespace for generating deterministic UUIDs
+        md5_obj = md5()
+        seed = config.get("CACHE_KEY_PREFIX", "")
+        md5_obj.update(seed.encode("utf-8"))
+        kwargs["namespace"] = UUID(md5_obj.hexdigest())
+        return cls(*args, **kwargs)
+
+    def get_key(self, key: str) -> str:
+        return str(uuid3(self.namespace, key))
+
+    @staticmethod
+    def _prune() -> None:
+        # pylint: disable=import-outside-toplevel
+        from superset.key_value.commands.delete_expired import (
+            DeleteExpiredKeyValueCommand,
+        )
+
+        DeleteExpiredKeyValueCommand(resource=RESOURCE).run()
+
+    def _get_expiry(self, timeout: Optional[int]) -> Optional[datetime]:
+        timeout = self._normalize_timeout(timeout)
+        if timeout is not None and timeout > 0:
+            return datetime.now() + timedelta(seconds=timeout)
+        return None
+
+    def set(self, key: str, value: Any, timeout: Optional[int] = None) -> bool:
+        # pylint: disable=import-outside-toplevel
+        from superset.key_value.commands.upsert import UpsertKeyValueCommand
+
+        UpsertKeyValueCommand(
+            resource=RESOURCE,
+            key_type=KEY_TYPE,
+            key=self.get_key(key),
+            value=value,
+            expires_on=self._get_expiry(timeout),
+        ).run()
+        return True
+
+    def add(self, key: str, value: Any, timeout: Optional[int] = None) -> bool:
+        # pylint: disable=import-outside-toplevel
+        from superset.key_value.commands.create import CreateKeyValueCommand
+
+        try:
+            CreateKeyValueCommand(
+                resource=RESOURCE,
+                value=value,
+                key_type=KEY_TYPE,
+                key=self.get_key(key),
+                expires_on=self._get_expiry(timeout),
+            ).run()
+            self._prune()
+            return True
+        except KeyValueCreateFailedError:
+            return False
+
+    def get(self, key: str) -> Any:
+        # pylint: disable=import-outside-toplevel
+        from superset.key_value.commands.get import GetKeyValueCommand
+
+        return GetKeyValueCommand(
+            resource=RESOURCE, key_type=KEY_TYPE, key=self.get_key(key),
+        ).run()
+
+    def has(self, key: str) -> bool:
+        entry = self.get(key)
+        if entry:
+            return True
+        return False
+
+    def delete(self, key: str) -> Any:
+        # pylint: disable=import-outside-toplevel
+        from superset.key_value.commands.delete import DeleteKeyValueCommand
+
+        return DeleteKeyValueCommand(
+            resource=RESOURCE, key_type=KEY_TYPE, key=self.get_key(key),
+        ).run()
diff --git a/superset/key_value/commands/create.py 
b/superset/key_value/commands/create.py
index 5f5bf67..e3c228a 100644
--- a/superset/key_value/commands/create.py
+++ b/superset/key_value/commands/create.py
@@ -18,6 +18,7 @@ import logging
 import pickle
 from datetime import datetime
 from typing import Any, Optional
+from uuid import UUID
 
 from flask_appbuilder.security.sqla.models import User
 from sqlalchemy.exc import SQLAlchemyError
@@ -33,18 +34,20 @@ logger = logging.getLogger(__name__)
 
 
 class CreateKeyValueCommand(BaseCommand):
-    actor: User
+    actor: Optional[User]
     resource: str
     value: Any
     key_type: KeyType
+    key: Optional[str]
     expires_on: Optional[datetime]
 
     def __init__(
         self,
-        actor: User,
         resource: str,
         value: Any,
-        key_type: KeyType,
+        key_type: KeyType = "uuid",
+        actor: Optional[User] = None,
+        key: Optional[str] = None,
         expires_on: Optional[datetime] = None,
     ):
         """
@@ -53,6 +56,8 @@ class CreateKeyValueCommand(BaseCommand):
         :param resource: the resource (dashboard, chart etc)
         :param value: the value to persist in the key-value store
         :param key_type: the type of the key to return
+        :param actor: the user performing the command
+        :param key: id of entry (autogenerated if undefined)
         :param expires_on: entry expiration time
         :return: the key associated with the persisted value
         """
@@ -60,12 +65,14 @@ class CreateKeyValueCommand(BaseCommand):
         self.actor = actor
         self.value = value
         self.key_type = key_type
+        self.key = key
         self.expires_on = expires_on
 
     def run(self) -> str:
         try:
             return self.create()
         except SQLAlchemyError as ex:
+            db.session.rollback()
             logger.exception("Error running create command")
             raise KeyValueCreateFailedError() from ex
 
@@ -77,9 +84,19 @@ class CreateKeyValueCommand(BaseCommand):
             resource=self.resource,
             value=pickle.dumps(self.value),
             created_on=datetime.now(),
-            created_by_fk=None if self.actor.is_anonymous else self.actor.id,
+            created_by_fk=None
+            if self.actor is None or self.actor.is_anonymous
+            else self.actor.id,
             expires_on=self.expires_on,
         )
+        if self.key is not None:
+            try:
+                if self.key_type == "uuid":
+                    entry.uuid = UUID(self.key)
+                else:
+                    entry.id = int(self.key)
+            except ValueError as ex:
+                raise KeyValueCreateFailedError() from ex
         db.session.add(entry)
         db.session.commit()
         return extract_key(entry, self.key_type)
diff --git a/superset/key_value/commands/delete.py 
b/superset/key_value/commands/delete.py
index 6eb340e..06cf423 100644
--- a/superset/key_value/commands/delete.py
+++ b/superset/key_value/commands/delete.py
@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 import logging
+from typing import Optional
 
 from flask_appbuilder.security.sqla.models import User
 from sqlalchemy.exc import SQLAlchemyError
@@ -30,13 +31,12 @@ logger = logging.getLogger(__name__)
 
 
 class DeleteKeyValueCommand(BaseCommand):
-    actor: User
     key: str
     key_type: KeyType
     resource: str
 
     def __init__(
-        self, actor: User, resource: str, key: str, key_type: KeyType = "uuid"
+        self, resource: str, key: str, key_type: KeyType = "uuid",
     ):
         """
         Delete a key-value pair
@@ -47,7 +47,6 @@ class DeleteKeyValueCommand(BaseCommand):
         :return: was the entry deleted or not
         """
         self.resource = resource
-        self.actor = actor
         self.key = key
         self.key_type = key_type
 
@@ -55,6 +54,7 @@ class DeleteKeyValueCommand(BaseCommand):
         try:
             return self.delete()
         except SQLAlchemyError as ex:
+            db.session.rollback()
             logger.exception("Error running delete command")
             raise KeyValueDeleteFailedError() from ex
 
diff --git a/superset/key_value/commands/delete.py 
b/superset/key_value/commands/delete_expired.py
similarity index 62%
copy from superset/key_value/commands/delete.py
copy to superset/key_value/commands/delete_expired.py
index 6eb340e..0950739 100644
--- a/superset/key_value/commands/delete.py
+++ b/superset/key_value/commands/delete_expired.py
@@ -15,62 +15,46 @@
 # specific language governing permissions and limitations
 # under the License.
 import logging
+from datetime import datetime
 
-from flask_appbuilder.security.sqla.models import User
 from sqlalchemy.exc import SQLAlchemyError
 
 from superset import db
 from superset.commands.base import BaseCommand
 from superset.key_value.exceptions import KeyValueDeleteFailedError
 from superset.key_value.models import KeyValueEntry
-from superset.key_value.types import KeyType
-from superset.key_value.utils import get_filter
 
 logger = logging.getLogger(__name__)
 
 
-class DeleteKeyValueCommand(BaseCommand):
-    actor: User
-    key: str
-    key_type: KeyType
+class DeleteExpiredKeyValueCommand(BaseCommand):
     resource: str
 
-    def __init__(
-        self, actor: User, resource: str, key: str, key_type: KeyType = "uuid"
-    ):
+    def __init__(self, resource: str):
         """
-        Delete a key-value pair
+        Delete all expired key-value pairs
 
         :param resource: the resource (dashboard, chart etc)
-        :param key: the key to delete
-        :param key_type: the type of key
         :return: was the entry deleted or not
         """
         self.resource = resource
-        self.actor = actor
-        self.key = key
-        self.key_type = key_type
 
-    def run(self) -> bool:
+    def run(self) -> None:
         try:
-            return self.delete()
+            self.delete_expired()
         except SQLAlchemyError as ex:
+            db.session.rollback()
             logger.exception("Error running delete command")
             raise KeyValueDeleteFailedError() from ex
 
     def validate(self) -> None:
         pass
 
-    def delete(self) -> bool:
-        filter_ = get_filter(self.resource, self.key, self.key_type)
-        entry = (
+    @staticmethod
+    def delete_expired() -> None:
+        (
             db.session.query(KeyValueEntry)
-            .filter_by(**filter_)
-            .autoflush(False)
-            .first()
+            .filter(KeyValueEntry.expires_on <= datetime.now())
+            .delete()
         )
-        if entry:
-            db.session.delete(entry)
-            db.session.commit()
-            return True
-        return False
+        db.session.commit()
diff --git a/superset/key_value/commands/update.py 
b/superset/key_value/commands/update.py
index aed4c29..b739cfe 100644
--- a/superset/key_value/commands/update.py
+++ b/superset/key_value/commands/update.py
@@ -34,7 +34,7 @@ logger = logging.getLogger(__name__)
 
 
 class UpdateKeyValueCommand(BaseCommand):
-    actor: User
+    actor: Optional[User]
     resource: str
     value: Any
     key: str
@@ -43,10 +43,10 @@ class UpdateKeyValueCommand(BaseCommand):
 
     def __init__(
         self,
-        actor: User,
         resource: str,
         key: str,
         value: Any,
+        actor: Optional[User] = None,
         key_type: KeyType = "uuid",
         expires_on: Optional[datetime] = None,
     ):
@@ -56,6 +56,7 @@ class UpdateKeyValueCommand(BaseCommand):
         :param resource: the resource (dashboard, chart etc)
         :param key: the key to update
         :param value: the value to persist in the key-value store
+        :param actor: the user performing the command
         :param key_type: the type of the key to update
         :param expires_on: entry expiration time
         :return: the key associated with the updated value
@@ -71,6 +72,7 @@ class UpdateKeyValueCommand(BaseCommand):
         try:
             return self.update()
         except SQLAlchemyError as ex:
+            db.session.rollback()
             logger.exception("Error running update command")
             raise KeyValueUpdateFailedError() from ex
 
@@ -89,8 +91,11 @@ class UpdateKeyValueCommand(BaseCommand):
             entry.value = pickle.dumps(self.value)
             entry.expires_on = self.expires_on
             entry.changed_on = datetime.now()
-            entry.changed_by_fk = None if self.actor.is_anonymous else 
self.actor.id
+            entry.changed_by_fk = (
+                None if self.actor is None or self.actor.is_anonymous else 
self.actor.id
+            )
             db.session.merge(entry)
             db.session.commit()
             return extract_key(entry, self.key_type)
+
         return None
diff --git a/superset/key_value/commands/update.py 
b/superset/key_value/commands/upsert.py
similarity index 78%
copy from superset/key_value/commands/update.py
copy to superset/key_value/commands/upsert.py
index aed4c29..4afc4c3 100644
--- a/superset/key_value/commands/update.py
+++ b/superset/key_value/commands/upsert.py
@@ -25,6 +25,7 @@ from sqlalchemy.exc import SQLAlchemyError
 
 from superset import db
 from superset.commands.base import BaseCommand
+from superset.key_value.commands.create import CreateKeyValueCommand
 from superset.key_value.exceptions import KeyValueUpdateFailedError
 from superset.key_value.models import KeyValueEntry
 from superset.key_value.types import KeyType
@@ -33,8 +34,8 @@ from superset.key_value.utils import extract_key, get_filter
 logger = logging.getLogger(__name__)
 
 
-class UpdateKeyValueCommand(BaseCommand):
-    actor: User
+class UpsertKeyValueCommand(BaseCommand):
+    actor: Optional[User]
     resource: str
     value: Any
     key: str
@@ -43,20 +44,21 @@ class UpdateKeyValueCommand(BaseCommand):
 
     def __init__(
         self,
-        actor: User,
         resource: str,
         key: str,
         value: Any,
+        actor: Optional[User] = None,
         key_type: KeyType = "uuid",
         expires_on: Optional[datetime] = None,
     ):
         """
-        Update a key value entry
+        Upsert a key value entry
 
         :param resource: the resource (dashboard, chart etc)
         :param key: the key to update
         :param value: the value to persist in the key-value store
         :param key_type: the type of the key to update
+        :param actor: the user performing the command
         :param expires_on: entry expiration time
         :return: the key associated with the updated value
         """
@@ -69,15 +71,16 @@ class UpdateKeyValueCommand(BaseCommand):
 
     def run(self) -> Optional[str]:
         try:
-            return self.update()
+            return self.upsert()
         except SQLAlchemyError as ex:
+            db.session.rollback()
             logger.exception("Error running update command")
             raise KeyValueUpdateFailedError() from ex
 
     def validate(self) -> None:
         pass
 
-    def update(self) -> Optional[str]:
+    def upsert(self) -> Optional[str]:
         filter_ = get_filter(self.resource, self.key, self.key_type)
         entry: KeyValueEntry = (
             db.session.query(KeyValueEntry)
@@ -89,8 +92,18 @@ class UpdateKeyValueCommand(BaseCommand):
             entry.value = pickle.dumps(self.value)
             entry.expires_on = self.expires_on
             entry.changed_on = datetime.now()
-            entry.changed_by_fk = None if self.actor.is_anonymous else 
self.actor.id
+            entry.changed_by_fk = (
+                None if self.actor is None or self.actor.is_anonymous else 
self.actor.id
+            )
             db.session.merge(entry)
             db.session.commit()
             return extract_key(entry, self.key_type)
-        return None
+        else:
+            return CreateKeyValueCommand(
+                resource=self.resource,
+                value=self.value,
+                key_type=self.key_type,
+                actor=self.actor,
+                key=self.key,
+                expires_on=self.expires_on,
+            ).run()
diff --git a/superset/utils/cache_manager.py b/superset/utils/cache_manager.py
index a0c7590..31752bb 100644
--- a/superset/utils/cache_manager.py
+++ b/superset/utils/cache_manager.py
@@ -15,14 +15,14 @@
 # specific language governing permissions and limitations
 # under the License.
 import logging
-import math
 
 from flask import Flask
-from flask_babel import gettext as _
 from flask_caching import Cache
 
 logger = logging.getLogger(__name__)
 
+CACHE_IMPORT_PATH = 
"superset.extensions.metastore_cache.SupersetMetastoreCache"
+
 
 class CacheManager:
     def __init__(self) -> None:
@@ -40,27 +40,24 @@ class CacheManager:
     ) -> None:
         cache_config = app.config[cache_config_key]
         cache_type = cache_config.get("CACHE_TYPE")
-        if app.debug and cache_type is None:
-            cache_threshold = cache_config.get("CACHE_THRESHOLD", math.inf)
+        if required and cache_type in (None, "SupersetMetastoreCache"):
+            if cache_type is None:
+                logger.warning(
+                    "Falling back to the built-in cache, which stores data in 
the "
+                    "metadata database, for the following cache: `%s`. "
+                    "It is recommended to use `RedisCache`, `MemcachedCache` 
or "
+                    "another dedicated caching backend for production 
deployments",
+                    cache_config_key,
+                )
+            cache_key_prefix = cache_config.get("CACHE_KEY_PREFIX", 
cache_config_key)
             cache_config.update(
-                {"CACHE_TYPE": "SimpleCache", "CACHE_THRESHOLD": 
cache_threshold,}
+                {"CACHE_TYPE": CACHE_IMPORT_PATH, "CACHE_KEY_PREFIX": 
cache_key_prefix}
             )
 
         if "CACHE_DEFAULT_TIMEOUT" not in cache_config:
             default_timeout = app.config.get("CACHE_DEFAULT_TIMEOUT")
             cache_config["CACHE_DEFAULT_TIMEOUT"] = default_timeout
 
-        if required and cache_type in ("null", "NullCache"):
-            raise Exception(
-                _(
-                    "The CACHE_TYPE `%(cache_type)s` for 
`%(cache_config_key)s` is not "
-                    "supported. It is recommended to use `RedisCache`, "
-                    "`MemcachedCache` or another dedicated caching backend for 
"
-                    "production deployments",
-                    cache_type=cache_config["CACHE_TYPE"],
-                    cache_config_key=cache_config_key,
-                ),
-            )
         cache.init_app(app, cache_config)
 
     def init_app(self, app: Flask) -> None:
diff --git a/tests/integration_tests/extensions/__init__.py 
b/tests/integration_tests/extensions/__init__.py
new file mode 100644
index 0000000..13a8339
--- /dev/null
+++ b/tests/integration_tests/extensions/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/integration_tests/extensions/metastore_cache_test.py 
b/tests/integration_tests/extensions/metastore_cache_test.py
new file mode 100644
index 0000000..eb264c9
--- /dev/null
+++ b/tests/integration_tests/extensions/metastore_cache_test.py
@@ -0,0 +1,76 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from datetime import datetime, timedelta
+from typing import TYPE_CHECKING
+from uuid import UUID
+
+import pytest
+from flask.ctx import AppContext
+from freezegun import freeze_time
+
+if TYPE_CHECKING:
+    from superset.extensions.metastore_cache import SupersetMetastoreCache
+
+FIRST_KEY = "foo"
+FIRST_KEY_INITIAL_VALUE = {"foo": "bar"}
+FIRST_KEY_UPDATED_VALUE = "foo"
+
+SECOND_KEY = "baz"
+SECOND_VALUE = "qwerty"
+
+
[email protected]
+def cache() -> SupersetMetastoreCache:
+    from superset.extensions.metastore_cache import SupersetMetastoreCache
+
+    return SupersetMetastoreCache(
+        namespace=UUID("ee173d1b-ccf3-40aa-941c-985c15224496"), 
default_timeout=600,
+    )
+
+
+def test_caching_flow(app_context: AppContext, cache: SupersetMetastoreCache) 
-> None:
+    assert cache.has(FIRST_KEY) is False
+    assert cache.add(FIRST_KEY, FIRST_KEY_INITIAL_VALUE) is True
+    assert cache.has(FIRST_KEY) is True
+    cache.set(SECOND_KEY, SECOND_VALUE)
+    assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE
+    assert cache.get(SECOND_KEY) == SECOND_VALUE
+    assert cache.add(FIRST_KEY, FIRST_KEY_UPDATED_VALUE) is False
+    assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE
+    assert cache.set(FIRST_KEY, FIRST_KEY_UPDATED_VALUE) == True
+    assert cache.get(FIRST_KEY) == FIRST_KEY_UPDATED_VALUE
+    cache.delete(FIRST_KEY)
+    assert cache.has(FIRST_KEY) is False
+    assert cache.get(FIRST_KEY) is None
+    assert cache.has(SECOND_KEY)
+    assert cache.get(SECOND_KEY) == SECOND_VALUE
+
+
+def test_expiry(app_context: AppContext, cache: SupersetMetastoreCache) -> 
None:
+    delta = timedelta(days=90)
+    dttm = datetime(2022, 3, 18, 0, 0, 0)
+    with freeze_time(dttm):
+        cache.set(FIRST_KEY, FIRST_KEY_INITIAL_VALUE, 
int(delta.total_seconds()))
+        assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE
+    with freeze_time(dttm + delta - timedelta(seconds=1)):
+        assert cache.has(FIRST_KEY)
+        assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE
+    with freeze_time(dttm + delta + timedelta(seconds=1)):
+        assert cache.has(FIRST_KEY) is False
+        assert cache.get(FIRST_KEY) is None
diff --git a/tests/integration_tests/key_value/commands/delete_test.py 
b/tests/integration_tests/key_value/commands/delete_test.py
index 3a25cb7..a98d941 100644
--- a/tests/integration_tests/key_value/commands/delete_test.py
+++ b/tests/integration_tests/key_value/commands/delete_test.py
@@ -56,9 +56,7 @@ def test_delete_id_entry(
     from superset.key_value.models import KeyValueEntry
 
     assert (
-        DeleteKeyValueCommand(
-            actor=admin, resource=RESOURCE, key=ID_KEY, key_type="id",
-        ).run()
+        DeleteKeyValueCommand(resource=RESOURCE, key=ID_KEY, key_type="id",).run()
         is True
     )
 
@@ -70,9 +68,7 @@ def test_delete_uuid_entry(
     from superset.key_value.models import KeyValueEntry
 
     assert (
-        DeleteKeyValueCommand(
-            actor=admin, resource=RESOURCE, key=UUID_KEY, key_type="uuid",
-        ).run()
+        DeleteKeyValueCommand(resource=RESOURCE, key=UUID_KEY, key_type="uuid").run()
         is True
     )
 
@@ -84,8 +80,6 @@ def test_delete_entry_missing(
     from superset.key_value.models import KeyValueEntry
 
     assert (
-        DeleteKeyValueCommand(
-            actor=admin, resource=RESOURCE, key="456", key_type="id",
-        ).run()
+        DeleteKeyValueCommand(resource=RESOURCE, key="456", key_type="id").run()
         is False
     )
diff --git a/tests/integration_tests/key_value/commands/delete_test.py b/tests/integration_tests/key_value/commands/upsert_test.py
similarity index 55%
copy from tests/integration_tests/key_value/commands/delete_test.py
copy to tests/integration_tests/key_value/commands/upsert_test.py
index 3a25cb7..3221147 100644
--- a/tests/integration_tests/key_value/commands/delete_test.py
+++ b/tests/integration_tests/key_value/commands/upsert_test.py
@@ -20,72 +20,71 @@ import pickle
 from typing import TYPE_CHECKING
 from uuid import UUID
 
-import pytest
 from flask.ctx import AppContext
 from flask_appbuilder.security.sqla.models import User
 
 from superset.extensions import db
-from tests.integration_tests.key_value.commands.fixtures import admin, RESOURCE, VALUE
+from tests.integration_tests.key_value.commands.fixtures import (
+    admin,
+    ID_KEY,
+    key_value_entry,
+    RESOURCE,
+    UUID_KEY,
+)
 
 if TYPE_CHECKING:
     from superset.key_value.models import KeyValueEntry
 
-ID_KEY = "234"
-UUID_KEY = "5aae143c-44f1-478e-9153-ae6154df333a"
 
+NEW_VALUE = "new value"
 
[email protected]
-def key_value_entry() -> KeyValueEntry:
-    from superset.key_value.models import KeyValueEntry
-
-    entry = KeyValueEntry(
-        id=int(ID_KEY),
-        uuid=UUID(UUID_KEY),
-        resource=RESOURCE,
-        value=pickle.dumps(VALUE),
-    )
-    db.session.add(entry)
-    db.session.commit()
-    return entry
 
-
-def test_delete_id_entry(
+def test_upsert_id_entry(
     app_context: AppContext, admin: User, key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.delete import DeleteKeyValueCommand
+    from superset.key_value.commands.upsert import UpsertKeyValueCommand
     from superset.key_value.models import KeyValueEntry
 
-    assert (
-        DeleteKeyValueCommand(
-            actor=admin, resource=RESOURCE, key=ID_KEY, key_type="id",
-        ).run()
-        is True
+    key = UpsertKeyValueCommand(
+        actor=admin, resource=RESOURCE, key=ID_KEY, value=NEW_VALUE, key_type="id",
+    ).run()
+    assert key == ID_KEY
+    entry = (
+        db.session.query(KeyValueEntry).filter_by(id=int(ID_KEY)).autoflush(False).one()
     )
+    assert pickle.loads(entry.value) == NEW_VALUE
+    assert entry.changed_by_fk == admin.id
 
 
-def test_delete_uuid_entry(
+def test_upsert_uuid_entry(
     app_context: AppContext, admin: User, key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.delete import DeleteKeyValueCommand
+    from superset.key_value.commands.upsert import UpsertKeyValueCommand
     from superset.key_value.models import KeyValueEntry
 
-    assert (
-        DeleteKeyValueCommand(
-            actor=admin, resource=RESOURCE, key=UUID_KEY, key_type="uuid",
-        ).run()
-        is True
+    key = UpsertKeyValueCommand(
+        actor=admin, resource=RESOURCE, key=UUID_KEY, value=NEW_VALUE, key_type="uuid",
+    ).run()
+    assert key == UUID_KEY
+    entry = (
+        db.session.query(KeyValueEntry)
+        .filter_by(uuid=UUID(UUID_KEY))
+        .autoflush(False)
+        .one()
     )
+    assert pickle.loads(entry.value) == NEW_VALUE
+    assert entry.changed_by_fk == admin.id
 
 
-def test_delete_entry_missing(
+def test_upsert_missing_entry(
     app_context: AppContext, admin: User, key_value_entry: KeyValueEntry,
 ) -> None:
-    from superset.key_value.commands.delete import DeleteKeyValueCommand
+    from superset.key_value.commands.upsert import UpsertKeyValueCommand
     from superset.key_value.models import KeyValueEntry
 
-    assert (
-        DeleteKeyValueCommand(
-            actor=admin, resource=RESOURCE, key="456", key_type="id",
-        ).run()
-        is False
-    )
+    key = UpsertKeyValueCommand(
+        actor=admin, resource=RESOURCE, key="456", value=NEW_VALUE, key_type="id",
+    ).run()
+    assert key == "456"
+    db.session.query(KeyValueEntry).filter_by(id=456).delete()
+    db.session.commit()

Reply via email to