This is an automated email from the ASF dual-hosted git repository.
mayurnewase pushed a commit to branch fix-21635
in repository https://gitbox.apache.org/repos/asf/superset.git
The following commit(s) were added to refs/heads/fix-21635 by this push:
new 877ac2f1cf add tests
877ac2f1cf is described below
commit 877ac2f1cf2b4c28ce3795712b6aa24c5e90e4cb
Author: Mayur <[email protected]>
AuthorDate: Thu Sep 29 14:44:32 2022 +0530
add tests
---
superset/common/query_context.py | 4 +-
tests/integration_tests/charts/data/api_tests.py | 85 +++++++++++++++++++++-
.../integration_tests/fixtures/energy_dashboard.py | 14 ++--
3 files changed, 94 insertions(+), 9 deletions(-)
diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index b9414fddd1..3ff5f914d3 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -47,7 +47,7 @@ class QueryContext:
enforce_numerical_metrics: ClassVar[bool] = True
datasource: BaseDatasource
- slice_id: Optional[int] = None
+ slice: Optional[Slice] = None
queries: List[QueryObject]
form_data: Optional[Dict[str, Any]]
result_type: ChartDataResultType
@@ -102,7 +102,7 @@ class QueryContext:
def get_cache_timeout(self) -> Optional[int]:
if self.custom_cache_timeout is not None:
return self.custom_cache_timeout
- if self.slice and self.slice.cache_timeout:
+ if self.slice and self.slice.cache_timeout is not None:
return self.slice.cache_timeout
if self.datasource.cache_timeout is not None:
return self.datasource.cache_timeout
diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py
index 73d33cd793..4a56d0df29 100644
--- a/tests/integration_tests/charts/data/api_tests.py
+++ b/tests/integration_tests/charts/data/api_tests.py
@@ -21,7 +21,7 @@ import unittest
import copy
from datetime import datetime
from io import BytesIO
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, List
from unittest import mock
from zipfile import ZipFile
@@ -38,8 +38,12 @@ from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_data,
)
from tests.integration_tests.test_app import app
-
+from tests.integration_tests.fixtures.energy_dashboard import (
+ load_energy_table_with_slice,
+ load_energy_table_data,
+)
import pytest
+from superset.models.slice import Slice
from superset.charts.data.commands.get_data_command import ChartDataCommand
from superset.connectors.sqla.models import TableColumn, SqlaTable
@@ -976,3 +980,80 @@ def test_data_cache_default_timeout(
):
rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
assert rv.json["result"][0]["cache_timeout"] == 3456
+
+
+def test_chart_cache_timeout(
+ test_client,
+ login_as_admin,
+ physical_query_context,
+ load_energy_table_with_slice: List[Slice],
+):
+ # should override datasource cache timeout
+
+ slice_with_cache_timeout = load_energy_table_with_slice[0]
+ slice_with_cache_timeout.cache_timeout = 20
+ db.session.merge(slice_with_cache_timeout)
+
+ datasource: SqlaTable = (
+ db.session.query(SqlaTable)
+ .filter(SqlaTable.id == physical_query_context["datasource"]["id"])
+ .first()
+ )
+ datasource.cache_timeout = 1254
+ db.session.merge(datasource)
+
+ db.session.commit()
+
+ physical_query_context["form_data"] = {"slice_id": slice_with_cache_timeout.id}
+
+ rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
+ assert rv.json["result"][0]["cache_timeout"] == 20
+
+
[email protected](
+ "superset.common.query_context_processor.config",
+ {
+ **app.config,
+ "DATA_CACHE_CONFIG": {
+ **app.config["DATA_CACHE_CONFIG"],
+ "CACHE_DEFAULT_TIMEOUT": 1010,
+ },
+ },
+)
+def test_chart_cache_timeout_not_present(
+ test_client, login_as_admin, physical_query_context
+):
+ # should use datasource cache, if it's present
+
+ datasource: SqlaTable = (
+ db.session.query(SqlaTable)
+ .filter(SqlaTable.id == physical_query_context["datasource"]["id"])
+ .first()
+ )
+ datasource.cache_timeout = 1980
+ db.session.merge(datasource)
+ db.session.commit()
+
+ rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
+ assert rv.json["result"][0]["cache_timeout"] == 1980
+
+
[email protected](
+ "superset.common.query_context_processor.config",
+ {
+ **app.config,
+ "DATA_CACHE_CONFIG": {
+ **app.config["DATA_CACHE_CONFIG"],
+ "CACHE_DEFAULT_TIMEOUT": 1010,
+ },
+ },
+)
+def test_chart_cache_timeout_chart_not_found(
+ test_client, login_as_admin, physical_query_context
+):
+ # should use default timeout
+
+ physical_query_context["form_data"] = {"slice_id": 0}
+
+ rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
+ assert rv.json["result"][0]["cache_timeout"] == 1010
diff --git a/tests/integration_tests/fixtures/energy_dashboard.py b/tests/integration_tests/fixtures/energy_dashboard.py
index 0279fe8ff2..436ba1ce55 100644
--- a/tests/integration_tests/fixtures/energy_dashboard.py
+++ b/tests/integration_tests/fixtures/energy_dashboard.py
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
import random
-from typing import Dict, Set
+from typing import Dict, List, Set
import pandas as pd
import pytest
@@ -59,8 +59,8 @@ def load_energy_table_data():
@pytest.fixture()
def load_energy_table_with_slice(load_energy_table_data):
with app.app_context():
- _create_energy_table()
- yield
+ slices = _create_energy_table()
+ yield slices
_cleanup()
@@ -69,7 +69,7 @@ def _get_dataframe():
return pd.DataFrame.from_dict(data)
-def _create_energy_table():
+def _create_energy_table() -> List[Slice]:
table = create_table_metadata(
table_name=ENERGY_USAGE_TBL_NAME,
database=get_example_database(),
@@ -86,13 +86,17 @@ def _create_energy_table():
db.session.commit()
table.fetch_metadata()
+ slices = []
for slice_data in _get_energy_slices():
- _create_and_commit_energy_slice(
+
+ slice = _create_and_commit_energy_slice(
table,
slice_data["slice_title"],
slice_data["viz_type"],
slice_data["params"],
)
+ slices.append(slice)
+ return slices
def _create_and_commit_energy_slice(