This is an automated email from the ASF dual-hosted git repository.
jli pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git
The following commit(s) were added to refs/heads/master by this push:
new 06e4f4ff4c9 fix(dashboard): catch DatasourceNotFound in get_datasets to prevent 404 (#37503)
06e4f4ff4c9 is described below
commit 06e4f4ff4c93e7ce552c43343d4cc10d611b459c
Author: Gabriel Torres Ruiz <[email protected]>
AuthorDate: Wed Jan 28 20:54:56 2026 -0300
fix(dashboard): catch DatasourceNotFound in get_datasets to prevent 404 (#37503)
---
superset/connectors/sqla/models.py | 9 +++++-
tests/unit_tests/connectors/sqla/models_test.py | 40 +++++++++++++++++++++++++
2 files changed, 48 insertions(+), 1 deletion(-)
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 0e3777ba4bf..be74a199672 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -76,6 +76,7 @@ from superset.connectors.sqla.utils import (
get_physical_table_metadata,
get_virtual_table_metadata,
)
+from superset.daos.exceptions import DatasourceNotFound
from superset.db_engine_specs.base import BaseEngineSpec, TimestampExpression
from superset.exceptions import (
ColumnNotFoundException,
@@ -513,7 +514,13 @@ class BaseDatasource(
# for legacy dashboard imports which have the wrong query_context in them
try:
query_context = slc.get_query_context()
- except DatasetNotFoundError:
+ except (DatasetNotFoundError, DatasourceNotFound):
+ logger.warning(
+ "Failed to load query_context for chart '%s' (id=%s): "
+ "referenced datasource not found",
+ slc.slice_name,
+ slc.id,
+ )
query_context = None
# legacy charts don't have query_context charts
diff --git a/tests/unit_tests/connectors/sqla/models_test.py b/tests/unit_tests/connectors/sqla/models_test.py
index f3079ca356c..9c4dd73997e 100644
--- a/tests/unit_tests/connectors/sqla/models_test.py
+++ b/tests/unit_tests/connectors/sqla/models_test.py
@@ -24,6 +24,7 @@ from sqlalchemy.orm.session import Session
from superset.connectors.sqla.models import SqlaTable, TableColumn
from superset.daos.dataset import DatasetDAO
+from superset.daos.exceptions import DatasourceNotFound
from superset.exceptions import OAuth2RedirectError
from superset.models.core import Database
from superset.sql.parse import Table
@@ -906,3 +907,42 @@ def test_sqla_table_link_escapes_url(mocker: MockerFixture) -> None:
# Verify that special characters are escaped in both name and URL
assert "&lt;script&gt;" in str(link)
assert "<script>" not in str(link)
+
+
+def test_data_for_slices_handles_missing_datasource(mocker: MockerFixture) -> None:
+ """
+ Test that data_for_slices gracefully handles a chart whose query_context
+ references a datasource that no longer exists.
+
+    When a chart's query_context references a deleted datasource, get_query_context()
+    raises DatasourceNotFound. The fix ensures this exception is caught and logged,
+    allowing the dashboard to load normally instead of returning a 404.
+ """
+ database = mocker.MagicMock()
+ database.id = 1
+
+ table = SqlaTable(
+ table_name="test_table",
+ database=database,
+ columns=[],
+ metrics=[],
+ )
+
+ # Create a mock slice whose get_query_context raises DatasourceNotFound
+ mock_slice = mocker.MagicMock()
+ mock_slice.id = 1
+ mock_slice.slice_name = "Test Chart"
+ mock_slice.form_data = {}
+ mock_slice.get_query_context.side_effect = DatasourceNotFound()
+
+ # Mock the columns and metrics properties to return empty lists
+ mocker.patch.object(SqlaTable, "columns", [])
+ mocker.patch.object(SqlaTable, "metrics", [])
+
+ # This should not raise an exception - the fix catches DatasourceNotFound
+ result = table.data_for_slices([mock_slice])
+
+ # Verify the method returns a valid data structure
+ assert "columns" in result
+ assert "metrics" in result
+ assert "verbose_map" in result