This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 5.0-pulse
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 2eb9f4ca080cdb8ff6a817ffc3eba8d5e613a9df
Author: Daniel Vaz Gaspar <[email protected]>
AuthorDate: Wed Oct 8 12:22:38 2025 +0100

    fix: dataset update with invalid SQL query (#35543)
    
    (cherry picked from commit 50a5854b250fc886647d1ac2f6f6d73326c9b40a)
---
 .../SqlLab/components/SaveDatasetModal/index.tsx   | 70 +++++++++++++---------
 superset/commands/dataset/update.py                | 57 +++++++++++++++++-
 2 files changed, 95 insertions(+), 32 deletions(-)

diff --git a/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx b/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx
index ab17556a72..7d02144528 100644
--- a/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx
+++ b/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx
@@ -33,6 +33,7 @@ import {
   QueryResponse,
   QueryFormData,
   VizType,
+  getClientErrorObject,
 } from '@superset-ui/core';
 import { useSelector, useDispatch } from 'react-redux';
 import dayjs from 'dayjs';
@@ -204,39 +205,50 @@ export const SaveDatasetModal = ({
     }
     setLoading(true);
 
-    const [, key] = await Promise.all([
-      updateDataset(
-        datasource?.dbId,
-        datasetToOverwrite?.datasetid,
-        datasource?.sql,
-        datasource?.columns?.map(
-          (d: { column_name: string; type: string; is_dttm: boolean }) => ({
-            column_name: d.column_name,
-            type: d.type,
-            is_dttm: d.is_dttm,
-          }),
+    try {
+      const [, key] = await Promise.all([
+        updateDataset(
+          datasource?.dbId,
+          datasetToOverwrite?.datasetid,
+          datasource?.sql,
+          datasource?.columns?.map(
+            (d: { column_name: string; type: string; is_dttm: boolean }) => ({
+              column_name: d.column_name,
+              type: d.type,
+              is_dttm: d.is_dttm,
+            }),
+          ),
+          datasetToOverwrite?.owners?.map((o: DatasetOwner) => o.id),
+          true,
         ),
-        datasetToOverwrite?.owners?.map((o: DatasetOwner) => o.id),
-        true,
-      ),
-      postFormData(datasetToOverwrite.datasetid, 'table', {
-        ...formDataWithDefaults,
-        datasource: `${datasetToOverwrite.datasetid}__table`,
-        ...(defaultVizType === VizType.Table && {
-          all_columns: datasource?.columns?.map(column => column.column_name),
+        postFormData(datasetToOverwrite.datasetid, 'table', {
+          ...formDataWithDefaults,
+          datasource: `${datasetToOverwrite.datasetid}__table`,
+          ...(defaultVizType === VizType.Table && {
+            all_columns: datasource?.columns?.map(column => column.column_name),
+          }),
         }),
-      }),
-    ]);
-    setLoading(false);
+      ]);
+      setLoading(false);
 
-    const url = mountExploreUrl(null, {
-      [URL_PARAMS.formDataKey.name]: key,
-    });
-    createWindow(url);
+      const url = mountExploreUrl(null, {
+        [URL_PARAMS.formDataKey.name]: key,
+      });
+      createWindow(url);
 
-    setShouldOverwriteDataset(false);
-    setDatasetName(getDefaultDatasetName());
-    onHide();
+      setShouldOverwriteDataset(false);
+      setDatasetName(getDefaultDatasetName());
+      onHide();
+    } catch (error) {
+      setLoading(false);
+      getClientErrorObject(error).then((e: { error: string }) => {
+        dispatch(
+          addDangerToast(
+            e.error || t('An error occurred while overwriting the dataset'),
+          ),
+        );
+      });
+    }
   };
 
   const loadDatasetOverwriteOptions = useCallback(
diff --git a/superset/commands/dataset/update.py b/superset/commands/dataset/update.py
index 2772cc0ffa..4900b0caac 100644
--- a/superset/commands/dataset/update.py
+++ b/superset/commands/dataset/update.py
@@ -17,7 +17,7 @@
 import logging
 from collections import Counter
 from functools import partial
-from typing import Any, Optional
+from typing import Any, cast, Optional
 
 from flask_appbuilder.models.sqla import Model
 from marshmallow import ValidationError
@@ -27,9 +27,11 @@ from superset import security_manager
 from superset.commands.base import BaseCommand, UpdateMixin
 from superset.commands.dataset.exceptions import (
     DatabaseChangeValidationError,
+    DatabaseNotFoundValidationError,
     DatasetColumnNotFoundValidationError,
     DatasetColumnsDuplicateValidationError,
     DatasetColumnsExistsValidationError,
+    DatasetDataAccessIsNotAllowed,
     DatasetExistsValidationError,
     DatasetForbiddenError,
     DatasetInvalidError,
@@ -41,8 +43,9 @@ from superset.commands.dataset.exceptions import (
 )
 from superset.connectors.sqla.models import SqlaTable
 from superset.daos.dataset import DatasetDAO
-from superset.exceptions import SupersetSecurityException
-from superset.sql_parse import Table
+from superset.exceptions import SupersetParseError, SupersetSecurityException
+from superset.models.core import Database
+from superset.sql.parse import Table
 from superset.utils.decorators import on_error, transaction
 
 logger = logging.getLogger(__name__)
@@ -99,12 +102,27 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
                 self._model.database.get_default_catalog()
             )
 
+        new_db_connection: Database | None = None
+
+        if database_id and database_id != self._model.database.id:
+            if new_db_connection := DatasetDAO.get_database_by_id(database_id):
+                self._properties["database"] = new_db_connection
+            else:
+                exceptions.append(DatabaseNotFoundValidationError())
+        db = new_db_connection or self._model.database
+
         table = Table(
             self._properties.get("table_name"),  # type: ignore
             self._properties.get("schema"),
             catalog,
         )
 
+        schema = (
+            self._properties["schema"]
+            if "schema" in self._properties
+            else self._model.schema
+        )
+
         # Validate uniqueness
         if not DatasetDAO.validate_update_uniqueness(
             self._model.database,
@@ -127,6 +145,9 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
         except ValidationError as ex:
             exceptions.append(ex)
 
+        # Validate SQL access
+        self._validate_sql_access(db, catalog, schema, exceptions)
+
         # Validate columns
         if columns := self._properties.get("columns"):
             self._validate_columns(columns, exceptions)
@@ -138,6 +159,36 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
         if exceptions:
             raise DatasetInvalidError(exceptions=exceptions)
 
+    def _validate_sql_access(
+        self,
+        db: Database,
+        catalog: str | None,
+        schema: str | None,
+        exceptions: list[ValidationError],
+    ) -> None:
+        """Validate SQL query access if SQL is being updated."""
+        # we know we have a valid model
+        self._model = cast(SqlaTable, self._model)
+
+        sql = self._properties.get("sql")
+        if sql and sql != self._model.sql:
+            try:
+                security_manager.raise_for_access(
+                    database=db,
+                    sql=sql,
+                    catalog=catalog,
+                    schema=schema,
+                )
+            except SupersetSecurityException as ex:
+                exceptions.append(DatasetDataAccessIsNotAllowed(ex.error.message))
+            except SupersetParseError as ex:
+                exceptions.append(
+                    ValidationError(
+                        f"Invalid SQL: {ex.error.message}",
+                        field_name="sql",
+                    )
+                )
+
     def _validate_columns(
         self, columns: list[dict[str, Any]], exceptions: list[ValidationError]
     ) -> None:
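
For readers following the backend half of this fix: the new _validate_sql_access
folds SQL parse and access failures into the command's validation-error list
instead of letting them escape as unhandled exceptions, so a dataset update with
invalid SQL now fails with a proper validation response. Below is a minimal,
self-contained sketch of that pattern. The stubs for raise_for_access and
SupersetParseError are illustrative stand-ins, not Superset's actual plumbing;
only the marshmallow ValidationError signature is real API.

    from marshmallow import ValidationError


    class SupersetParseError(Exception):
        """Stand-in for superset.exceptions.SupersetParseError."""

        def __init__(self, message: str) -> None:
            super().__init__(message)
            self.message = message


    def raise_for_access(sql: str) -> None:
        """Toy parser/authorizer: reject anything that is not a bare SELECT."""
        if not sql.lstrip().lower().startswith("select"):
            raise SupersetParseError("unexpected token at start of statement")


    def validate_sql_update(sql: str | None, current_sql: str) -> list[ValidationError]:
        """Mirror _validate_sql_access: collect errors rather than raising."""
        exceptions: list[ValidationError] = []
        if sql and sql != current_sql:
            try:
                raise_for_access(sql)
            except SupersetParseError as ex:
                exceptions.append(
                    ValidationError(f"Invalid SQL: {ex.message}", field_name="sql")
                )
        return exceptions


    # Prints one "Invalid SQL" validation error for the malformed statement.
    print(validate_sql_update("SELEC * FRM t", "SELECT 1"))

Collecting errors this way lets the command report every problem at once via
DatasetInvalidError, and the frontend's new try/catch surfaces the resulting
message as a danger toast instead of failing silently.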
