aminghadersohi commented on code in PR #36529:
URL: https://github.com/apache/superset/pull/36529#discussion_r2611515282


##########
superset/sql/execution/executor.py:
##########
@@ -0,0 +1,996 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+SQL Executor implementation for Database.execute() and execute_async().
+
+This module provides the SQLExecutor class that implements the query execution
+methods defined in superset_core.api.models.Database.
+
+Implementation Features
+-----------------------
+
+Query Preparation (applies to both sync and async):
+- Jinja2 template rendering (via template_params in QueryOptions)
+- SQL mutation via SQL_QUERY_MUTATOR config hook
+- DML permission checking (requires database.allow_dml=True for DML)
+- Disallowed functions checking via DISALLOWED_SQL_FUNCTIONS config
+- Row-level security (RLS) via AST transformation (always applied)
+- Result limit application via SQL_MAX_ROW config
+- Catalog/schema resolution and validation
+
+Synchronous Execution (execute):
+- Multi-statement SQL parsing and execution
+- Progress tracking via Query model
+- Result caching via cache_manager.data_cache
+- Query logging via QUERY_LOGGER config hook
+- Timeout protection via SQLLAB_TIMEOUT config
+- Dry run mode (returns transformed SQL without execution)
+
+Asynchronous Execution (execute_async):
+- Celery task submission for background execution
+- Security validation before submission
+- Query model creation with PENDING status
+- Result caching check (returns cached if available)
+- Background execution with timeout via SQLLAB_ASYNC_TIME_LIMIT_SEC
+- Results stored in results backend for retrieval
+- Handle-based progress tracking and cancellation
+
+See Database.execute() and Database.execute_async() docstrings in
+superset_core.api.models for the public API contract.
+"""
+
+from __future__ import annotations
+
+import logging
+import time
+from datetime import datetime
+from typing import Any, TYPE_CHECKING
+
+from flask import current_app as app, g, has_app_context
+
+from superset import db
+from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
+from superset.exceptions import (
+    SupersetSecurityException,
+    SupersetTimeoutException,
+)
+from superset.extensions import cache_manager
+from superset.sql.parse import SQLScript
+from superset.utils import core as utils
+
+if TYPE_CHECKING:
+    from superset_core.api.types import (
+        AsyncQueryResult,
+        QueryOptions,
+        QueryResult,
+    )
+
+    from superset.models.core import Database
+    from superset.result_set import SupersetResultSet
+
+logger = logging.getLogger(__name__)
+
+
+def execute_sql_with_cursor(
+    database: Database,
+    cursor: Any,
+    statements: list[str],
+    query: Any,
+    log_query_fn: Any | None = None,
+    check_stopped_fn: Any | None = None,
+    execute_fn: Any | None = None,
+) -> SupersetResultSet | None:
+    """
+    Execute SQL statements with a cursor and return result set.
+
+    This is the shared execution logic used by both sync (SQLExecutor) and
+    async (celery_task) execution paths. It handles multi-statement execution
+    with progress tracking via the Query model.
+
+    :param database: Database model to execute against
+    :param cursor: Database cursor to use for execution
+    :param statements: List of SQL statements to execute
+    :param query: Query model for progress tracking
+    :param log_query_fn: Optional function to log queries, called as fn(sql, 
schema)
+    :param check_stopped_fn: Optional function to check if query was stopped.
+        Should return True if stopped. Used by async execution for 
cancellation.
+    :param execute_fn: Optional custom execute function. If not provided, uses
+        database.db_engine_spec.execute(cursor, sql, database). Custom function
+        should accept (cursor, sql) and handle execution.
+    :returns: SupersetResultSet from last statement, or None if stopped
+    """
+    from superset.result_set import SupersetResultSet
+
+    total = len(statements)
+    if total == 0:
+        return None
+
+    rows = None
+    description = None
+
+    for i, statement in enumerate(statements):
+        # Check if query was stopped (async cancellation)
+        if check_stopped_fn and check_stopped_fn():
+            return None
+
+        # Apply SQL mutation
+        stmt_sql = database.mutate_sql_based_on_config(
+            statement,
+            is_split=True,
+        )
+
+        # Log query
+        if log_query_fn:
+            log_query_fn(stmt_sql, query.schema)
+
+        # Execute - use custom function or default
+        if execute_fn:
+            execute_fn(cursor, stmt_sql)
+        else:
+            database.db_engine_spec.execute(cursor, stmt_sql, database)
+
+        # Fetch results from last statement only
+        if i == total - 1:
+            description = cursor.description
+            rows = database.db_engine_spec.fetch_data(cursor)
+        else:
+            cursor.fetchall()
+
+        # Update progress on Query model
+        progress_pct = int(((i + 1) / total) * 100)
+        query.progress = progress_pct
+        query.set_extra_json_key(
+            "progress",
+            f"Running statement {i + 1} of {total}",
+        )
+        db.session.commit()
+
+    # Build result set
+    if rows is not None and description is not None:
+        return SupersetResultSet(
+            rows,
+            description,
+            database.db_engine_spec,
+        )
+
+    return None
+
+
+class SQLExecutor:
+    """
+    SQL query executor implementation.
+
+    Implements Database.execute() and execute_async() methods.
+    See superset_core.api.models.Database for the full public API 
documentation.
+    """
+
+    def __init__(self, database: Database) -> None:
+        """
+        Initialize the executor with a database.
+
+        :param database: Database model instance to execute queries against
+        """
+        self.database = database
+
+    def execute(
+        self,
+        sql: str,
+        options: QueryOptions | None = None,
+    ) -> QueryResult:
+        """
+        Execute SQL synchronously.
+
+        If options.dry_run=True, returns the transformed SQL without execution.
+        All transformations (RLS, templates, limits) are still applied.
+
+        See superset_core.api.models.Database.execute() for full documentation.
+        """
+        from superset_core.api.types import (
+            QueryOptions as QueryOptionsType,
+            QueryResult as QueryResultType,
+            QueryStatus,
+        )
+
+        opts: QueryOptionsType = options or QueryOptionsType()
+        start_time = time.time()
+
+        try:
+            # 1. Prepare SQL (always runs - includes all transformations)
+            final_sql, script, catalog, schema = self._prepare_sql(sql, opts)
+
+            # DRY RUN: Return transformed SQL without execution
+            if opts.dry_run:
+                execution_time_ms = (time.time() - start_time) * 1000
+                return QueryResultType(
+                    status=QueryStatus.SUCCESS,
+                    data=None,
+                    row_count=0,
+                    query=final_sql,  # Transformed SQL (after RLS, templates, 
limits)
+                    query_id=None,  # No Query model created
+                    execution_time_ms=execution_time_ms,
+                    is_cached=False,
+                )
+
+            # 2. Check cache
+            cached_result = self._try_get_cached_result(script, final_sql, 
opts)
+            if cached_result:
+                return cached_result
+
+            # 3. Create Query model for audit
+            query = self._create_query_record(
+                final_sql, opts, catalog, schema, status="running"
+            )
+
+            # 4. Execute with timeout
+            timeout = opts.timeout_seconds or app.config.get("SQLLAB_TIMEOUT", 
30)
+            timeout_msg = f"Query exceeded the {timeout} seconds timeout."
+
+            with utils.timeout(seconds=timeout, error_message=timeout_msg):
+                df = self._execute_statements(
+                    final_sql,
+                    script,
+                    catalog,
+                    schema,
+                    query,
+                )
+
+            execution_time_ms = (time.time() - start_time) * 1000
+
+            # 5. Update query record
+            query.status = "success"
+            query.rows = len(df)
+            query.progress = 100
+            db.session.commit()
+
+            result = QueryResultType(
+                status=QueryStatus.SUCCESS,
+                data=df,
+                row_count=len(df),
+                query=final_sql,  # Transformed SQL (after RLS, templates, 
limits)
+                query_id=query.id,
+                execution_time_ms=execution_time_ms,
+            )
+
+            # 6. Store in cache (if SELECT and caching enabled)
+            if not script.has_mutation():
+                self._store_in_cache(result, final_sql, opts)
+
+            return result
+
+        except SupersetTimeoutException:
+            return self._create_error_result(
+                QueryStatus.TIMED_OUT,
+                "Query exceeded the timeout limit",
+                sql,
+                start_time,
+            )
+        except SupersetSecurityException as ex:
+            return self._create_error_result(
+                QueryStatus.FAILED, str(ex), sql, start_time
+            )
+        except Exception as ex:
+            error_msg = self.database.db_engine_spec.extract_error_message(ex)
+            return self._create_error_result(
+                QueryStatus.FAILED, error_msg, sql, start_time
+            )
+
+    def execute_async(
+        self,
+        sql: str,
+        options: QueryOptions | None = None,
+    ) -> AsyncQueryResult:
+        """
+        Execute SQL asynchronously via Celery.
+
+        If options.dry_run=True, returns the transformed SQL as a completed
+        AsyncQueryResult without submitting to Celery.
+
+        See superset_core.api.models.Database.execute_async() for full 
documentation.
+        """
+        from superset_core.api.types import (
+            QueryOptions as QueryOptionsType,
+            QueryResult as QueryResultType,
+            QueryStatus,
+        )
+
+        opts: QueryOptionsType = options or QueryOptionsType()
+
+        # 1. Prepare SQL (always runs - includes all transformations)
+        final_sql, script, catalog, schema = self._prepare_sql(sql, opts)
+
+        # DRY RUN: Return transformed SQL as completed async result
+        if opts.dry_run:
+            dry_run_result = QueryResultType(
+                status=QueryStatus.SUCCESS,
+                data=None,
+                row_count=0,
+                query=final_sql,  # Transformed SQL (after RLS, templates, 
limits)
+                query_id=None,
+                execution_time_ms=0,
+                is_cached=False,
+            )
+            return self._create_cached_async_result(dry_run_result)
+
+        # 2. Check cache
+        if cached_result := self._try_get_cached_result(script, final_sql, 
opts):
+            return self._create_cached_async_result(cached_result)
+
+        # 3. Create Query model for audit
+        query = self._create_query_record(
+            final_sql, opts, catalog, schema, status="pending"
+        )
+
+        # 4. Submit to Celery
+        self._submit_query_to_celery(query, final_sql, opts)
+
+        # 5. Create and return handle with bound methods
+        return self._create_async_query_handle(query.id)
+
+    def _prepare_sql(
+        self,
+        sql: str,
+        opts: QueryOptions,
+    ) -> tuple[str, SQLScript, str | None, str | None]:
+        """
+        Prepare SQL for execution (no side effects).
+
+        This method performs SQL preprocessing steps without creating any
+        database records. It can be used to prepare SQL for cache checks
+        before deciding to execute.
+
+        Steps performed:
+        1. Template rendering
+        2. SQL parsing
+        3. Security checks (DML, disallowed functions)
+        4. Catalog/schema resolution
+        5. RLS application
+        6. Limit application
+
+        :param sql: Original SQL query
+        :param opts: Query options
+        :returns: Tuple of (final_sql, script, catalog, schema)
+        :raises SupersetSecurityException: If DML not allowed or disallowed 
functions
+        """
+        # 1. Render Jinja2 templates
+        rendered_sql = self._render_sql_template(sql, opts.template_params)
+
+        # 2. Parse SQL with SQLScript
+        script = SQLScript(rendered_sql, self.database.db_engine_spec.engine)
+
+        # 3. Check DML permission
+        if script.has_mutation() and not self.database.allow_dml:
+            raise SupersetSecurityException(
+                SupersetError(
+                    message="DML queries are not allowed on this database",
+                    error_type=SupersetErrorType.DML_NOT_ALLOWED_ERROR,
+                    level=ErrorLevel.ERROR,
+                )
+            )
+
+        # 4. Check disallowed functions
+        if disallowed := self._check_disallowed_functions(script):
+            raise SupersetSecurityException(
+                SupersetError(
+                    message=f"Disallowed SQL functions: {', 
'.join(disallowed)}",
+                    error_type=SupersetErrorType.INVALID_SQL_ERROR,
+                    level=ErrorLevel.ERROR,
+                )
+            )
+
+        # 5. Get catalog and schema
+        catalog = opts.catalog or self.database.get_default_catalog()
+        schema = opts.schema or self.database.get_default_schema(catalog)
+
+        # 6. Apply RLS
+        rendered_sql = self._apply_rls_to_sql(rendered_sql, catalog, schema)

Review Comment:
   The script (SQLScript) is created from the pre-RLS SQL at step 2, but the
RLS transformation at step 6 can structurally modify the SQL (adding WHERE
clauses or subqueries). This creates a disconnect — script.has_mutation() and
the statement analysis operate on different SQL than what actually executes.

   Consider re-parsing after RLS application, or restructuring so the script
always reflects the final SQL that will be executed.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to