This is an automated email from the ASF dual-hosted git repository.

beto pushed a commit to branch db-diagnostics
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/db-diagnostics by this push:
     new 9b48d445fa WIP
9b48d445fa is described below

commit 9b48d445faafde21c77eac9dc8b7dc64834794ac
Author: Beto Dealmeida <[email protected]>
AuthorDate: Tue Jul 25 15:57:12 2023 -0700

    WIP
---
 superset/db_engine_specs/gsheets.py    |   7 +-
 superset/db_engine_specs/lib.py        |  57 +++++++++-----
 superset/db_engine_specs/shillelagh.py |  18 +++++
 superset/db_engine_specs/sqlite.py     | 131 ++++++++++++++++++++++++++++++++-
 4 files changed, 189 insertions(+), 24 deletions(-)

diff --git a/superset/db_engine_specs/gsheets.py b/superset/db_engine_specs/gsheets.py
index abf5bac48f..777499a8f9 100644
--- a/superset/db_engine_specs/gsheets.py
+++ b/superset/db_engine_specs/gsheets.py
@@ -32,7 +32,7 @@ from typing_extensions import TypedDict
 from superset import security_manager
 from superset.constants import PASSWORD_MASK
 from superset.databases.schemas import encrypted_field_properties, EncryptedString
-from superset.db_engine_specs.sqlite import SqliteEngineSpec
+from superset.db_engine_specs.shillelagh import ShillelaghEngineSpec
 from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
 
 if TYPE_CHECKING:
@@ -65,14 +65,13 @@ class GSheetsPropertiesType(TypedDict):
     catalog: dict[str, str]
 
 
-class GSheetsEngineSpec(SqliteEngineSpec):
+class GSheetsEngineSpec(ShillelaghEngineSpec):
     """Engine for Google spreadsheets"""
 
-    engine = "gsheets"
     engine_name = "Google Sheets"
+    engine = "gsheets"
     allows_joins = True
     allows_subqueries = True
-    disable_ssh_tunneling = True
 
     parameters_schema = GSheetsParametersSchema()
     default_driver = "apsw"
diff --git a/superset/db_engine_specs/lib.py b/superset/db_engine_specs/lib.py
index d786f63f49..48a5694f96 100644
--- a/superset/db_engine_specs/lib.py
+++ b/superset/db_engine_specs/lib.py
@@ -15,6 +15,8 @@
 # specific language governing permissions and limitations
 # under the License.
 
+from __future__ import annotations
+
 from typing import Any, List, Tuple, Type
 
 from superset.constants import TimeGrain
@@ -22,10 +24,26 @@ from superset.db_engine_specs import load_engine_specs
 from superset.db_engine_specs.base import BaseEngineSpec
 
 
+def has_custom_method(spec: Type[BaseEngineSpec], method: str) -> bool:
+    """
+    Check if a class has a custom implementation of a method.
+
+    Since some classes don't inherit directly from ``BaseEngineSpec`` we need
+    to check the attributes of the spec and the base class.
+    """
+    return bool(
+        getattr(spec, method, False)
+        and getattr(BaseEngineSpec, method, False)
+        and getattr(spec, method).__qualname__
+        != getattr(BaseEngineSpec, method).__qualname__
+    )
+
+
 def diagnose(spec: Type[BaseEngineSpec]) -> dict[str, Any]:
     """
     Run basic diagnostics on a given DB engine spec.
     """
+    # pylint: disable=import-outside-toplevel
     from superset.sql_validators.postgres import PostgreSQLValidator
     from superset.sql_validators.presto_db import PrestoDBSQLValidator
 
@@ -34,14 +52,12 @@ def diagnose(spec: Type[BaseEngineSpec]) -> dict[str, Any]:
         "postgresql": PostgreSQLValidator,
     }
 
-    output = {}
+    output: dict[str, Any] = {}
 
     output["time_grains"] = {}
-    supported_time_grain_expressions = spec.get_time_grain_expressions()
+    supported_time_grains = spec.get_time_grain_expressions()
     for time_grain in TimeGrain:
-        output["time_grains"][time_grain.name] = (
-            time_grain in supported_time_grain_expressions
-        )
+        output["time_grains"][time_grain.name] = time_grain in 
supported_time_grains
 
     output.update(
         {
@@ -61,33 +77,36 @@ def diagnose(spec: Type[BaseEngineSpec]) -> dict[str, Any]:
             "max_column_name": spec.max_column_name_length,
             "sql_comments": spec.allows_sql_comments,
             "escaped_colons": spec.allows_escaped_colons,
-            "masked_encrypted_extra": "mask_encrypted_extra" in spec.__dict__,
+            "masked_encrypted_extra": has_custom_method(spec, 
"mask_encrypted_extra"),
             "column_type_mapping": bool(spec.column_type_mappings),
-            "function_names": "get_function_names" in spec.__dict__,
+            "function_names": has_custom_method(spec, "get_function_names"),
             # there are multiple ways of implementing user impersonation
             "user_impersonation": (
-                "update_impersonation_config" in spec.__dict__
-                or "get_url_for_impersonation" in spec.__dict__
+                has_custom_method(spec, "update_impersonation_config")
+                or has_custom_method(spec, "get_url_for_impersonation")
             ),
             "file_upload": spec.supports_file_upload,
-            "extra_table_metadata": "extra_table_metadata" in spec.__dict__,
-            "dbapi_exception_mapping": "get_dbapi_exception_mappin" in 
spec.__dict__,
+            "extra_table_metadata": has_custom_method(spec, 
"extra_table_metadata"),
+            "dbapi_exception_mapping": has_custom_method(
+                spec, "get_dbapi_exception_mapping"
+            ),
             "custom_errors": (
-                "extract_errors" in spec.__dict__ or "custom_errors" in 
spec.__dict__
+                has_custom_method(spec, "extract_errors")
+                or has_custom_method(spec, "custom_errors")
             ),
             "dynamic_schema": spec.supports_dynamic_schema,
             "catalog": spec.supports_catalog,
             "dynamic_catalog": spec.supports_dynamic_catalog,
             "ssh_tunneling": not spec.disable_ssh_tunneling,
             "query_cancelation": (
-                "cancel_query" in spec.__dict__
-                or "has_implicit_cancel" in spec.__dict__
+                has_custom_method(spec, "cancel_query")
+                or has_custom_method(spec, "has_implicit_cancel")
             ),
-            "get_metrics": "get_metrics" in spec.__dict__,
-            "where_latest_partition": "where_latest_partition" in 
spec.__dict__,
-            "expand_data": "expand_data" in spec.__dict__,
-            "query_cost_estimation": "estimate_query_cost" in spec.__dict__
-            or "estimate_statement_cost" in spec.__dict__,
+            "get_metrics": has_custom_method(spec, "get_metrics"),
+            "where_latest_partition": has_custom_method(spec, 
"where_latest_partition"),
+            "expand_data": has_custom_method(spec, "expand_data"),
+            "query_cost_estimation": has_custom_method(spec, 
"estimate_query_cost")
+            or has_custom_method(spec, "estimate_statement_cost"),
             # SQL validation is implemented in external classes
             "sql_validation": spec.engine in sql_validators,
         },
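
The has_custom_method helper added above compares __qualname__ so that an override still counts as custom when a spec inherits it from an intermediate class, which the old "in spec.__dict__" checks missed. A small sketch of the behaviour, assuming this patch is applied and that GSheetsEngineSpec does not itself override get_function_names (not part of the patch):

    # Sketch, not part of this commit: the __dict__ check misses inherited
    # overrides, while the __qualname__ comparison in has_custom_method does not.
    from superset.db_engine_specs.gsheets import GSheetsEngineSpec
    from superset.db_engine_specs.lib import has_custom_method
    from superset.db_engine_specs.sqlite import SqliteEngineSpec

    # GSheetsEngineSpec inherits get_function_names from ShillelaghEngineSpec,
    # so the name is not in its own __dict__...
    assert "get_function_names" not in GSheetsEngineSpec.__dict__
    # ...but has_custom_method still reports it as custom, because the resolved
    # method is not BaseEngineSpec.get_function_names.
    assert has_custom_method(GSheetsEngineSpec, "get_function_names")
    # Methods left untouched all the way up still report False.
    assert not has_custom_method(SqliteEngineSpec, "mask_encrypted_extra")
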
diff --git a/superset/db_engine_specs/shillelagh.py b/superset/db_engine_specs/shillelagh.py
index 3730122448..61820824b0 100644
--- a/superset/db_engine_specs/shillelagh.py
+++ b/superset/db_engine_specs/shillelagh.py
@@ -14,8 +14,15 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 from superset.db_engine_specs.sqlite import SqliteEngineSpec
 
+if TYPE_CHECKING:
+    from superset.models.core import Database
+
 
 class ShillelaghEngineSpec(SqliteEngineSpec):
     """Engine for shillelagh"""
@@ -28,3 +35,14 @@ class ShillelaghEngineSpec(SqliteEngineSpec):
 
     allows_joins = True
     allows_subqueries = True
+
+    @classmethod
+    def get_function_names(
+        cls,
+        database: Database,
+    ) -> list[str]:
+        return super().get_function_names(database) + [
+            "sleep",
+            "version",
+            "get_metadata",
+        ]
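
The override above extends the SQLite built-ins (added in the sqlite.py hunk below) with shillelagh's own SQL functions. A sketch of what a caller sees; the database argument is unused by these implementations, so None is passed here purely for illustration (not part of the patch):

    # Sketch, not part of this commit: the shillelagh spec returns the SQLite
    # built-ins plus the three shillelagh-specific functions.
    from superset.db_engine_specs.shillelagh import ShillelaghEngineSpec
    from superset.db_engine_specs.sqlite import SqliteEngineSpec

    names = ShillelaghEngineSpec.get_function_names(None)  # type: ignore[arg-type]
    assert set(names) >= set(SqliteEngineSpec.get_function_names(None))  # type: ignore[arg-type]
    assert {"sleep", "version", "get_metadata"} <= set(names)
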
diff --git a/superset/db_engine_specs/sqlite.py b/superset/db_engine_specs/sqlite.py
index 49b9cdb2e3..e7f053da21 100644
--- a/superset/db_engine_specs/sqlite.py
+++ b/superset/db_engine_specs/sqlite.py
@@ -14,6 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
+from __future__ import annotations
+
 import re
 from datetime import datetime
 from re import Pattern
@@ -39,6 +42,8 @@ class SqliteEngineSpec(BaseEngineSpec):
     engine = "sqlite"
     engine_name = "SQLite"
 
+    disable_ssh_tunneling = True
+
     _time_grain_expressions = {
         None: "{col}",
         TimeGrain.SECOND: "DATETIME(STRFTIME('%Y-%m-%dT%H:%M:%S', {col}))",
@@ -121,7 +126,131 @@ class SqliteEngineSpec(BaseEngineSpec):
 
     @classmethod
     def get_table_names(
-        cls, database: "Database", inspector: Inspector, schema: Optional[str]
+        cls, database: Database, inspector: Inspector, schema: Optional[str]
     ) -> set[str]:
         """Need to disregard the schema for Sqlite"""
         return set(inspector.get_table_names())
+
+    @classmethod
+    def get_function_names(
+        cls,
+        database: Database,
+    ) -> list[str]:
+        """
+        Return function names.
+        """
+        return [
+            "abs",
+            "acos",
+            "acosh",
+            "asin",
+            "asinh",
+            "atan",
+            "atan2",
+            "atanh",
+            "avg",
+            "ceil",
+            "ceiling",
+            "changes",
+            "char",
+            "coalesce",
+            "cos",
+            "cosh",
+            "count",
+            "cume_dist",
+            "date",
+            "datetime",
+            "degrees",
+            "dense_rank",
+            "exp",
+            "first_value",
+            "floor",
+            "format",
+            "glob",
+            "group_concat",
+            "hex",
+            "ifnull",
+            "iif",
+            "instr",
+            "json",
+            "json_array",
+            "json_array_length",
+            "json_each",
+            "json_error_position",
+            "json_extract",
+            "json_group_array",
+            "json_group_object",
+            "json_insert",
+            "json_object",
+            "json_patch",
+            "json_quote",
+            "json_remove",
+            "json_replace",
+            "json_set",
+            "json_tree",
+            "json_type",
+            "json_valid",
+            "julianday",
+            "lag",
+            "last_insert_rowid",
+            "last_value",
+            "lead",
+            "length",
+            "like",
+            "likelihood",
+            "likely",
+            "ln",
+            "load_extension",
+            "log",
+            "log10",
+            "log2",
+            "lower",
+            "ltrim",
+            "max",
+            "min",
+            "mod",
+            "nth_value",
+            "ntile",
+            "nullif",
+            "percent_rank",
+            "pi",
+            "pow",
+            "power",
+            "printf",
+            "quote",
+            "radians",
+            "random",
+            "randomblob",
+            "rank",
+            "replace",
+            "round",
+            "row_number",
+            "rtrim",
+            "sign",
+            "sin",
+            "sinh",
+            "soundex",
+            "sqlite_compileoption_get",
+            "sqlite_compileoption_used",
+            "sqlite_offset",
+            "sqlite_source_id",
+            "sqlite_version",
+            "sqrt",
+            "strftime",
+            "substr",
+            "substring",
+            "sum",
+            "tan",
+            "tanh",
+            "time",
+            "total_changes",
+            "trim",
+            "trunc",
+            "typeof",
+            "unhex",
+            "unicode",
+            "unixepoch",
+            "unlikely",
+            "upper",
+            "zeroblob",
+        ]
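
As a side note, the long list in the sqlite.py hunk hard-codes SQLite's built-in function names. On SQLite builds with the function_list pragma enabled (3.30+ by default), the same information can be read at runtime, which is one way to sanity-check the static list; a sketch under that assumption, not part of the patch:

    # Sketch, not part of this commit: cross-check the static list against the
    # functions the local SQLite build actually exposes. pragma_function_list
    # is assumed to be available (SQLite 3.30+).
    import sqlite3

    from superset.db_engine_specs.sqlite import SqliteEngineSpec

    conn = sqlite3.connect(":memory:")
    available = {row[0] for row in conn.execute("SELECT DISTINCT name FROM pragma_function_list")}
    conn.close()

    static = set(SqliteEngineSpec.get_function_names(None))  # argument is unused here
    print(sorted(static - available))  # listed in the spec but not exposed by this build
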
