This is an automated email from the ASF dual-hosted git repository.
amitmiran pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git
The following commit(s) were added to refs/heads/master by this push:
new c520eb7 refactor: sqllab: move sqllab related enums and utils to
more logical place (#16843)
c520eb7 is described below
commit c520eb79b03e533bc189fad4786b09b9435520dd
Author: ofekisr <[email protected]>
AuthorDate: Sun Sep 26 21:15:57 2021 +0300
refactor: sqllab: move sqllab related enums and utils to more logical
place (#16843)
* refactor move QueryStatus to common
* refactor move apply_display_max_row_limit to sqllab package
* refactor move limiting_factor to sqllab package
* fix pylint issues
---
superset/common/db_query_status.py | 30 ++++++++++++++++++
superset/common/query_actions.py | 2 +-
superset/common/query_context.py | 2 +-
superset/common/utils.py | 3 +-
superset/connectors/sqla/models.py | 9 +++---
superset/db_engine_specs/base.py | 2 --
superset/db_engine_specs/druid.py | 2 +-
superset/db_engine_specs/hive.py | 2 +-
superset/db_engine_specs/presto.py | 2 +-
superset/models/helpers.py | 2 +-
superset/models/sql_lab.py | 10 +-----
superset/sql_lab.py | 11 +++----
superset/sqllab/command.py | 11 ++++---
superset/sqllab/limiting_factor.py | 25 +++++++++++++++
superset/sqllab/utils.py | 47 ++++++++++++++++++++++++++++
superset/utils/sqllab_execution_context.py | 1 -
superset/views/core.py | 9 +++---
superset/views/utils.py | 3 +-
superset/viz.py | 17 +++++-----
tests/integration_tests/cache_tests.py | 2 +-
tests/integration_tests/celery_tests.py | 2 +-
tests/integration_tests/core_tests.py | 17 +++++-----
tests/integration_tests/model_tests.py | 3 +-
tests/integration_tests/queries/api_tests.py | 3 +-
tests/integration_tests/sqllab_tests.py | 3 +-
25 files changed, 160 insertions(+), 60 deletions(-)
diff --git a/superset/common/db_query_status.py
b/superset/common/db_query_status.py
new file mode 100644
index 0000000..82bb437
--- /dev/null
+++ b/superset/common/db_query_status.py
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from enum import Enum
+
+
+class QueryStatus(str, Enum):
+ """Enum-type class for query statuses"""
+
+ STOPPED: str = "stopped"
+ FAILED: str = "failed"
+ PENDING: str = "pending"
+ RUNNING: str = "running"
+ SCHEDULED: str = "scheduled"
+ SUCCESS: str = "success"
+ FETCHING: str = "fetching"
+ TIMED_OUT: str = "timed_out"
diff --git a/superset/common/query_actions.py b/superset/common/query_actions.py
index d0058bd..86a687f 100644
--- a/superset/common/query_actions.py
+++ b/superset/common/query_actions.py
@@ -20,6 +20,7 @@ from typing import Any, Callable, cast, Dict, List, Optional,
TYPE_CHECKING
from flask_babel import _
from superset import app
+from superset.common.db_query_status import QueryStatus
from superset.connectors.base.models import BaseDatasource
from superset.exceptions import QueryObjectValidationError
from superset.utils.core import (
@@ -28,7 +29,6 @@ from superset.utils.core import (
extract_dataframe_dtypes,
ExtraFiltersReasonType,
get_time_filter_status,
- QueryStatus,
)
if TYPE_CHECKING:
diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index 17ba7c4..9f1f4bf 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -29,6 +29,7 @@ from typing_extensions import TypedDict
from superset import app, db, is_feature_enabled
from superset.annotation_layers.dao import AnnotationLayerDAO
from superset.charts.dao import ChartDAO
+from superset.common.db_query_status import QueryStatus
from superset.common.query_actions import get_query_results
from superset.common.query_object import QueryObject
from superset.common.utils import QueryCacheManager
@@ -49,7 +50,6 @@ from superset.utils.core import (
get_column_names_from_metrics,
get_metric_names,
normalize_dttm_col,
- QueryStatus,
TIME_COMPARISION,
)
from superset.utils.date_parser import get_past_or_future, normalize_time_delta
diff --git a/superset/common/utils.py b/superset/common/utils.py
index ab83b84..77a6bab 100644
--- a/superset/common/utils.py
+++ b/superset/common/utils.py
@@ -21,13 +21,14 @@ from flask_caching import Cache
from pandas import DataFrame
from superset import app
+from superset.common.db_query_status import QueryStatus
from superset.constants import CacheRegion
from superset.exceptions import CacheLoadError
from superset.extensions import cache_manager
from superset.models.helpers import QueryResult
from superset.stats_logger import BaseStatsLogger
from superset.utils.cache import set_and_log_cache
-from superset.utils.core import error_msg_from_exception, get_stacktrace,
QueryStatus
+from superset.utils.core import error_msg_from_exception, get_stacktrace
config = app.config
stats_logger: BaseStatsLogger = config["STATS_LOGGER"]
diff --git a/superset/connectors/sqla/models.py
b/superset/connectors/sqla/models.py
index c87b4c2..7cebb8b 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -70,6 +70,7 @@ from sqlalchemy.sql.expression import Label, Select,
TextAsFrom, TextClause
from sqlalchemy.sql.selectable import Alias, TableClause
from superset import app, db, is_feature_enabled, security_manager
+from superset.common.db_query_status import QueryStatus
from superset.connectors.base.models import BaseColumn, BaseDatasource,
BaseMetric
from superset.connectors.sqla.utils import (
get_physical_table_metadata,
@@ -151,12 +152,12 @@ class AnnotationDatasource(BaseDatasource):
qry = qry.filter(Annotation.start_dttm >= query_obj["from_dttm"])
if query_obj["to_dttm"]:
qry = qry.filter(Annotation.end_dttm <= query_obj["to_dttm"])
- status = utils.QueryStatus.SUCCESS
+ status = QueryStatus.SUCCESS
try:
df = pd.read_sql_query(qry.statement, db.engine)
except Exception as ex: # pylint: disable=broad-except
df = pd.DataFrame()
- status = utils.QueryStatus.FAILED
+ status = QueryStatus.FAILED
logger.exception(ex)
error_message = utils.error_msg_from_exception(ex)
return QueryResult(
@@ -1444,7 +1445,7 @@ class SqlaTable(Model, BaseDatasource): # pylint:
disable=too-many-public-metho
qry_start_dttm = datetime.now()
query_str_ext = self.get_query_str_extended(query_obj)
sql = query_str_ext.sql
- status = utils.QueryStatus.SUCCESS
+ status = QueryStatus.SUCCESS
errors = None
error_message = None
@@ -1477,7 +1478,7 @@ class SqlaTable(Model, BaseDatasource): # pylint:
disable=too-many-public-metho
df = self.database.get_df(sql, self.schema,
mutator=assign_column_label)
except Exception as ex: # pylint: disable=broad-except
df = pd.DataFrame()
- status = utils.QueryStatus.FAILED
+ status = QueryStatus.FAILED
logger.warning(
"Query %s on schema %s failed", sql, self.schema, exc_info=True
)
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index 5b3790a..74b451e 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -82,8 +82,6 @@ class TimeGrain(NamedTuple):
duration: Optional[str]
-QueryStatus = utils.QueryStatus
-
builtin_time_grains: Dict[Optional[str], str] = {
None: __("Original value"),
"PT1S": __("Second"),
diff --git a/superset/db_engine_specs/druid.py
b/superset/db_engine_specs/druid.py
index 0230a8c..58545d4 100644
--- a/superset/db_engine_specs/druid.py
+++ b/superset/db_engine_specs/druid.py
@@ -31,7 +31,7 @@ if TYPE_CHECKING:
logger = logging.getLogger()
-class DruidEngineSpec(BaseEngineSpec): # pylint: disable=abstract-method
+class DruidEngineSpec(BaseEngineSpec):
"""Engine spec for Druid.io"""
engine = "druid"
diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py
index e33012e..b6c3bff 100644
--- a/superset/db_engine_specs/hive.py
+++ b/superset/db_engine_specs/hive.py
@@ -35,6 +35,7 @@ from sqlalchemy.engine.url import make_url, URL
from sqlalchemy.orm import Session
from sqlalchemy.sql.expression import ColumnClause, Select
+from superset.common.db_query_status import QueryStatus
from superset.db_engine_specs.base import BaseEngineSpec
from superset.db_engine_specs.presto import PrestoEngineSpec
from superset.exceptions import SupersetException
@@ -48,7 +49,6 @@ if TYPE_CHECKING:
from superset.models.core import Database
-QueryStatus = utils.QueryStatus
logger = logging.getLogger(__name__)
diff --git a/superset/db_engine_specs/presto.py
b/superset/db_engine_specs/presto.py
index 783dde7..9be7304 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -52,6 +52,7 @@ from sqlalchemy.sql.expression import ColumnClause, Select
from sqlalchemy.types import TypeEngine
from superset import cache_manager, is_feature_enabled
+from superset.common.db_query_status import QueryStatus
from superset.db_engine_specs.base import BaseEngineSpec
from superset.errors import SupersetErrorType
from superset.exceptions import SupersetTemplateException
@@ -95,7 +96,6 @@ CONNECTION_UNKNOWN_DATABASE_ERROR = re.compile(
)
-QueryStatus = utils.QueryStatus
logger = logging.getLogger(__name__)
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 580c906..30d5ab9 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -38,7 +38,7 @@ from sqlalchemy.orm import Mapper, Session
from sqlalchemy.orm.exc import MultipleResultsFound
from sqlalchemy_utils import UUIDType
-from superset.utils.core import QueryStatus
+from superset.common.db_query_status import QueryStatus
logger = logging.getLogger(__name__)
diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py
index 17c72db..ef0f34e 100644
--- a/superset/models/sql_lab.py
+++ b/superset/models/sql_lab.py
@@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
"""A collection of ORM sqlalchemy models for SQL Lab"""
-import enum
import re
from datetime import datetime
from typing import Any, Dict, List
@@ -48,17 +47,10 @@ from superset.models.helpers import (
)
from superset.models.tags import QueryUpdater
from superset.sql_parse import CtasMethod, ParsedQuery, Table
+from superset.sqllab.limiting_factor import LimitingFactor
from superset.utils.core import QueryStatus, user_label
-class LimitingFactor(str, enum.Enum):
- QUERY = "QUERY"
- DROPDOWN = "DROPDOWN"
- QUERY_AND_DROPDOWN = "QUERY_AND_DROPDOWN"
- NOT_LIMITED = "NOT_LIMITED"
- UNKNOWN = "UNKNOWN"
-
-
class Query(Model, ExtraJSONMixin):
"""ORM model for SQL query
diff --git a/superset/sql_lab.py b/superset/sql_lab.py
index ec0b5d0..f5c9e42 100644
--- a/superset/sql_lab.py
+++ b/superset/sql_lab.py
@@ -32,21 +32,18 @@ from flask_babel import gettext as __
from sqlalchemy.orm import Session
from superset import app, results_backend, results_backend_use_msgpack,
security_manager
+from superset.common.db_query_status import QueryStatus
from superset.dataframe import df_to_records
from superset.db_engine_specs import BaseEngineSpec
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SupersetErrorException, SupersetErrorsException
from superset.extensions import celery_app
-from superset.models.sql_lab import LimitingFactor, Query
+from superset.models.sql_lab import Query
from superset.result_set import SupersetResultSet
from superset.sql_parse import CtasMethod, ParsedQuery
+from superset.sqllab.limiting_factor import LimitingFactor
from superset.utils.celery import session_scope
-from superset.utils.core import (
- json_iso_dttm_ser,
- QuerySource,
- QueryStatus,
- zlib_compress,
-)
+from superset.utils.core import json_iso_dttm_ser, QuerySource, zlib_compress
from superset.utils.dates import now_as_float
from superset.utils.decorators import stats_timing
diff --git a/superset/sqllab/command.py b/superset/sqllab/command.py
index c984de8..ea4fb45 100644
--- a/superset/sqllab/command.py
+++ b/superset/sqllab/command.py
@@ -31,6 +31,7 @@ from sqlalchemy.orm.session import Session
from superset import app, db, is_feature_enabled, sql_lab
from superset.commands.base import BaseCommand
+from superset.common.db_query_status import QueryStatus
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
SupersetErrorException,
@@ -43,16 +44,16 @@ from superset.exceptions import (
)
from superset.jinja_context import BaseTemplateProcessor,
get_template_processor
from superset.models.core import Database
-from superset.models.sql_lab import LimitingFactor, Query
+from superset.models.sql_lab import Query
from superset.queries.dao import QueryDAO
from superset.sqllab.command_status import SqlJsonExecutionStatus
+from superset.sqllab.limiting_factor import LimitingFactor
+from superset.sqllab.utils import
apply_display_max_row_configuration_if_require
from superset.utils import core as utils
from superset.utils.dates import now_as_float
from superset.utils.sqllab_execution_context import SqlJsonExecutionContext
-from superset.views.utils import apply_display_max_row_limit
config = app.config
-QueryStatus = utils.QueryStatus
logger = logging.getLogger(__name__)
PARAMETER_MISSING_ERR = (
@@ -397,7 +398,9 @@ class ExecuteSqlCommand(BaseCommand):
) -> str:
display_max_row = config["DISPLAY_MAX_ROW"]
return json.dumps(
- apply_display_max_row_limit(execution_result, display_max_row),
+ apply_display_max_row_configuration_if_require(
+ execution_result, display_max_row
+ ),
default=utils.pessimistic_json_iso_dttm_ser,
ignore_nan=True,
encoding=None,
diff --git a/superset/sqllab/limiting_factor.py
b/superset/sqllab/limiting_factor.py
new file mode 100644
index 0000000..46cbc9b
--- /dev/null
+++ b/superset/sqllab/limiting_factor.py
@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import enum
+
+
+class LimitingFactor(str, enum.Enum):
+ QUERY = "QUERY"
+ DROPDOWN = "DROPDOWN"
+ QUERY_AND_DROPDOWN = "QUERY_AND_DROPDOWN"
+ NOT_LIMITED = "NOT_LIMITED"
+ UNKNOWN = "UNKNOWN"
diff --git a/superset/sqllab/utils.py b/superset/sqllab/utils.py
new file mode 100644
index 0000000..8181b5b
--- /dev/null
+++ b/superset/sqllab/utils.py
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any, Dict
+
+from superset.common.db_query_status import QueryStatus
+
+
+def apply_display_max_row_configuration_if_require( # pylint:
disable=invalid-name
+ sql_results: Dict[str, Any], max_rows_in_result: int
+) -> Dict[str, Any]:
+ """
+ Given a `sql_results` nested structure, applies a limit to the number of
rows
+
+ `sql_results` here is the nested structure coming out of
sql_lab.get_sql_results, it
+ contains metadata about the query, as well as the data set returned by the
query.
+ This method limits the number of rows adds a `displayLimitReached: True`
flag to the
+ metadata.
+
+ :param max_rows_in_result:
+ :param sql_results: The results of a sql query from sql_lab.get_sql_results
+ :returns: The mutated sql_results structure
+ """
+
+ def is_require_to_apply() -> bool:
+ return (
+ sql_results["status"] == QueryStatus.SUCCESS
+ and sql_results["query"]["rows"] > max_rows_in_result
+ )
+
+ if is_require_to_apply():
+ sql_results["data"] = sql_results["data"][:max_rows_in_result]
+ sql_results["displayLimitReached"] = True
+ return sql_results
diff --git a/superset/utils/sqllab_execution_context.py
b/superset/utils/sqllab_execution_context.py
index 09ae33d..c8cc344 100644
--- a/superset/utils/sqllab_execution_context.py
+++ b/superset/utils/sqllab_execution_context.py
@@ -34,7 +34,6 @@ from superset.views.utils import get_cta_schema_name
if TYPE_CHECKING:
from superset.connectors.sqla.models import Database
-QueryStatus = utils.QueryStatus
logger = logging.getLogger(__name__)
SqlResults = Dict[str, Any]
diff --git a/superset/views/core.py b/superset/views/core.py
index ba5495d..7bdd77b 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -60,6 +60,7 @@ from superset import (
viz,
)
from superset.charts.dao import ChartDAO
+from superset.common.db_query_status import QueryStatus
from superset.connectors.base.models import BaseDatasource
from superset.connectors.connector_registry import ConnectorRegistry
from superset.connectors.sqla.models import (
@@ -92,13 +93,15 @@ from superset.models.core import Database, FavStar, Log
from superset.models.dashboard import Dashboard
from superset.models.datasource_access_request import DatasourceAccessRequest
from superset.models.slice import Slice
-from superset.models.sql_lab import LimitingFactor, Query, TabState
+from superset.models.sql_lab import Query, TabState
from superset.models.user_attributes import UserAttribute
from superset.security.analytics_db_safety import check_sqlalchemy_uri
from superset.sql_parse import ParsedQuery, Table
from superset.sql_validators import get_validator_by_name
from superset.sqllab.command import CommandResult, ExecuteSqlCommand
from superset.sqllab.command_status import SqlJsonExecutionStatus
+from superset.sqllab.limiting_factor import LimitingFactor
+from superset.sqllab.utils import
apply_display_max_row_configuration_if_require
from superset.tasks.async_queries import load_explore_json_into_cache
from superset.typing import FlaskResponse
from superset.utils import core as utils, csv
@@ -127,7 +130,6 @@ from superset.views.base import (
)
from superset.views.utils import (
_deserialize_results_payload,
- apply_display_max_row_limit,
bootstrap_user_data,
check_datasource_perms,
check_explore_cache_perms,
@@ -145,7 +147,6 @@ config = app.config
SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT =
config["SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT"]
stats_logger = config["STATS_LOGGER"]
DAR = DatasourceAccessRequest
-QueryStatus = utils.QueryStatus
logger = logging.getLogger(__name__)
DATABASE_KEYS = [
@@ -2314,7 +2315,7 @@ class Superset(BaseSupersetView): # pylint:
disable=too-many-public-methods
status=400,
) from ex
- obj = apply_display_max_row_limit(obj, rows)
+ obj = apply_display_max_row_configuration_if_require(obj, rows)
return json_success(
json.dumps(
diff --git a/superset/views/utils.py b/superset/views/utils.py
index 37d8361..035f332 100644
--- a/superset/views/utils.py
+++ b/superset/views/utils.py
@@ -32,6 +32,7 @@ from sqlalchemy.orm.exc import NoResultFound
import superset.models.core as models
from superset import app, dataframe, db, result_set, viz
+from superset.common.db_query_status import QueryStatus
from superset.connectors.connector_registry import ConnectorRegistry
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
@@ -47,7 +48,7 @@ from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.models.sql_lab import Query
from superset.typing import FormData
-from superset.utils.core import QueryStatus, TimeRangeEndpoint
+from superset.utils.core import TimeRangeEndpoint
from superset.utils.decorators import stats_timing
from superset.viz import BaseViz
diff --git a/superset/viz.py b/superset/viz.py
index 3ab2e2b..5e22114 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -54,6 +54,7 @@ from geopy.point import Point
from pandas.tseries.frequencies import to_offset
from superset import app, is_feature_enabled
+from superset.common.db_query_status import QueryStatus
from superset.constants import NULL_STRING
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
@@ -443,14 +444,14 @@ class BaseViz: # pylint: disable=too-many-public-methods
except SupersetSecurityException as ex:
error = dataclasses.asdict(ex.error)
self.errors.append(error)
- self.status = utils.QueryStatus.FAILED
+ self.status = QueryStatus.FAILED
payload = self.get_df_payload(query_obj)
# if payload does not have a df, we are raising an error here.
df = cast(Optional[pd.DataFrame], payload["df"])
- if self.status != utils.QueryStatus.FAILED:
+ if self.status != QueryStatus.FAILED:
payload["data"] = self.get_data(df)
if "df" in payload:
del payload["df"]
@@ -503,7 +504,7 @@ class BaseViz: # pylint: disable=too-many-public-methods
try:
df = cache_value["df"]
self.query = cache_value["query"]
- self.status = utils.QueryStatus.SUCCESS
+ self.status = QueryStatus.SUCCESS
is_loaded = True
stats_logger.incr("loaded_from_cache")
except Exception as ex: # pylint: disable=broad-except
@@ -540,7 +541,7 @@ class BaseViz: # pylint: disable=too-many-public-methods
)
)
df = self.get_df(query_obj)
- if self.status != utils.QueryStatus.FAILED:
+ if self.status != QueryStatus.FAILED:
stats_logger.incr("loaded_from_source")
if not self.force:
stats_logger.incr("loaded_from_source_without_force")
@@ -554,7 +555,7 @@ class BaseViz: # pylint: disable=too-many-public-methods
)
)
self.errors.append(error)
- self.status = utils.QueryStatus.FAILED
+ self.status = QueryStatus.FAILED
except Exception as ex: # pylint: disable=broad-except
logger.exception(ex)
@@ -566,10 +567,10 @@ class BaseViz: # pylint: disable=too-many-public-methods
)
)
self.errors.append(error)
- self.status = utils.QueryStatus.FAILED
+ self.status = QueryStatus.FAILED
stacktrace = utils.get_stacktrace()
- if is_loaded and cache_key and self.status !=
utils.QueryStatus.FAILED:
+ if is_loaded and cache_key and self.status != QueryStatus.FAILED:
set_and_log_cache(
cache_manager.data_cache,
cache_key,
@@ -605,7 +606,7 @@ class BaseViz: # pylint: disable=too-many-public-methods
@staticmethod
def has_error(payload: VizPayload) -> bool:
return (
- payload.get("status") == utils.QueryStatus.FAILED
+ payload.get("status") == QueryStatus.FAILED
or payload.get("error") is not None
or bool(payload.get("errors"))
)
diff --git a/tests/integration_tests/cache_tests.py
b/tests/integration_tests/cache_tests.py
index 9f0e6d9..b600ab5 100644
--- a/tests/integration_tests/cache_tests.py
+++ b/tests/integration_tests/cache_tests.py
@@ -20,8 +20,8 @@ import json
import pytest
from superset import app, db
+from superset.common.db_query_status import QueryStatus
from superset.extensions import cache_manager
-from superset.utils.core import QueryStatus
from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_dashboard_with_slices,
)
diff --git a/tests/integration_tests/celery_tests.py
b/tests/integration_tests/celery_tests.py
index eb55c7c..f4224d2 100644
--- a/tests/integration_tests/celery_tests.py
+++ b/tests/integration_tests/celery_tests.py
@@ -36,11 +36,11 @@ from tests.integration_tests.base_tests import login
from tests.integration_tests.conftest import CTAS_SCHEMA_NAME
from tests.integration_tests.test_app import app
from superset import db, sql_lab
+from superset.common.db_query_status import QueryStatus
from superset.result_set import SupersetResultSet
from superset.db_engine_specs.base import BaseEngineSpec
from superset.errors import ErrorLevel, SupersetErrorType
from superset.extensions import celery_app
-from superset.models.helpers import QueryStatus
from superset.models.sql_lab import Query
from superset.sql_parse import ParsedQuery, CtasMethod
from superset.utils.core import get_example_database, backend
diff --git a/tests/integration_tests/core_tests.py
b/tests/integration_tests/core_tests.py
index 57955c2..f91ab45 100644
--- a/tests/integration_tests/core_tests.py
+++ b/tests/integration_tests/core_tests.py
@@ -53,6 +53,7 @@ from superset import (
security_manager,
sql_lab,
)
+from superset.common.db_query_status import QueryStatus
from superset.connectors.sqla.models import SqlaTable
from superset.db_engine_specs.base import BaseEngineSpec
from superset.db_engine_specs.mssql import MssqlEngineSpec
@@ -758,7 +759,7 @@ class TestCore(SupersetTestCase):
self.login()
sql = "SELECT '$DATE()' as test"
resp = {
- "status": utils.QueryStatus.SUCCESS,
+ "status": QueryStatus.SUCCESS,
"query": {"rows": 1},
"data": [{"test": "'1970-01-01'"}],
}
@@ -1214,7 +1215,7 @@ class TestCore(SupersetTestCase):
data = [{"col_0": i} for i in range(100)]
payload = {
- "status": utils.QueryStatus.SUCCESS,
+ "status": QueryStatus.SUCCESS,
"query": {"rows": 100},
"data": data,
}
@@ -1267,7 +1268,7 @@ class TestCore(SupersetTestCase):
query = {
"database_id": 1,
"sql": "SELECT * FROM birth_names LIMIT 100",
- "status": utils.QueryStatus.PENDING,
+ "status": QueryStatus.PENDING,
}
(
serialized_data,
@@ -1279,8 +1280,8 @@ class TestCore(SupersetTestCase):
)
payload = {
"query_id": 1,
- "status": utils.QueryStatus.SUCCESS,
- "state": utils.QueryStatus.SUCCESS,
+ "status": QueryStatus.SUCCESS,
+ "state": QueryStatus.SUCCESS,
"data": serialized_data,
"columns": all_columns,
"selected_columns": selected_columns,
@@ -1315,7 +1316,7 @@ class TestCore(SupersetTestCase):
query = {
"database_id": 1,
"sql": "SELECT * FROM birth_names LIMIT 100",
- "status": utils.QueryStatus.PENDING,
+ "status": QueryStatus.PENDING,
}
(
serialized_data,
@@ -1327,8 +1328,8 @@ class TestCore(SupersetTestCase):
)
payload = {
"query_id": 1,
- "status": utils.QueryStatus.SUCCESS,
- "state": utils.QueryStatus.SUCCESS,
+ "status": QueryStatus.SUCCESS,
+ "state": QueryStatus.SUCCESS,
"data": serialized_data,
"columns": all_columns,
"selected_columns": selected_columns,
diff --git a/tests/integration_tests/model_tests.py
b/tests/integration_tests/model_tests.py
index e84f918..e314a13 100644
--- a/tests/integration_tests/model_tests.py
+++ b/tests/integration_tests/model_tests.py
@@ -31,10 +31,11 @@ from sqlalchemy.types import DateTime
import tests.integration_tests.test_app
from superset import app, db as metadata_db
from superset.db_engine_specs.postgres import PostgresEngineSpec
+from superset.common.db_query_status import QueryStatus
from superset.models.core import Database
from superset.models.slice import Slice
from superset.models.sql_types.base import literal_dttm_type_factory
-from superset.utils.core import get_example_database, QueryStatus
+from superset.utils.core import get_example_database
from .base_tests import SupersetTestCase
from .fixtures.energy_dashboard import load_energy_table_with_slice
diff --git a/tests/integration_tests/queries/api_tests.py
b/tests/integration_tests/queries/api_tests.py
index e734af1..45a807b 100644
--- a/tests/integration_tests/queries/api_tests.py
+++ b/tests/integration_tests/queries/api_tests.py
@@ -27,8 +27,9 @@ from sqlalchemy.sql import func
import tests.integration_tests.test_app
from superset import db, security_manager
+from superset.common.db_query_status import QueryStatus
from superset.models.core import Database
-from superset.utils.core import get_example_database, get_main_database,
QueryStatus
+from superset.utils.core import get_example_database, get_main_database
from superset.models.sql_lab import Query
from tests.integration_tests.base_tests import SupersetTestCase
diff --git a/tests/integration_tests/sqllab_tests.py
b/tests/integration_tests/sqllab_tests.py
index 3657708..1e97514 100644
--- a/tests/integration_tests/sqllab_tests.py
+++ b/tests/integration_tests/sqllab_tests.py
@@ -35,8 +35,9 @@ from superset.db_engine_specs.presto import PrestoEngineSpec
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SupersetErrorException
from superset.models.core import Database
-from superset.models.sql_lab import LimitingFactor, Query, SavedQuery
+from superset.models.sql_lab import Query, SavedQuery
from superset.result_set import SupersetResultSet
+from superset.sqllab.limiting_factor import LimitingFactor
from superset.sql_lab import (
cancel_query,
execute_sql_statements,