This is an automated email from the ASF dual-hosted git repository. beto pushed a commit to branch folder-api in repository https://gitbox.apache.org/repos/asf/superset.git
commit f4980746ea96b5cf6f0c768c22c58ba3b980ca16 Author: Beto Dealmeida <[email protected]> AuthorDate: Mon Mar 3 15:48:57 2025 -0500 feat: dataset folders (backend) --- superset/connectors/sqla/models.py | 15 ++++++-- superset/datasets/api.py | 28 +++++++++------ superset/datasets/schemas.py | 22 ++++++++++++ ...25-03-03_20-52_94e7a3499973_add_folder_table.py | 42 ++++++++++++++++++++++ 4 files changed, 93 insertions(+), 14 deletions(-) diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index 6478fdf075..44353bdef2 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -69,6 +69,7 @@ from sqlalchemy.sql import column, ColumnElement, literal_column, table from sqlalchemy.sql.elements import ColumnClause, TextClause from sqlalchemy.sql.expression import Label from sqlalchemy.sql.selectable import Alias, TableClause +from sqlalchemy.types import JSON from superset import app, db, is_feature_enabled, security_manager from superset.commands.dataset.exceptions import DatasetNotFoundError @@ -170,7 +171,9 @@ class DatasourceKind(StrEnum): PHYSICAL = "physical" -class BaseDatasource(AuditMixinNullable, ImportExportMixin): # pylint: disable=too-many-public-methods +class BaseDatasource( + AuditMixinNullable, ImportExportMixin +): # pylint: disable=too-many-public-methods """A common interface to objects that are queryable (tables and datasources)""" @@ -400,6 +403,7 @@ class BaseDatasource(AuditMixinNullable, ImportExportMixin): # pylint: disable= # one to many "columns": [o.data for o in self.columns], "metrics": [o.data for o in self.metrics], + "folders": self.folders or [], # TODO deprecate, move logic to JS "order_by_choices": self.order_by_choices, "owners": [owner.id for owner in self.owners], @@ -1018,6 +1022,7 @@ class TableColumn(AuditMixinNullable, ImportExportMixin, CertificationMixin, Mod "filterable", "groupby", "id", + "uuid", "is_certified", "is_dttm", "python_date_format", @@ -1065,7 
+1070,7 @@ class SqlMetric(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model "extra", "warning_text", ] - update_from_object_fields = list(s for s in export_fields if s != "table_id") # noqa: C400 + update_from_object_fields = [s for s in export_fields if s != "table_id"] export_parent = "table" def __repr__(self) -> str: @@ -1117,6 +1122,7 @@ class SqlMetric(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model "description", "expression", "id", + "uuid", "is_certified", "metric_name", "warning_markdown", @@ -1193,6 +1199,7 @@ class SqlaTable( extra = Column(Text) normalize_columns = Column(Boolean, default=False) always_filter_main_dttm = Column(Boolean, default=False) + folders = Column(JSON, nullable=True) baselink = "tablemodelview" @@ -1945,7 +1952,9 @@ class SqlaTable( def default_query(qry: Query) -> Query: return qry.filter_by(is_sqllab_view=False) - def has_extra_cache_key_calls(self, query_obj: QueryObjectDict) -> bool: # noqa: C901 + def has_extra_cache_key_calls( + self, query_obj: QueryObjectDict + ) -> bool: # noqa: C901 """ Detects the presence of calls to `ExtraCache` methods in items in query_obj that can be templated. 
If any are present, the query must be evaluated to extract diff --git a/superset/datasets/api.py b/superset/datasets/api.py index b41f6395bd..cbdd8a5c9c 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -193,8 +193,10 @@ class DatasetRestApi(BaseSupersetModelRestApi): "metrics.id", "metrics.metric_name", "metrics.metric_type", + "metrics.uuid", "metrics.verbose_name", "metrics.warning_text", + "folders", "datasource_type", "url", "extra", @@ -620,9 +622,11 @@ class DatasetRestApi(BaseSupersetModelRestApi): return self.response(201, id=new_model.id, result=item) except DatasetInvalidError as ex: return self.response_422( - message=ex.normalized_messages() - if isinstance(ex, ValidationError) - else str(ex) + message=( + ex.normalized_messages() + if isinstance(ex, ValidationError) + else str(ex) + ) ) except DatasetCreateFailedError as ex: logger.error( @@ -1175,14 +1179,16 @@ class DatasetRestApi(BaseSupersetModelRestApi): def render_item_list(item_list: list[dict[str, Any]]) -> list[dict[str, Any]]: return [ - { - **item, - "rendered_expression": processor.process_template( - item["expression"] - ), - } - if item.get("expression") - else item + ( + { + **item, + "rendered_expression": processor.process_template( + item["expression"] + ), + } + if item.get("expression") + else item + ) for item in item_list ] diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py index 1d271d3dae..531e6da58e 100644 --- a/superset/datasets/schemas.py +++ b/superset/datasets/schemas.py @@ -88,6 +88,26 @@ class DatasetMetricsPutSchema(Schema): uuid = fields.UUID(allow_none=True) +class FolderContentSchema(Schema): + """ + Schema for creating/updating dataset folders. + + Only the UUID is required; other fields are here just to match the schema of the + response payload. 
+ """ + + type = fields.String(required=False) + name = fields.String(required=False) + uuid = fields.UUID() + + +class FolderSchema(Schema): + uuid = fields.UUID() + name = fields.String(required=True, validate=Length(1, 250)) + description = fields.String(allow_none=True, validate=Length(0, 1000)) + contents = fields.List(fields.Nested(FolderContentSchema)) + + class DatasetPostSchema(Schema): database = fields.Integer(required=True) catalog = fields.String(allow_none=True, validate=Length(0, 250)) @@ -99,6 +119,7 @@ class DatasetPostSchema(Schema): external_url = fields.String(allow_none=True) normalize_columns = fields.Boolean(load_default=False) always_filter_main_dttm = fields.Boolean(load_default=False) + folders = fields.List(fields.Nested(FolderContentSchema), required=False) class DatasetPutSchema(Schema): @@ -121,6 +142,7 @@ class DatasetPutSchema(Schema): owners = fields.List(fields.Integer()) columns = fields.List(fields.Nested(DatasetColumnsPutSchema)) metrics = fields.List(fields.Nested(DatasetMetricsPutSchema)) + folders = fields.List(fields.Nested(FolderContentSchema), required=False) extra = fields.String(allow_none=True) is_managed_externally = fields.Boolean(allow_none=True, dump_default=False) external_url = fields.String(allow_none=True) diff --git a/superset/migrations/versions/2025-03-03_20-52_94e7a3499973_add_folder_table.py b/superset/migrations/versions/2025-03-03_20-52_94e7a3499973_add_folder_table.py new file mode 100644 index 0000000000..e95e3bbac1 --- /dev/null +++ b/superset/migrations/versions/2025-03-03_20-52_94e7a3499973_add_folder_table.py @@ -0,0 +1,42 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Add folders column to the tables table + +Revision ID: 94e7a3499973 +Revises: 74ad1125881c +Create Date: 2025-03-03 20:52:24.585143 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.types import JSON + +# revision identifiers, used by Alembic. +revision = "94e7a3499973" +down_revision = "74ad1125881c" + + +def upgrade(): + op.add_column( + "tables", + sa.Column("folders", JSON, nullable=True), + ) + + +def downgrade(): + op.drop_column("tables", "folders")
