This is an automated email from the ASF dual-hosted git repository.
johnbodley pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git
The following commit(s) were added to refs/heads/master by this push:
new f488a32 [schema] Updating the base metric schema (#5453)
f488a32 is described below
commit f488a3281d3b4c73d53cb4b2b7c9af5ba2ab3fc5
Author: John Bodley <[email protected]>
AuthorDate: Wed Apr 24 21:50:14 2019 -0700
[schema] Updating the base metric schema (#5453)
---
UPDATING.md | 5 +
superset/connectors/base/models.py | 2 +-
superset/connectors/druid/models.py | 7 +-
superset/connectors/sqla/models.py | 2 +-
.../versions/e9df189e5c7e_update_base_metrics.py | 169 +++++++++++++++++++++
tests/dict_import_export_tests.py | 6 +-
tests/import_export_tests.py | 4 +-
7 files changed, 184 insertions(+), 11 deletions(-)
diff --git a/UPDATING.md b/UPDATING.md
index bdbdef3..f8d64ab 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -32,6 +32,11 @@ which adds missing non-nullable fields and uniqueness constraints to the
`columns`and `table_columns` tables. Depending on the integrity of the data,
manual intervention may be required.
+* [5453](https://github.com/apache/incubator-superset/pull/5453): a change
+which adds missing non-nullable fields and uniqueness constraints to the metrics
+and sql_metrics tables. Depending on the integrity of the data, manual
+intervention may be required.
+
## Superset 0.32.0
* `npm run backend-sync` is deprecated and no longer needed, will fail if called
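As the new UPDATING.md note says, the upgrade can fail when existing data violates the new constraints. A minimal pre-flight check of that kind, sketched with plain SQLAlchemy against the Superset metadata database (the connection URI and the exact queries are illustrative assumptions, not part of this change):

    from sqlalchemy import create_engine, text

    # Placeholder URI; point this at the actual Superset metadata database.
    engine = create_engine('sqlite:////path/to/superset.db')

    checks = {
        'metrics rows with a NULL metric_name or json':
            "SELECT id FROM metrics WHERE metric_name IS NULL OR json IS NULL",
        'sql_metrics rows with a NULL metric_name or expression':
            "SELECT id FROM sql_metrics "
            "WHERE metric_name IS NULL OR expression IS NULL",
        'sql_metrics rows that would collide on the new unique constraint':
            "SELECT metric_name, table_id, COUNT(*) AS n FROM sql_metrics "
            "GROUP BY metric_name, table_id HAVING COUNT(*) > 1",
    }

    with engine.connect() as conn:
        for label, query in checks.items():
            rows = conn.execute(text(query)).fetchall()
            if rows:
                print(label, rows)

Orphaned rows (a NULL datasource_id or table_id) are removed by the migration itself; anything the queries above surface has to be fixed by hand before upgrading.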
diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py
index cd8bd17..57db0ad 100644
--- a/superset/connectors/base/models.py
+++ b/superset/connectors/base/models.py
@@ -411,7 +411,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
__tablename__ = None # {connector_name}_metric
id = Column(Integer, primary_key=True)
- metric_name = Column(String(512))
+ metric_name = Column(String(255), nullable=False)
verbose_name = Column(String(1024))
metric_type = Column(String(32))
description = Column(Text)
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index a0d7ce3..43a092c 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -341,15 +341,14 @@ class DruidMetric(Model, BaseMetric):
__tablename__ = 'metrics'
__table_args__ = (UniqueConstraint('metric_name', 'datasource_id'),)
- datasource_id = Column(
- Integer,
- ForeignKey('datasources.id'))
+ datasource_id = Column(Integer, ForeignKey('datasources.id'))
+
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('metrics', cascade='all, delete-orphan'),
enable_typechecks=False)
- json = Column(Text)
+ json = Column(Text, nullable=False)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_id',
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 81cd1d3..f178db4 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -209,7 +209,7 @@ class SqlMetric(Model, BaseMetric):
'SqlaTable',
backref=backref('metrics', cascade='all, delete-orphan'),
foreign_keys=[table_id])
- expression = Column(Text)
+ expression = Column(Text, nullable=False)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'table_id', 'expression',
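Because metric_name is now capped at 255 characters and expression / json become non-nullable, code that builds metrics programmatically has to supply those fields, as the test changes further below illustrate. A minimal sketch under that assumption (the metric values are placeholders):

    from superset.connectors.druid.models import DruidMetric
    from superset.connectors.sqla.models import SqlMetric

    # SQL metrics must carry a non-NULL expression (an empty string still passes).
    sql_metric = SqlMetric(
        metric_name='row_count',   # at most 255 characters, non-nullable
        expression='COUNT(*)',     # non-nullable after this change
    )

    # Druid metrics must carry a non-NULL JSON definition.
    druid_metric = DruidMetric(
        metric_name='count',
        json='{"type": "count", "name": "count"}',  # non-nullable
    )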
diff --git a/superset/migrations/versions/e9df189e5c7e_update_base_metrics.py b/superset/migrations/versions/e9df189e5c7e_update_base_metrics.py
new file mode 100644
index 0000000..07a6c4d
--- /dev/null
+++ b/superset/migrations/versions/e9df189e5c7e_update_base_metrics.py
@@ -0,0 +1,169 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""update base metrics
+
+Note that the metrics table was previously partially modified by revision
+f231d82b9b26.
+
+Revision ID: e9df189e5c7e
+Revises: 7f2635b51f5d
+Create Date: 2018-07-20 15:57:48.118304
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'e9df189e5c7e'
+down_revision = '7f2635b51f5d'
+
+from alembic import op
+from sqlalchemy import Column, engine, Integer, String, Text
+from sqlalchemy.ext.declarative import declarative_base
+
+from superset import db
+from superset.utils.core import generic_find_uq_constraint_name
+
+Base = declarative_base()
+
+conv = {
+ 'uq': 'uq_%(table_name)s_%(column_0_name)s',
+}
+
+
+class BaseMetricMixin:
+ id = Column(Integer, primary_key=True)
+
+
+class DruidMetric(BaseMetricMixin, Base):
+ __tablename__ = 'metrics'
+
+ datasource_id = Column(Integer)
+
+
+class SqlMetric(BaseMetricMixin, Base):
+ __tablename__ = 'sql_metrics'
+
+ table_id = Column(Integer)
+
+
+def upgrade():
+ bind = op.get_bind()
+ session = db.Session(bind=bind)
+
+ # Delete the orphaned metrics records.
+ for record in session.query(DruidMetric).all():
+ if record.datasource_id is None:
+ session.delete(record)
+
+ session.commit()
+
+ # Enforce that metrics.metric_name column be non-nullable.
+ with op.batch_alter_table('metrics') as batch_op:
+ batch_op.alter_column(
+ 'metric_name',
+ existing_type=String(255),
+ nullable=False,
+ )
+
+ # Enforce that metrics.json column be non-nullable.
+ with op.batch_alter_table('metrics') as batch_op:
+ batch_op.alter_column(
+ 'json',
+ existing_type=Text,
+ nullable=False,
+ )
+
+ # Delete the orphaned sql_metrics records.
+ for record in session.query(SqlMetric).all():
+ if record.table_id is None:
+ session.delete(record)
+
+ session.commit()
+
+ # Reduce the size of the sql_metrics.metric_name column for constraint
+ # viability and enforce that it be non-nullable.
+ with op.batch_alter_table('sql_metrics') as batch_op:
+ batch_op.alter_column(
+ 'metric_name',
+ existing_type=String(512),
+ nullable=False,
+ type_=String(255),
+ )
+
+ # Enforce that sql_metrics.expression column be non-nullable.
+ with op.batch_alter_table('sql_metrics') as batch_op:
+ batch_op.alter_column(
+ 'expression',
+ existing_type=Text,
+ nullable=False,
+ )
+
+ # Add the missing uniqueness constraint to the sql_metrics table.
+ with op.batch_alter_table('sql_metrics', naming_convention=conv) as batch_op:
+ batch_op.create_unique_constraint(
+ 'uq_sql_metrics_metric_name',
+ ['metric_name', 'table_id'],
+ )
+
+
+def downgrade():
+ bind = op.get_bind()
+ insp = engine.reflection.Inspector.from_engine(bind)
+
+ # Remove the missing uniqueness constraint from the sql_metrics table.
+ with op.batch_alter_table('sql_metrics', naming_convention=conv) as batch_op:
+ batch_op.drop_constraint(
+ generic_find_uq_constraint_name(
+ 'sql_metrics',
+ {'metric_name', 'table_id'},
+ insp,
+ ) or 'uq_sql_metrics_table_id',
+ type_='unique',
+ )
+
+ # Restore the size of the sql_metrics.metric_name column and forego that it
+ # be non-nullable.
+ with op.batch_alter_table('sql_metrics') as batch_op:
+ batch_op.alter_column(
+ 'metric_name',
+ existing_type=String(255),
+ nullable=True,
+ type_=String(512),
+ )
+
+ # Forego that the sql_metrics.expression column be non-nullable.
+ with op.batch_alter_table('sql_metrics') as batch_op:
+ batch_op.alter_column(
+ 'expression',
+ existing_type=Text,
+ nullable=True,
+ )
+
+ # Forego that the metrics.metric_name column be non-nullable.
+ with op.batch_alter_table('metrics') as batch_op:
+ batch_op.alter_column(
+ 'metric_name',
+ existing_type=String(255),
+ nullable=True,
+ )
+
+ # Forego that the metrics.json column be non-nullable.
+ with op.batch_alter_table('metrics') as batch_op:
+ batch_op.alter_column(
+ 'json',
+ existing_type=Text,
+ nullable=True,
+ )
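The unique constraint's actual name can vary by backend, which is presumably why downgrade() resolves it through the inspector instead of hard-coding it. A small sketch for verifying the constraint after running the upgrade, again assuming direct SQLAlchemy access to the metadata database (placeholder URI):

    from sqlalchemy import create_engine, inspect

    engine = create_engine('sqlite:////path/to/superset.db')  # placeholder URI
    insp = inspect(engine)

    # Expect one constraint covering {'metric_name', 'table_id'}; on most
    # backends it is named per the uq_%(table_name)s_%(column_0_name)s
    # convention declared above.
    for uq in insp.get_unique_constraints('sql_metrics'):
        print(uq['name'], uq['column_names'])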
diff --git a/tests/dict_import_export_tests.py b/tests/dict_import_export_tests.py
index 50cb0f7..f1f93fa 100644
--- a/tests/dict_import_export_tests.py
+++ b/tests/dict_import_export_tests.py
@@ -72,7 +72,7 @@ class DictImportExportTests(SupersetTestCase):
'params': json.dumps(params),
'columns': [{'column_name': c}
for c in cols_names],
- 'metrics': [{'metric_name': c} for c in metric_names],
+ 'metrics': [{'metric_name': c, 'expression': ''} for c in metric_names],
}
table = SqlaTable(
@@ -84,7 +84,7 @@ class DictImportExportTests(SupersetTestCase):
for col_name in cols_names:
table.columns.append(TableColumn(column_name=col_name))
for metric_name in metric_names:
- table.metrics.append(SqlMetric(metric_name=metric_name))
+ table.metrics.append(SqlMetric(metric_name=metric_name, expression=''))
return table, dict_rep
def create_druid_datasource(
@@ -98,7 +98,7 @@ class DictImportExportTests(SupersetTestCase):
'id': id,
'params': json.dumps(params),
'columns': [{'column_name': c} for c in cols_names],
- 'metrics': [{'metric_name': c} for c in metric_names],
+ 'metrics': [{'metric_name': c, 'json': '{}'} for c in metric_names],
}
datasource = DruidDatasource(
diff --git a/tests/import_export_tests.py b/tests/import_export_tests.py
index ad1aa90..f6d0442 100644
--- a/tests/import_export_tests.py
+++ b/tests/import_export_tests.py
@@ -113,7 +113,7 @@ class ImportExportTests(SupersetTestCase):
table.columns.append(
TableColumn(column_name=col_name))
for metric_name in metric_names:
- table.metrics.append(SqlMetric(metric_name=metric_name))
+ table.metrics.append(SqlMetric(metric_name=metric_name, expression=''))
return table
def create_druid_datasource(
@@ -130,7 +130,7 @@ class ImportExportTests(SupersetTestCase):
DruidColumn(column_name=col_name))
for metric_name in metric_names:
datasource.metrics.append(DruidMetric(
- metric_name=metric_name))
+ metric_name=metric_name, json='{}'))
return datasource
def get_slice(self, slc_id):