This is an automated email from the ASF dual-hosted git repository.

hugh pushed a commit to branch hugh/fix-500s-before-update
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 8a2bb4316530b9fd54f06883e447d8954d1bb6e6
Author: hughhhh <[email protected]>
AuthorDate: Fri Sep 24 12:40:24 2021 -0400

    update check for length
---
 superset/connectors/sqla/models.py |  2 +-
 superset/datasets/dao.py           | 25 ++++++++++++++++++++++++-
 2 files changed, 25 insertions(+), 2 deletions(-)

diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index c87b4c2..490faa5 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -1671,7 +1671,7 @@ class SqlaTable(Model, BaseDatasource):  # pylint: disable=too-many-public-metho
             return None
 
         if not DatasetDAO.validate_uniqueness(
-            target.database_id, target.schema, target.table_name
+            target.database_id, target.schema, target.table_name, check_count=True
         ):
             raise Exception(get_dataset_exist_error_msg(target.full_name))
 
diff --git a/superset/datasets/dao.py b/superset/datasets/dao.py
index 8aee37b..e2a8419 100644
--- a/superset/datasets/dao.py
+++ b/superset/datasets/dao.py
@@ -85,12 +85,35 @@ class DatasetDAO(BaseDAO):  # pylint: disable=too-many-public-methods
             return False
 
     @staticmethod
-    def validate_uniqueness(database_id: int, schema: Optional[str], name: str) -> bool:
+    def validate_uniqueness(
+        database_id: int,
+        schema: Optional[str],
+        name: str,
+        check_count: Optional[bool] = False,
+    ) -> bool:
         dataset_query = db.session.query(SqlaTable).filter(
             SqlaTable.table_name == name,
             SqlaTable.schema == schema,
             SqlaTable.database_id == database_id,
         )
+
+        if check_count:
+            # Make sure there is only one dataset
+            # Using this test to allow for overwrite
+            return dataset_query.count() == 1
+
+        return not db.session.query(dataset_query.exists()).scalar()
+
+    @staticmethod
+    def validate_duplicate_uniqueness(
+        database_id: int, schema: Optional[str], name: str
+    ) -> bool:
+        dataset_query = db.session.query(SqlaTable).filter(
+            SqlaTable.table_name == name,
+            SqlaTable.schema == schema,
+            SqlaTable.database_id == database_id,
+        )
+
         return not db.session.query(dataset_query.exists()).scalar()
 
     @staticmethod

Reply via email to