This is an automated email from the ASF dual-hosted git repository.
bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 0c987e96f [KYUUBI #5225] [KSHC] Unify the exception handling of v1 and
v2 during dropDatabase
0c987e96f is described below
commit 0c987e96fad325030affe2b2f2aaa5a27766e292
Author: yikaifei <[email protected]>
AuthorDate: Fri Sep 1 12:17:33 2023 +0800
[KYUUBI #5225] [KSHC] Unify the exception handling of v1 and v2 during
dropDatabase
### _Why are the changes needed?_
This PR aims to unify the exception handling of v1 and v2 during
dropDatabase.
### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including
negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run
test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests)
locally before making a pull request
### _Was this patch authored or co-authored using generative AI tooling?_
No
Closes #5225 from Yikf/hive-connector.
Closes #5225
3be33af76 [yikaifei] [KSHC] Improve test
Authored-by: yikaifei <[email protected]>
Signed-off-by: Bowen Liang <[email protected]>
---
.../kyuubi/spark/connector/hive/HiveTableCatalog.scala | 3 ---
.../kyuubi/spark/connector/hive/KyuubiHiveTest.scala | 2 +-
.../connector/hive/command/CreateNamespaceSuite.scala | 4 +---
.../connector/hive/command/DropNamespaceSuite.scala | 16 ++++++++++------
.../spark/connector/hive/command/ShowTablesSuite.scala | 4 +---
5 files changed, 13 insertions(+), 16 deletions(-)
diff --git
a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveTableCatalog.scala
b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveTableCatalog.scala
index 75804eb63..c128d67f1 100644
---
a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveTableCatalog.scala
+++
b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveTableCatalog.scala
@@ -382,9 +382,6 @@ class HiveTableCatalog(sparkSession: SparkSession)
withSQLConf(LEGACY_NON_IDENTIFIER_OUTPUT_CATALOG_NAME -> "true") {
namespace match {
case Array(db) if catalog.databaseExists(db) =>
- if (catalog.listTables(db).nonEmpty && !cascade) {
- throw new IllegalStateException(s"Namespace ${namespace.quoted} is
not empty")
- }
catalog.dropDatabase(db, ignoreIfNotExists = false, cascade)
true
diff --git
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/KyuubiHiveTest.scala
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/KyuubiHiveTest.scala
index 400afdb3e..851659b15 100644
---
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/KyuubiHiveTest.scala
+++
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/KyuubiHiveTest.scala
@@ -44,7 +44,7 @@ abstract class KyuubiHiveTest extends QueryTest with Logging {
SupportsNamespaces.PROP_LOCATION,
SupportsNamespaces.PROP_OWNER)
- protected def catalogName: String = "hive"
+ protected val catalogName: String = "hive"
override def beforeEach(): Unit = {
super.beforeAll()
diff --git
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/CreateNamespaceSuite.scala
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/CreateNamespaceSuite.scala
index e2e5b574b..d6b90cc04 100644
---
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/CreateNamespaceSuite.scala
+++
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/CreateNamespaceSuite.scala
@@ -133,8 +133,6 @@ trait CreateNamespaceSuiteBase extends DDLCommandTestUtils {
class CreateNamespaceV2Suite extends CreateNamespaceSuiteBase {
- override protected def catalogName: String = super.catalogName
-
override protected def catalogVersion: String = "Hive V2"
override protected def commandVersion: String = V2_COMMAND_VERSION
@@ -144,7 +142,7 @@ class CreateNamespaceV1Suite extends
CreateNamespaceSuiteBase {
val SESSION_CATALOG_NAME: String = "spark_catalog"
- override protected def catalogName: String = SESSION_CATALOG_NAME
+ override protected val catalogName: String = SESSION_CATALOG_NAME
override protected def catalogVersion: String = "V1"
diff --git
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
index 81107c24f..eebfbe488 100644
---
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
+++
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
@@ -20,7 +20,9 @@ package org.apache.kyuubi.spark.connector.hive.command
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.types.{StringType, StructType}
+import
org.apache.kyuubi.spark.connector.common.SparkUtils.SPARK_RUNTIME_VERSION
import
org.apache.kyuubi.spark.connector.hive.command.DDLCommandTestUtils.{V1_COMMAND_VERSION,
V2_COMMAND_VERSION}
+import org.apache.kyuubi.util.AssertionUtils.interceptContains
trait DropNamespaceSuiteBase extends DDLCommandTestUtils {
override protected def command: String = "DROP NAMESPACE"
@@ -70,10 +72,14 @@ trait DropNamespaceSuiteBase extends DDLCommandTestUtils {
checkNamespace(Seq(namespace) ++ builtinNamespace)
// $catalog.ns.table is present, thus $catalog.ns cannot be dropped.
- val e = intercept[IllegalStateException] {
+ interceptContains[AnalysisException] {
sql(s"DROP NAMESPACE $catalogName.$namespace")
- }
- assert(e.getMessage.contains(s"Namespace $namespace is not empty"))
+ }(if (SPARK_RUNTIME_VERSION >= "3.4") {
+ s"[SCHEMA_NOT_EMPTY] Cannot drop a schema `$namespace` because it
contains objects"
+ } else {
+ "Use CASCADE option to drop a non-empty database"
+ })
+
sql(s"DROP TABLE $catalogName.$namespace.table")
// Now that $catalog.ns is empty, it can be dropped.
@@ -100,8 +106,6 @@ trait DropNamespaceSuiteBase extends DDLCommandTestUtils {
class DropNamespaceV2Suite extends DropNamespaceSuiteBase {
- override protected def catalogName: String = super.catalogName
-
override protected def catalogVersion: String = "Hive V2"
override protected def commandVersion: String = V2_COMMAND_VERSION
@@ -111,7 +115,7 @@ class DropNamespaceV1Suite extends DropNamespaceSuiteBase {
val SESSION_CATALOG_NAME: String = "spark_catalog"
- override protected def catalogName: String = SESSION_CATALOG_NAME
+ override protected val catalogName: String = SESSION_CATALOG_NAME
override protected def catalogVersion: String = "V1"
diff --git
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/ShowTablesSuite.scala
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/ShowTablesSuite.scala
index bff47c9de..445ca9fa7 100644
---
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/ShowTablesSuite.scala
+++
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/ShowTablesSuite.scala
@@ -96,8 +96,6 @@ trait ShowTablesSuiteBase extends DDLCommandTestUtils {
class ShowTablesV2Suite extends ShowTablesSuiteBase {
- override protected def catalogName: String = super.catalogName
-
override protected def catalogVersion: String = "Hive V2"
override protected def commandVersion: String = V2_COMMAND_VERSION
@@ -107,7 +105,7 @@ class ShowTablesV1Suite extends ShowTablesSuiteBase {
val SESSION_CATALOG_NAME: String = "spark_catalog"
- override protected def catalogName: String = SESSION_CATALOG_NAME
+ override protected val catalogName: String = SESSION_CATALOG_NAME
override protected def catalogVersion: String = "V1"