imback82 commented on a change in pull request #25247: [SPARK-28319][SQL]
Implement SHOW TABLES for Data Source V2 Tables
URL: https://github.com/apache/spark/pull/25247#discussion_r315983843
##########
File path:
sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala
##########
@@ -1703,6 +1707,113 @@ class DataSourceV2SQLSuite extends QueryTest with SharedSparkSession with Before
}
}
+ test("ShowTables: using v2 catalog") {
+ spark.sql("CREATE TABLE testcat.db.table_name (id bigint, data string) USING foo")
+ spark.sql("CREATE TABLE testcat.n1.n2.db.table_name (id bigint, data string) USING foo")
+
+ runShowTablesSql("SHOW TABLES FROM testcat.db", Seq(Row("db", "table_name")))
+
+ runShowTablesSql(
+ "SHOW TABLES FROM testcat.n1.n2.db",
+ Seq(Row("n1.n2.db", "table_name")))
+ }
+
+ test("ShowTables: using v2 catalog with a pattern") {
+ spark.sql("CREATE TABLE testcat.db.table (id bigint, data string) USING foo")
+ spark.sql("CREATE TABLE testcat.db.table_name_1 (id bigint, data string) USING foo")
+ spark.sql("CREATE TABLE testcat.db.table_name_2 (id bigint, data string) USING foo")
+ spark.sql("CREATE TABLE testcat.db2.table_name_2 (id bigint, data string) USING foo")
+
+ runShowTablesSql(
+ "SHOW TABLES FROM testcat.db",
+ Seq(
+ Row("db", "table"),
+ Row("db", "table_name_1"),
+ Row("db", "table_name_2")))
+
+ runShowTablesSql(
+ "SHOW TABLES FROM testcat.db LIKE '*name*'",
+ Seq(Row("db", "table_name_1"), Row("db", "table_name_2")))
+
+ runShowTablesSql(
+ "SHOW TABLES FROM testcat.db LIKE '*2'",
+ Seq(Row("db", "table_name_2")))
+ }
+
+ test("ShowTables: using v2 catalog, namespace doesn't exist") {
+ runShowTablesSql("SHOW TABLES FROM testcat.unknown", Seq())
+ }
+
+ test("ShowTables: using v1 catalog") {
+ runShowTablesSql(
+ "SHOW TABLES FROM default",
+ Seq(Row("", "source", true), Row("", "source2", true)),
+ expectV2Catalog = false)
+ }
+
+ test("ShowTables: using v1 catalog, db doesn't exist ") {
+ // 'db' below resolves to a database name for v1 catalog because there is no catalog named
+ // 'db' and there is no default catalog set.
+ val exception = intercept[NoSuchDatabaseException] {
+ runShowTablesSql("SHOW TABLES FROM db", Seq(), expectV2Catalog = false)
+ }
+
+ assert(exception.getMessage.contains("Database 'db' not found"))
+ }
+
+ test("ShowTables: using v1 catalog, db name with multipartIdentifier ('a.b') is not allowed.") {
+ val exception = intercept[AnalysisException] {
+ runShowTablesSql("SHOW TABLES FROM a.b", Seq(), expectV2Catalog = false)
+ }
+
+ assert(exception.getMessage.contains("The database name is not valid: a.b"))
+ }
+
+ test("ShowTables: using v2 catalog with empty namespace") {
+ spark.sql("CREATE TABLE testcat.table (id bigint, data string) USING foo")
+ runShowTablesSql("SHOW TABLES FROM testcat", Seq(Row("", "table")))
+ }
+
+ test("ShowTables: namespace is not specified and default v2 catalog is set") {
+ spark.conf.set("spark.sql.default.catalog", "testcat")
Review comment:
This test is currently failing and will be updated based on the resolution
of https://github.com/apache/spark/pull/25368/files#r315980292.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]