amaliujia commented on code in PR #36641:
URL: https://github.com/apache/spark/pull/36641#discussion_r896166516
##########
sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala:
##########
@@ -681,4 +681,60 @@ class CatalogSuite extends SharedSparkSession with AnalysisTest with BeforeAndAf
assert(spark.catalog.listTables("default").collect().map(_.name).toSet ==
Set("my_table1", "my_table2", "my_temp_table"))
}
+
+ test("three layer namespace compatibility - get table") {
+ val catalogName = "testcat"
+ val dbName = "my_db"
+ val tableName = "my_table"
+ val tableSchema = new StructType().add("i", "int")
+ val description = "this is a test table"
+
+ spark.catalog.createTable(
+ tableName = Array(catalogName, dbName, tableName).mkString("."),
+ source = classOf[FakeV2Provider].getName,
+ schema = tableSchema,
+ description = description,
+ options = Map.empty[String, String])
+
+ val t = spark.catalog.getTable(Array(catalogName, dbName, tableName).mkString("."))
+ val expectedTable =
+ new Table(
+ tableName,
+ catalogName,
+ Array(dbName),
+ description,
+ CatalogTableType.MANAGED.name,
+ false)
+ assert(expectedTable.toString == t.toString)
+ }
+
+ test("three layer namespace compatibility - table exists") {
+ val catalogName = "testcat"
+ val dbName = "my_db"
+ val tableName = "my_table"
+ val tableSchema = new StructType().add("i", "int")
+
+ assert(!spark.catalog.tableExists(Array(catalogName, dbName, tableName).mkString(".")))
+
+ spark.catalog.createTable(
+ tableName = Array(catalogName, dbName, tableName).mkString("."),
+ source = classOf[FakeV2Provider].getName,
+ schema = tableSchema,
+ description = "",
+ options = Map.empty[String, String])
+
+ assert(spark.catalog.tableExists(Array(catalogName, dbName, tableName).mkString(".")))
+ }
+
+ test("three layer namespace compatibility - database exists") {
+ val catalogName = "testcat"
+ val dbName = "my_db"
+ assert(!spark.catalog.databaseExists(Array(catalogName, dbName).mkString(".")))
+
+ val e = intercept[CatalogNotFoundException] {
+ val catalogName2 = "catalog_not_exists"
+ spark.catalog.databaseExists(Array(catalogName2, dbName).mkString("."))
+ }
+ assert(e.getMessage.contains("catalog_not_exists is not defined"))
Review Comment:
done
##########
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##########
@@ -250,8 +251,14 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
 * table/view. This throws an `AnalysisException` when no `Table` can be found.
*/
override def getTable(tableName: String): Table = {
- val tableIdent = sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
- getTable(tableIdent.database.orNull, tableIdent.table)
+ try {
+ val ident = sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
Review Comment:
done
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]