cloud-fan commented on a change in pull request #25077: [SPARK-28301][SQL] fix 
the behavior of table name resolution with multi-catalog
URL: https://github.com/apache/spark/pull/25077#discussion_r301080170
 
 

 ##########
 File path: 
sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala
 ##########
 @@ -21,70 +21,96 @@ import scala.collection.JavaConverters._
 
 import org.scalatest.BeforeAndAfter
 
-import org.apache.spark.sql.{AnalysisException, QueryTest}
-import org.apache.spark.sql.catalog.v2.Identifier
+import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
+import org.apache.spark.sql.catalog.v2.{CatalogNotFoundException, Identifier}
+import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.{NoSuchTableException, 
TableAlreadyExistsException}
-import org.apache.spark.sql.execution.datasources.v2.orc.OrcDataSourceV2
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSQLContext
 import org.apache.spark.sql.types.{LongType, StringType, StructType}
 
 class DataSourceV2SQLSuite extends QueryTest with SharedSQLContext with 
BeforeAndAfter {
-
-  import org.apache.spark.sql.catalog.v2.CatalogV2Implicits._
-
-  private val orc2 = classOf[OrcDataSourceV2].getName
-
   before {
     spark.conf.set("spark.sql.catalog.testcat", 
classOf[TestInMemoryTableCatalog].getName)
     spark.conf.set("spark.sql.catalog.testcat2", 
classOf[TestInMemoryTableCatalog].getName)
-    spark.conf.set("spark.sql.default.catalog", "testcat")
 
     val df = spark.createDataFrame(Seq((1L, "a"), (2L, "b"), (3L, 
"c"))).toDF("id", "data")
     df.createOrReplaceTempView("source")
     val df2 = spark.createDataFrame(Seq((4L, "d"), (5L, "e"), (6L, 
"f"))).toDF("id", "data")
     df2.createOrReplaceTempView("source2")
   }
 
+  private def getTestCatalog() = {
+    spark.sessionState.catalog.catalogManager.getCatalog("testcat")
+      .asInstanceOf[TestInMemoryTableCatalog]
+  }
+
   after {
-    
spark.catalog("testcat").asInstanceOf[TestInMemoryTableCatalog].clearTables()
+    getTestCatalog().clearTables()
     spark.sql("DROP TABLE source")
     spark.sql("DROP TABLE source2")
   }
 
-  test("CreateTable: use v2 plan because catalog is set") {
-    spark.sql("CREATE TABLE testcat.table_name (id bigint, data string) USING 
foo")
-
-    val testCatalog = spark.catalog("testcat").asTableCatalog
-    val table = testCatalog.loadTable(Identifier.of(Array(), "table_name"))
+  test("CreateTable: basic") {
 
 Review comment:
   This test clearly demonstrates the expected behavior after this fix.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to