GitHub user wangyum commented on a diff in the pull request:
https://github.com/apache/spark/pull/22721#discussion_r225092331
--- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala ---
@@ -2370,4 +2370,26 @@ class HiveDDLSuite
       ))
     }
   }
+
+  test("Refresh table after insert into table") {
+    withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> "false") {
+      Seq("t1", "t2").foreach { tableName =>
+        withTable(tableName) {
+          if (tableName.equals("t1")) {
+            sql(s"CREATE TABLE $tableName (a INT) STORED AS parquet")
+          } else {
+            sql(s"CREATE TABLE $tableName (a INT) USING parquet")
+          }
+
+          sql(s"INSERT INTO TABLE $tableName VALUES (1)")
+
+          val catalog = spark.sessionState.catalog
+          val qualifiedTableName =
+            QualifiedTableName(catalog.getCurrentDatabase, tableName)
+          val cachedRelation = catalog.getCachedTable(qualifiedTableName)
+          // cachedRelation should be null after refresh table.
+          assert(cachedRelation === null)
--- End diff ---

cachedRelation should be null after REFRESH TABLE, because SessionCatalog.refreshTable invalidates the cached relation entry:
https://github.com/apache/spark/blob/01c3dfab158d40653f8ce5d96f57220297545d5b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala#L791
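
For reference, here is a minimal, self-contained sketch (not Spark's actual code) of the invalidation behaviour the assertion relies on. The object RefreshTableSketch and its String "relation" values are hypothetical stand-ins for SessionCatalog and its Guava-backed tableRelationCache of logical plans; once an entry is invalidated, the next lookup returns null, which is what getCachedTable surfaces to the test:

import com.google.common.cache.{Cache, CacheBuilder}

// Hypothetical, simplified stand-in for SessionCatalog's tableRelationCache.
object RefreshTableSketch {
  final case class QualifiedTableName(database: String, name: String)

  private val tableRelationCache: Cache[QualifiedTableName, String] =
    CacheBuilder.newBuilder().maximumSize(1000).build[QualifiedTableName, String]()

  // Guava returns null on a cache miss, which is what the test asserts on.
  def getCachedTable(key: QualifiedTableName): String =
    tableRelationCache.getIfPresent(key)

  // Refreshing a table drops its cached relation, so the next lookup is null.
  def refreshTable(key: QualifiedTableName): Unit =
    tableRelationCache.invalidate(key)

  def main(args: Array[String]): Unit = {
    val key = QualifiedTableName("default", "t1")
    tableRelationCache.put(key, "cached logical plan")
    refreshTable(key)
    assert(getCachedTable(key) == null)  // holds after the refresh
  }
}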
---