maropu commented on a change in pull request #28852:
URL: https://github.com/apache/spark/pull/28852#discussion_r445238601
##########
File path: sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
##########
@@ -126,4 +129,39 @@ class HiveMetadataCacheSuite extends QueryTest with SQLTestUtils with TestHiveSi
   for (pruningEnabled <- Seq(true, false)) {
     testCaching(pruningEnabled)
   }
+
+  test("cache TTL") {
+    val sparkConfWithTTl = new SparkConf().set(SQLConf.METADATA_CACHE_TTL.key, "1")
+    val newSession = SparkSession.builder.config(sparkConfWithTTl).getOrCreate().cloneSession()
+
+    withSparkSession(newSession) { implicit spark =>
+      withTable("test_ttl") {
+        withTempDir { dir =>
+          spark.sql(s"""
+            |create external table test_ttl (id long)
+            |partitioned by (f1 int, f2 int)
+            |stored as parquet
+            |location "${dir.toURI}"""".stripMargin)
+
+          val tableIdentifier = TableIdentifier("test_ttl", Some("default"))
+
+          // First, make sure the test table is not cached.
+          assert(getCachedDataSourceTable(tableIdentifier) === null)
+          // This query will make the table cached.
+          spark.sql("select * from test_ttl")
+          assert(getCachedDataSourceTable(tableIdentifier) !== null)
+          // Wait until the cache expiration.
+          Thread.sleep(1500L) // 1.5 seconds > 1 second.
+          // And the cache is gone.
+          assert(getCachedDataSourceTable(tableIdentifier) === null)
+        }
+      }
+    }
+  }
+
+  private def getCachedDataSourceTable(table: TableIdentifier)
+    (implicit spark: SparkSession):
+    LogicalPlan = {
Review comment:
nit format:
```
private def getCachedDataSourceTable(table: TableIdentifier)
    (implicit spark: SparkSession): LogicalPlan = {
```
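
For reference, here is that layout applied in a standalone, compilable sketch: the implicit parameter list opens the four-space continuation line and the result type stays on that same line. The object name, the `lookupCachedPlan` helper, and its body are made up for illustration only and are not part of this PR.
```
import org.apache.spark.sql.SparkSession

object SignatureFormatSketch {
  // Hypothetical helper (not from the PR) showing the suggested signature layout.
  private def lookupCachedPlan(name: String)
      (implicit spark: SparkSession): Option[String] = {
    // Placeholder body; the PR's actual helper is not reproduced here.
    if (spark.catalog.tableExists(name)) Some(name) else None
  }

  def main(args: Array[String]): Unit = {
    implicit val spark: SparkSession = SparkSession.builder()
      .master("local[1]")
      .appName("signature-format-sketch")
      .getOrCreate()
    try {
      println(lookupCachedPlan("test_ttl")) // prints None unless such a table exists
    } finally {
      spark.stop()
    }
  }
}
```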
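
As an aside on what the new test above exercises: a TTL of "1" means cached metadata entries should disappear about a second after they are written. The expiry behavior can be illustrated with a plain Guava cache configured via `expireAfterWrite`; this is only a standalone sketch of the semantics, assuming a Guava-style cache backs the metadata cache, and it does not use Spark's actual cache or config.
```
import java.util.concurrent.TimeUnit

import com.google.common.cache.CacheBuilder

object CacheTtlSketch {
  def main(args: Array[String]): Unit = {
    // Entries expire one second after being written, mirroring the one-second
    // TTL the test sets through SQLConf.METADATA_CACHE_TTL.
    val cache = CacheBuilder.newBuilder()
      .expireAfterWrite(1, TimeUnit.SECONDS)
      .build[String, String]()

    cache.put("test_ttl", "cached table metadata")
    assert(cache.getIfPresent("test_ttl") != null) // still cached right after the write

    Thread.sleep(1500L) // 1.5 seconds > 1 second, the same margin the test uses
    assert(cache.getIfPresent("test_ttl") == null) // the entry has expired
    println("entry expired after the TTL")
  }
}
```
A fixed `Thread.sleep` keeps the sketch (and the test) simple, at the cost of being timing-sensitive; an `eventually`-style wait is usually less flaky in real test suites.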