Github user wangyum commented on a diff in the pull request: https://github.com/apache/spark/pull/22263#discussion_r225762035 --- Diff: sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala --- @@ -288,6 +297,65 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext } } + test("SQL interface support storageLevel(DISK_ONLY)") { --- End diff -- How about this: ```scala Seq("LAZY", "").foreach { isLazy => Seq(true, false).foreach { withInvalidOptions => Seq(true, false).foreach { withCacheTempView => Map("DISK_ONLY" -> Disk, "MEMORY_ONLY" -> Memory).foreach { case (storageLevel, dataReadMethod) => val testName = s"SQL interface support option: storageLevel: $storageLevel, " + s"isLazy: ${isLazy.equals("LAZY")}, " + s"withInvalidOptions: $withInvalidOptions, withCacheTempView: $withCacheTempView" val cacheOption = if (withInvalidOptions) { s"OPTIONS('storageLevel' '$storageLevel', 'a' '1', 'b' '2')" } else { s"OPTIONS('storageLevel' '$storageLevel')" } test(testName) { if (withCacheTempView) { withTempView("testSelect") { sql(s"CACHE $isLazy TABLE testSelect $cacheOption SELECT * FROM testData") assertCached(spark.table("testSelect")) val rddId = rddIdOf("testSelect") if (isLazy.equals("LAZY")) { sql("SELECT COUNT(*) FROM testSelect").collect() } assert(isExpectStorageLevel(rddId, dataReadMethod)) } } else { sql(s"CACHE $isLazy TABLE testData $cacheOption") assertCached(spark.table("testData")) val rddId = rddIdOf("testData") if (isLazy.equals("LAZY")) { sql("SELECT COUNT(*) FROM testData").collect() } assert(isExpectStorageLevel(rddId, dataReadMethod)) } } } } } } ```
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org