Repository: spark
Updated Branches:
  refs/heads/master 214adb14b -> 0bd86c0fe
[SPARK-15011][SQL] Re-enable 'analyze MetastoreRelations' in hive StatisticsSuite

## What changes were proposed in this pull request?
This PR re-enables the `analyze MetastoreRelations` test in `org.apache.spark.sql.hive.StatisticsSuite`. The flakiness of this test was traced back to a shared configuration option, `hive.exec.compress.output`, in `TestHive`. This property was set to `true` by the `HiveCompatibilitySuite`. I have added configuration resetting logic to `HiveComparisonTest` to prevent this from happening again.

## How was this patch tested?
The change itself is a test.

Author: Herman van Hovell <[email protected]>
Author: Herman van Hovell <[email protected]>

Closes #13498 from hvanhovell/SPARK-15011.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0bd86c0f
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0bd86c0f
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0bd86c0f

Branch: refs/heads/master
Commit: 0bd86c0fe4ebf05d817632a8665a02b379fa3cae
Parents: 214adb1
Author: Herman van Hovell <[email protected]>
Authored: Tue Jun 14 18:24:59 2016 -0700
Committer: Reynold Xin <[email protected]>
Committed: Tue Jun 14 18:24:59 2016 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/sql/hive/StatisticsSuite.scala | 9 ++++-----
 .../spark/sql/hive/execution/HiveComparisonTest.scala     | 6 ++++++
 2 files changed, 10 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/0bd86c0f/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
index 666a8da..a5975cf 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
@@ -21,7 +21,7 @@ import java.io.{File, PrintWriter}
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.sql.{QueryTest, Row}
+import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.execution.command.AnalyzeTableCommand
 import org.apache.spark.sql.execution.joins._
@@ -115,7 +115,7 @@ class StatisticsSuite extends QueryTest with TestHiveSingleton with SQLTestUtils
     }
   }
 
-  ignore("analyze MetastoreRelations") {
+  test("analyze MetastoreRelations") {
     def queryTotalSize(tableName: String): BigInt =
       spark.sessionState.catalog.lookupRelation(TableIdentifier(tableName)).statistics.sizeInBytes
 
@@ -155,14 +155,13 @@ class StatisticsSuite extends QueryTest with TestHiveSingleton with SQLTestUtils
 
     sql("ANALYZE TABLE analyzeTable_part COMPUTE STATISTICS noscan")
 
-    // This seems to be flaky.
-    // assert(queryTotalSize("analyzeTable_part") === BigInt(17436))
+    assert(queryTotalSize("analyzeTable_part") === BigInt(17436))
 
     sql("DROP TABLE analyzeTable_part").collect()
 
     // Try to analyze a temp table
     sql("""SELECT * FROM src""").createOrReplaceTempView("tempTable")
-    intercept[UnsupportedOperationException] {
+    intercept[AnalysisException] {
       sql("ANALYZE TABLE tempTable COMPUTE STATISTICS")
     }
     spark.sessionState.catalog.dropTable(

http://git-wip-us.apache.org/repos/asf/spark/blob/0bd86c0f/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
index a765214..a846711 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive.execution
 
 import java.io._
 import java.nio.charset.StandardCharsets
+import java.util
 
 import scala.util.control.NonFatal
 
@@ -497,6 +498,8 @@ abstract class HiveComparisonTest
         }
       }
 
+      val savedSettings = new util.HashMap[String, String]
+      savedSettings.putAll(TestHive.conf.settings)
       try {
         try {
           if (tryWithoutResettingFirst && canSpeculativelyTryWithoutReset) {
@@ -515,6 +518,9 @@ abstract class HiveComparisonTest
         }
       } catch {
         case tf: org.scalatest.exceptions.TestFailedException => throw tf
+      } finally {
+        TestHive.conf.settings.clear()
+        TestHive.conf.settings.putAll(savedSettings)
       }
     }
   }
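For readers skimming the diff above: the fix in `HiveComparisonTest` amounts to snapshotting the shared `TestHive.conf.settings` map before a test runs and restoring it in a `finally` block afterwards, so that an option such as `hive.exec.compress.output` set by one suite cannot leak into later suites. Below is a minimal, self-contained sketch of that save-and-restore pattern; the `ConfRestoreSketch` object, its `settings` map, and the `withSavedSettings` helper are hypothetical stand-ins for illustration only, not code from the patch.

```scala
import java.util

object ConfRestoreSketch {
  // Stand-in for a shared, mutable configuration map such as TestHive.conf.settings
  // (hypothetical; the real suite mutates a shared SQLConf owned by TestHive).
  val settings = new util.HashMap[String, String]

  def withSavedSettings[T](body: => T): T = {
    // Snapshot the current settings before running the body.
    val saved = new util.HashMap[String, String]
    saved.putAll(settings)
    try {
      body
    } finally {
      // Restore the snapshot so mutations made by the body cannot leak out.
      settings.clear()
      settings.putAll(saved)
    }
  }

  def main(args: Array[String]): Unit = {
    settings.put("hive.exec.compress.output", "false")
    withSavedSettings {
      // Simulate a suite changing shared state, as HiveCompatibilitySuite did.
      settings.put("hive.exec.compress.output", "true")
    }
    println(settings.get("hive.exec.compress.output")) // prints "false"
  }
}
```

Running `main` prints `false`: the mutation made inside the block does not survive the restore, which is the isolation property the patch adds to `HiveComparisonTest`.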
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]