Github user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/14971#discussion_r79300109
  
    --- Diff: 
sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala ---
    @@ -259,6 +260,230 @@ class StatisticsSuite extends QueryTest with 
TestHiveSingleton with SQLTestUtils
         }
       }
     
    +  private def createNonPartitionedTable(
    +      tabName: String,
    +      analyzedBySpark: Boolean = true,
    +      analyzedByHive: Boolean = true): Unit = {
    +    val hiveClient = 
spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
    +    sql(
    +      s"""
    +         |CREATE TABLE $tabName (key STRING, value STRING)
    +         |STORED AS TEXTFILE
    +         |TBLPROPERTIES ('prop1' = 'val1', 'prop2' = 'val2')
    +       """.stripMargin)
    +    sql(s"INSERT INTO TABLE $tabName SELECT * FROM src")
    +    if (analyzedBySpark) sql(s"ANALYZE TABLE $tabName COMPUTE STATISTICS")
    +    if (analyzedByHive) hiveClient.runSqlHive(s"ANALYZE TABLE $tabName 
COMPUTE STATISTICS")
    +    val describeResult1 = hiveClient.runSqlHive(s"DESCRIBE FORMATTED 
$tabName")
    +
    +    val tableMetadata =
    +      
spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName)).properties
    +    // statistics info is not contained in the metadata of the original 
table
    +    assert(Seq(StatsSetupConst.COLUMN_STATS_ACCURATE,
    +      StatsSetupConst.NUM_FILES,
    +      StatsSetupConst.NUM_PARTITIONS,
    +      StatsSetupConst.ROW_COUNT,
    +      StatsSetupConst.RAW_DATA_SIZE,
    +      StatsSetupConst.TOTAL_SIZE).forall(!tableMetadata.contains(_)))
    +
    +    if (analyzedByHive) {
    +      assert(StringUtils.filterPattern(describeResult1, 
"*numRows\\s+500*").nonEmpty)
    +    } else {
    +      assert(StringUtils.filterPattern(describeResult1, 
"*numRows\\s+500*").isEmpty)
    +    }
    +  }
    +
    +  private def extractStatsPropValues(
    +      descOutput: Seq[String],
    +      propKey: String): Option[BigInt] = {
    +    val str = descOutput
    +      .filterNot(_.contains(HiveExternalCatalog.STATISTICS_PREFIX))
    +      .filter(_.contains(propKey))
    +    if (str.isEmpty) {
    +      None
    +    } else {
    +      assert(str.length == 1, "found more than one matches")
    +      val pattern = new Regex(s"""$propKey\\s+(-?\\d+)""") // 
s"""${propKey}\s+(-?\d+)""".r
    +      val pattern(value) = str.head.trim
    +      Option(BigInt(value))
    +    }
    +  }
    +
    +  test("get statistics when not analyzed in both Hive and Spark") {
    +    val tabName = "tab1"
    +    val hiveClient = 
spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
    +    hiveClient.runSqlHive("set hive.stats.autogather=false")
    +    withTable(tabName) {
    +      createNonPartitionedTable(tabName, analyzedByHive = false, 
analyzedBySpark = false)
    +      checkStats(
    +        tabName, isDataSourceTable = false, hasSizeInBytes = true, 
expectedRowCounts = None)
    +
    +      // ALTER TABLE SET TBLPROPERTIES invalidates some contents of Hive 
specific statistics
    +      // This is triggered by the Hive alterTable API
    +      val hiveClient = 
spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
    +      val describeResult = hiveClient.runSqlHive(s"DESCRIBE FORMATTED 
$tabName")
    +
    +      val rawDataSize = extractStatsPropValues(describeResult, 
"rawDataSize")
    +      val numRows = extractStatsPropValues(describeResult, "numRows")
    +      val totalSize = extractStatsPropValues(describeResult, "totalSize")
    +      assert(rawDataSize.isEmpty, "rawDataSize should not be shown without 
table analysis")
    +      assert(numRows.isEmpty, "numRows should not be shown without table 
analysis")
    +      assert(totalSize.isDefined && totalSize.get > 0, "totalSize is lost")
    +    }
    +  }
    +
    +  test("alter table rename after analyze table") {
    +    Seq(true, false).foreach { analyzedBySpark =>
    +      val oldName = "tab1"
    +      val newName = "tab2"
    +      withTable(oldName, newName) {
    +        createNonPartitionedTable(oldName, analyzedByHive = true, 
analyzedBySpark = analyzedBySpark)
    +        val fetchedStats1 = checkStats(
    +          oldName, isDataSourceTable = false, hasSizeInBytes = true, 
expectedRowCounts = Some(500))
    +        sql(s"ALTER TABLE $oldName RENAME TO $newName")
    +        val fetchedStats2 = checkStats(
    +          newName, isDataSourceTable = false, hasSizeInBytes = true, 
expectedRowCounts = Some(500))
    +        assert(fetchedStats1 == fetchedStats2)
    +
    +        // ALTER TABLE RENAME does not affect the contents of Hive 
specific statistics
    +        val hiveClient = 
spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
    +        val describeResult = hiveClient.runSqlHive(s"DESCRIBE FORMATTED 
$newName")
    +
    +        val rawDataSize = extractStatsPropValues(describeResult, 
"rawDataSize")
    +        val numRows = extractStatsPropValues(describeResult, "numRows")
    +        val totalSize = extractStatsPropValues(describeResult, "totalSize")
    +        assert(rawDataSize.isDefined && rawDataSize.get > 0, "rawDataSize 
is lost")
    +        assert(numRows.isDefined && numRows.get == 500, "numRows is lost")
    +        assert(totalSize.isDefined && totalSize.get > 0, "totalSize is 
lost")
    +      }
    +    }
    +  }
    +
    +  test("alter table SET TBLPROPERTIES after analyze table") {
    +    Seq(true, false).foreach { analyzedBySpark =>
    +      val tabName = "tab1"
    +      withTable(tabName) {
    +        createNonPartitionedTable(tabName, analyzedByHive = true, 
analyzedBySpark = analyzedBySpark)
    +        val fetchedStats1 = checkStats(
    +          tabName, isDataSourceTable = false, hasSizeInBytes = true, 
expectedRowCounts = Some(500))
    +        sql(s"ALTER TABLE $tabName SET TBLPROPERTIES ('foo' = 'a')")
    +        val fetchedStats2 = checkStats(
    +          tabName, isDataSourceTable = false, hasSizeInBytes = true, 
expectedRowCounts = Some(500))
    +        assert(fetchedStats1 == fetchedStats2)
    +
    +        val hiveClient = 
spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
    +        val describeResult = hiveClient.runSqlHive(s"DESCRIBE FORMATTED 
$tabName")
    +
    +        val totalSize = extractStatsPropValues(describeResult, "totalSize")
    +        assert(totalSize.isDefined && totalSize.get > 0, "totalSize is 
lost")
    +
    +        // ALTER TABLE SET TBLPROPERTIES invalidates some Hive specific 
statistics
    +        // This is triggered by the Hive alterTable API
    +        val numRows = extractStatsPropValues(describeResult, "numRows")
    +        assert(numRows.isDefined && numRows.get == -1, "numRows is lost")
    +        val rawDataSize = extractStatsPropValues(describeResult, 
"rawDataSize")
    +        assert(rawDataSize.isDefined && rawDataSize.get == -1, 
"rawDataSize is lost")
    +      }
    +    }
    +  }
    +
    +  test("alter table UNSET TBLPROPERTIES after analyze table") {
    +    Seq(true, false).foreach { analyzedBySpark =>
    +      val tabName = "tab1"
    +      withTable(tabName) {
    +        createNonPartitionedTable(tabName, analyzedByHive = true, 
analyzedBySpark = analyzedBySpark)
    +        val fetchedStats1 = checkStats(
    +          tabName, isDataSourceTable = false, hasSizeInBytes = true, 
expectedRowCounts = Some(500))
    +        sql(s"ALTER TABLE $tabName UNSET TBLPROPERTIES ('prop1')")
    +        val fetchedStats2 = checkStats(
    +          tabName, isDataSourceTable = false, hasSizeInBytes = true, 
expectedRowCounts = Some(500))
    +        assert(fetchedStats1 == fetchedStats2)
    +
    +        val hiveClient = 
spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
    +        val describeResult = hiveClient.runSqlHive(s"DESCRIBE FORMATTED 
$tabName")
    +
    +        val totalSize = extractStatsPropValues(describeResult, "totalSize")
    +        assert(totalSize.isDefined && totalSize.get > 0, "totalSize is 
lost")
    +
    +        // ALTER TABLE UNSET TBLPROPERTIES invalidates some Hive specific 
statistics
    +        // This is triggered by the Hive alterTable API
    +        val numRows = extractStatsPropValues(describeResult, "numRows")
    +        assert(numRows.isDefined && numRows.get == -1, "numRows is lost")
    +        val rawDataSize = extractStatsPropValues(describeResult, 
"rawDataSize")
    +        assert(rawDataSize.isDefined && rawDataSize.get == -1, 
"rawDataSize is lost")
    +      }
    +    }
    +  }
    +
    +  test("add/drop partitions - managed table") {
    +    val catalog = spark.sessionState.catalog
    +    val managedTable = "partitionedTable"
    +    withTable(managedTable) {
    +      sql(
    +        s"""
    +           |CREATE TABLE $managedTable (key INT, value STRING)
    +           |PARTITIONED BY (ds STRING, hr STRING)
    +        """.stripMargin)
    +
    +      for (ds <- Seq("2008-04-08", "2008-04-09"); hr <- Seq("11", "12")) {
    +        sql(
    +          s"""
    +             |INSERT OVERWRITE TABLE $managedTable
    +             |partition (ds='$ds',hr='$hr')
    +             |SELECT 1, 'a'
    +           """.stripMargin)
    +      }
    +
    +      checkStats(
    +        managedTable, isDataSourceTable = false, hasSizeInBytes = false, 
expectedRowCounts = None)
    --- End diff --
    
Actually, this is a surprise to me. We did not use Hive-generated
statistics. I found that the table-level statistics are missing. We need to get the
statistics info from the properties of each partition and then add them up.
I will submit a separate PR.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to