Github user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/15168#discussion_r80418906
  
    --- Diff: 
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala 
---
    @@ -341,6 +341,74 @@ class SQLQuerySuite extends QueryTest with 
SQLTestUtils with TestHiveSingleton {
         }
       }
     
    +  test("describe partition") {
    +    withTable("partitioned_table", "datasource_table") {
    +      sql("CREATE TABLE partitioned_table (a STRING, b INT) PARTITIONED BY 
(c STRING, d STRING)")
    +      sql("ALTER TABLE partitioned_table ADD PARTITION (c='Us', d=1)")
    +
    +      checkKeywordsExist(sql("DESC partitioned_table PARTITION (c='Us', 
d=1)"),
    +        "# Partition Information",
    +        "# col_name")
    +
    +      checkKeywordsExist(sql("DESC EXTENDED partitioned_table PARTITION 
(c='Us', d=1)"),
    +        "# Partition Information",
    +        "# col_name",
    +        "Detailed Partition Information CatalogPartition(",
    +        "Partition Values: [Us, 1]",
    +        "Storage(Location:",
    +        "Partition Parameters")
    +
    +      checkKeywordsExist(sql("DESC FORMATTED partitioned_table PARTITION 
(c='Us', d=1)"),
    +        "# Partition Information",
    +        "# col_name",
    +        "# Detailed Partition Information",
    +        "Partition Value:",
    +        "Database:",
    +        "Table:",
    +        "Location:",
    +        "Partition Parameters:",
    +        "# Storage Information")
    +
    +      val m = intercept[NoSuchPartitionException] {
    +        sql("DESC partitioned_table PARTITION (c='Us', d=2)")
    +      }.getMessage()
    +      assert(m.contains("Partition not found in table"))
    +
    +      val m2 = intercept[AnalysisException] {
    +        sql("DESC partitioned_table PARTITION (c='Us')")
    +      }.getMessage()
    +      assert(m2.contains("Partition spec is invalid"))
    +
    +      val m3 = intercept[ParseException] {
    +        sql("DESC partitioned_table PARTITION (c='Us', d)")
    +      }.getMessage()
    +      assert(m3.contains("Unsupported SQL statement"))
    +
    +      spark
    +        .range(1).select('id as 'a, 'id as 'b, 'id as 'c, 'id as 'd).write
    +        .partitionBy("d")
    +        .saveAsTable("datasource_table")
    +      val m4 = intercept[AnalysisException] {
    +        sql("DESC datasource_table PARTITION (d=2)")
    +      }.getMessage()
    +      assert(m4.contains("DESC PARTITION is not allowed on a datasource 
table"))
    +
    +      val m5 = intercept[AnalysisException] {
    +        spark.range(10).select('id as 'a, 'id as 
'b).createTempView("view1")
    +        sql("DESC view1 PARTITION (c='Us', d=1)")
    +      }.getMessage()
    +      assert(m5.contains("DESC PARTITION is not allowed on a temporary 
view"))
    +
    +      withView("permanent_view") {
    +        val m = intercept[AnalysisException] {
    +          sql("CREATE VIEW permanent_view AS SELECT * FROM 
partitioned_table")
    +          sql("DESC permanent_view PARTITION (c='Us', d=1)")
    +        }.getMessage()
    +        assert(m.contains("DESC PARTITION is not allowed on a view"))
    +      }
    +    }
    +  }
    --- End diff --
    
    Could you split the test case into two? One covering the positive cases, 
and the other covering the negative cases. We normally do not like a large 
test case.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to