Github user dilipbiswal commented on a diff in the pull request:

    https://github.com/apache/spark/pull/12222#discussion_r59686964
  
    --- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala ---
    @@ -122,4 +133,103 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleton
           checkAnswer(sql("SHOW TBLPROPERTIES parquet_temp"), Nil)
         }
       }
    +
    +  test("show columns") {
    +    checkAnswer(
    +      sql("SHOW COLUMNS IN parquet_tab3"),
    +      Row("col1") :: Row("col 2") :: Nil)
    +
    +    checkAnswer(
    +      sql("SHOW COLUMNS IN default.parquet_tab3"),
    +      Row("col1") :: Row("col 2") :: Nil)
    +
    +    checkAnswer(
    +      sql("SHOW COLUMNS IN parquet_tab3 FROM default"),
    +      Row("col1") :: Row("col 2") :: Nil)
    +
    +    checkAnswer(
    +      sql("SHOW COLUMNS IN parquet_tab4 IN default"),
    +      Row("price") :: Row("qty") :: Row("year") :: Row("month") :: Nil)
    +
    +    val message = intercept[NoSuchTableException] {
    +      sql("SHOW COLUMNS IN badtable FROM default")
    +    }.getMessage
    +    assert(message.contains("Table badtable not found in database"))
    +  }
    +
    +  test("show partitions - show everything") {
    +    checkAnswer(
    +      sql("show partitions parquet_tab4"),
    +      Row("year=2015/month=1") ::
    +        Row("year=2015/month=2") ::
    +        Row("year=2016/month=2") ::
    +        Row("year=2016/month=3") :: Nil)
    +
    +    checkAnswer(
    +      sql("show partitions default.parquet_tab4"),
    +      Row("year=2015/month=1") ::
    +        Row("year=2015/month=2") ::
    +        Row("year=2016/month=2") ::
    +        Row("year=2016/month=3") :: Nil)
    +  }
    +
    +  test("show partitions - filter") {
    +    checkAnswer(
    +      sql("show partitions default.parquet_tab4 PARTITION(year=2015)"),
    +      Row("year=2015/month=1") ::
    +        Row("year=2015/month=2") :: Nil)
    +
    +    checkAnswer(
    +      sql("show partitions default.parquet_tab4 PARTITION(year=2015, month=1)"),
    +      Row("year=2015/month=1") :: Nil)
    +
    +    checkAnswer(
    +      sql("show partitions default.parquet_tab4 PARTITION(month=2)"),
    +      Row("year=2015/month=2") ::
    +        Row("year=2016/month=2") :: Nil)
    +  }
    +
    +  test("show partitions - empty row") {
    +    withTempTable("parquet_temp") {
    +      sql(
    +        """
    +          |CREATE TEMPORARY TABLE parquet_temp (c1 INT, c2 STRING)
    +          |USING org.apache.spark.sql.parquet.DefaultSource
    +        """.stripMargin)
    +      // An empty sequence of row is returned for session temporary table.
    +      checkAnswer(sql("SHOW PARTITIONS parquet_temp"), Nil)
    +      val message1 = intercept[AnalysisException] {
    +        sql("SHOW PARTITIONS parquet_tab3")
    +      }.getMessage
    +      assert(message1.contains("is not a partitioned table"))
    +
    +      val message2 = intercept[AnalysisException] {
    +        sql("SHOW PARTITIONS parquet_tab4 PARTITION(abcd=2015, xyz=1)")
    +      }.getMessage
    +      assert(message2.contains("Partition spec (abcd -> 2015, xyz -> 1) contains " +
    +        "non-partition columns"))
    +
    +      val message3 = intercept[AnalysisException] {
    +        sql("SHOW PARTITIONS parquet_view1")
    +      }.getMessage
    +      assert(message3.contains("Operation not allowed: view or index table"))
    +    }
    +  }
    +
    +  test("show partitions - datasource") {
    +    import sqlContext.implicits._
    +    withTable("part_datasrc") {
    +      val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
    +      df.write
    +        .partitionBy("a")
    +        .format("parquet")
    +        .mode(SaveMode.Overwrite)
    +        .saveAsTable("part_datasrc")
    +
    +      val message1 = intercept[AnalysisException] {
    +        sql("SHOW PARTITIONS part_datasrc")
    +      }.getMessage
    +      assert(message1.contains("Operation not allowed: datasource table"))
    --- End diff --
    
    ok


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to