Repository: spark
Updated Branches:
  refs/heads/master f96a8bf8f -> 473f2fb3b


[SPARK-21786][SQL][FOLLOWUP] Add compressionCodec test for CTAS

## What changes were proposed in this pull request?
Before Apache Spark 2.3, table properties were ignored when writing data to a 
Hive table created with the STORED AS PARQUET/ORC syntax, because the compression 
configurations were not passed to FileFormatWriter via hadoopConf. That was fixed 
in #20087. However, for CTAS with the USING PARQUET/ORC syntax, table properties 
were still ignored when convertMetastore was enabled, so the corresponding CTAS 
test cases could not be enabled at that time.

Now that it has been fixed in #20522, those test cases should be enabled too.
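
For reference, a minimal sketch of the kind of CTAS-with-compression path these cases exercise; the session setup, table name, and codec below are illustrative and are not taken from the test suite itself:

```scala
import org.apache.spark.sql.SparkSession

// Illustrative sketch (assumed names): create a Hive Parquet table via CTAS with an
// explicit compression codec in TBLPROPERTIES. The re-enabled tests verify that such
// a table property is honored when the metastore table is converted
// (convertMetastore=true) instead of being silently ignored.
object CtasCompressionSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("ctas-compression-sketch")
      .enableHiveSupport()
      .getOrCreate()

    // Source data for the CTAS statement.
    spark.range(10).createOrReplaceTempView("src")

    // CTAS on a Hive-format table with a compression codec set as a table property.
    spark.sql(
      """CREATE TABLE ctas_compressed
        |STORED AS PARQUET
        |TBLPROPERTIES ('parquet.compression' = 'GZIP')
        |AS SELECT id FROM src
      """.stripMargin)

    spark.stop()
  }
}
```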

## How was this patch tested?
This PR only re-enables the test cases from the previous PR.

Closes #22302 from fjh100456/compressionCodec.

Authored-by: fjh100456 <fu.jinh...@zte.com.cn>
Signed-off-by: Dongjoon Hyun <dongj...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/473f2fb3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/473f2fb3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/473f2fb3

Branch: refs/heads/master
Commit: 473f2fb3bfd0e51c40a87e475392f2e2c8f912dd
Parents: f96a8bf
Author: fjh100456 <fu.jinh...@zte.com.cn>
Authored: Fri Sep 7 09:28:33 2018 -0700
Committer: Dongjoon Hyun <dongj...@apache.org>
Committed: Fri Sep 7 09:28:33 2018 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/hive/CompressionCodecSuite.scala | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/473f2fb3/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala
index 4550d35..30204d1 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala
@@ -122,7 +122,7 @@ class CompressionCodecSuite extends TestHiveSingleton with ParquetTest with Befo
       """.stripMargin)
   }
 
-  private def writeDateToTableUsingCTAS(
+  private def writeDataToTableUsingCTAS(
       rootDir: File,
       tableName: String,
       partitionValue: Option[String],
@@ -152,7 +152,7 @@ class CompressionCodecSuite extends TestHiveSingleton with ParquetTest with Befo
       usingCTAS: Boolean): String = {
     val partitionValue = if (isPartitioned) Some("test") else None
     if (usingCTAS) {
-      writeDateToTableUsingCTAS(tmpDir, tableName, partitionValue, format, compressionCodec)
+      writeDataToTableUsingCTAS(tmpDir, tableName, partitionValue, format, compressionCodec)
     } else {
       createTable(tmpDir, tableName, isPartitioned, format, compressionCodec)
       writeDataToTable(tableName, partitionValue)
@@ -258,8 +258,7 @@ class CompressionCodecSuite extends TestHiveSingleton with ParquetTest with Befo
   def checkForTableWithCompressProp(format: String, compressCodecs: List[String]): Unit = {
     Seq(true, false).foreach { isPartitioned =>
       Seq(true, false).foreach { convertMetastore =>
-        // TODO: Also verify CTAS(usingCTAS=true) cases when the bug(SPARK-22926) is fixed.
-        Seq(false).foreach { usingCTAS =>
+        Seq(true, false).foreach { usingCTAS =>
           checkTableCompressionCodecForCodecs(
             format,
             isPartitioned,
@@ -281,8 +280,7 @@ class CompressionCodecSuite extends TestHiveSingleton with ParquetTest with Befo
   def checkForTableWithoutCompressProp(format: String, compressCodecs: List[String]): Unit = {
     Seq(true, false).foreach { isPartitioned =>
       Seq(true, false).foreach { convertMetastore =>
-        // TODO: Also verify CTAS(usingCTAS=true) cases when the bug(SPARK-22926) is fixed.
-        Seq(false).foreach { usingCTAS =>
+        Seq(true, false).foreach { usingCTAS =>
           checkTableCompressionCodecForCodecs(
             format,
             isPartitioned,

