Repository: spark
Updated Branches:
  refs/heads/master 61d729abd -> 2ca563cc4


[SPARK-15756][SQL] Support command 'create table stored as 
orcfile/parquetfile/avrofile'

## What changes were proposed in this pull request?
Currently, Spark SQL supports 'create table src stored as orc/parquet/avro' for
orc/parquet/avro tables. Hive, however, supports both forms: 'stored as
orc/parquet/avro' and 'stored as orcfile/parquetfile/avrofile'.
This PR adds support for the keywords 'orcfile/parquetfile/avrofile' in Spark SQL.

## How was this patch tested?
Added unit tests.

Author: Lianhui Wang <[email protected]>

Closes #13500 from lianhuiwang/SPARK-15756.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2ca563cc
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2ca563cc
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2ca563cc

Branch: refs/heads/master
Commit: 2ca563cc45d1ac1c19b8e84c5a87a950c712ab87
Parents: 61d729a
Author: Lianhui Wang <[email protected]>
Authored: Fri Jun 3 22:19:22 2016 -0700
Committer: Reynold Xin <[email protected]>
Committed: Fri Jun 3 22:19:22 2016 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/internal/HiveSerDe.scala    |  3 +++
 .../sql/execution/command/DDLCommandSuite.scala      | 15 +++++++++++++++
 2 files changed, 18 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/2ca563cc/sql/core/src/main/scala/org/apache/spark/sql/internal/HiveSerDe.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/internal/HiveSerDe.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/internal/HiveSerDe.scala
index d554937..ad69137 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/HiveSerDe.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/HiveSerDe.scala
@@ -71,6 +71,9 @@ object HiveSerDe {
     val key = source.toLowerCase match {
       case s if s.startsWith("org.apache.spark.sql.parquet") => "parquet"
       case s if s.startsWith("org.apache.spark.sql.orc") => "orc"
+      case s if s.equals("orcfile") => "orc"
+      case s if s.equals("parquetfile") => "parquet"
+      case s if s.equals("avrofile") => "avro"
       case s => s
     }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/2ca563cc/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 850fca5..aec7e99 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -237,6 +237,21 @@ class DDLCommandSuite extends PlanTest {
     comparePlans(parsed4, expected4)
   }
 
+  test("create table - table file format") {
+    val allSources = Seq("parquet", "parquetfile", "orc", "orcfile", "avro", 
"avrofile",
+      "sequencefile", "rcfile", "textfile")
+
+    allSources.foreach { s =>
+      val query = s"CREATE TABLE my_tab STORED AS $s"
+      val ct = parseAs[CreateTableCommand](query)
+      val hiveSerde = HiveSerDe.sourceToSerDe(s, new SQLConf)
+      assert(hiveSerde.isDefined)
+      assert(ct.table.storage.serde == hiveSerde.get.serde)
+      assert(ct.table.storage.inputFormat == hiveSerde.get.inputFormat)
+      assert(ct.table.storage.outputFormat == hiveSerde.get.outputFormat)
+    }
+  }
+
   test("create table - row format and table file format") {
     val createTableStart = "CREATE TABLE my_tab ROW FORMAT"
     val fileFormat = s"STORED AS INPUTFORMAT 'inputfmt' OUTPUTFORMAT 
'outputfmt'"


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to