Repository: spark
Updated Branches:
  refs/heads/master b0f5497e9 -> 124cbfb68


[SPARK-14488][SPARK-14493][SQL] "CREATE TEMPORARY TABLE ... USING ... AS 
SELECT" shouldn't create persisted table

## What changes were proposed in this pull request?

When planning the logical plan node `CreateTableUsingAsSelect`, we neglected its 
`temporary` field and always generated a `CreateMetastoreDataSourceAsSelect`. 
This PR fixes the issue by generating a `CreateTempTableUsingAsSelect` instead when 
`temporary` is true.
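
For context, the affected statement looks roughly like the sketch below (run e.g. in a 
spark-shell style `sqlContext`; the table names and path are illustrative, not taken 
from the patch):

```scala
// A minimal sketch of the affected CTAS statement.
sqlContext.range(10).registerTempTable("t1")

sqlContext.sql(
  """CREATE TEMPORARY TABLE t2
    |USING PARQUET
    |OPTIONS (PATH '/tmp/spark-14488-demo')
    |AS SELECT * FROM t1
  """.stripMargin)

// Before the fix, t2 ended up as a persisted metastore table; with the fix it is
// planned as CreateTempTableUsingAsSelect and reported as temporary by the catalog.
sqlContext.tables().filter("tableName = 't2'").select("tableName", "isTemporary").show()
```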

This PR also fixes SPARK-14493, whose root cause is that 
`CreateMetastoreDataSourceAsSelect` uses the default Hive warehouse location 
when the `PATH` data source option is absent.
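
For reference, the behavior pinned down by the new SPARK-14493 test looks roughly like 
this (a sketch assuming a spark-shell style `sqlContext`; the table names are illustrative):

```scala
// Without a PATH data source option there is no location to write to, so the
// statement is now expected to fail with "'path' is not specified" instead of
// silently materializing data under the default Hive warehouse directory.
sqlContext.range(10).registerTempTable("t")

try {
  sqlContext.sql(
    """CREATE TEMPORARY TABLE t1
      |USING PARQUET
      |AS SELECT * FROM t
    """.stripMargin)
} catch {
  case e: IllegalArgumentException =>
    assert(e.getMessage == "'path' is not specified")
}
```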

## How was this patch tested?

Added a test case that creates a temporary table using the target syntax and checks 
whether it's indeed a temporary table.

Author: Cheng Lian <l...@databricks.com>

Closes #12303 from liancheng/spark-14488-fix-ctas-using.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/124cbfb6
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/124cbfb6
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/124cbfb6

Branch: refs/heads/master
Commit: 124cbfb683a5e959e1b5181d4d0cc56956b50385
Parents: b0f5497
Author: Cheng Lian <l...@databricks.com>
Authored: Tue Apr 12 22:28:57 2016 +0800
Committer: Cheng Lian <l...@databricks.com>
Committed: Tue Apr 12 22:28:57 2016 +0800

----------------------------------------------------------------------
 .../apache/spark/sql/hive/HiveStrategies.scala  | 10 ++--
 .../sql/hive/execution/SQLQuerySuite.scala      | 49 ++++++++++++++++++--
 2 files changed, 53 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/124cbfb6/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
index f44937e..010361a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
@@ -23,9 +23,8 @@ import org.apache.spark.sql.catalyst.planning._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution._
-import org.apache.spark.sql.execution.command.{DescribeCommand => RunnableDescribeCommand, _}
-import org.apache.spark.sql.execution.datasources.{CreateTableUsing, CreateTableUsingAsSelect,
-  DescribeCommand}
+import org.apache.spark.sql.execution.command.{DescribeCommand => _, _}
+import org.apache.spark.sql.execution.datasources.{CreateTableUsing, CreateTableUsingAsSelect, CreateTempTableUsingAsSelect, DescribeCommand}
 import org.apache.spark.sql.hive.execution._
 
 private[hive] trait HiveStrategies {
@@ -90,6 +89,11 @@ private[hive] trait HiveStrategies {
             tableIdent, userSpecifiedSchema, provider, opts, allowExisting, managedIfNoPath)
         ExecutedCommand(cmd) :: Nil
 
+      case c: CreateTableUsingAsSelect if c.temporary =>
+        val cmd = CreateTempTableUsingAsSelect(
+          c.tableIdent, c.provider, c.partitionColumns, c.mode, c.options, c.child)
+        ExecutedCommand(cmd) :: Nil
+
       case c: CreateTableUsingAsSelect =>
         val cmd = CreateMetastoreDataSourceAsSelect(c.tableIdent, c.provider, c.partitionColumns,
           c.bucketSpec, c.mode, c.options, c.child)

http://git-wip-us.apache.org/repos/asf/spark/blob/124cbfb6/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index b4886eb..7eaf19d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -19,12 +19,9 @@ package org.apache.spark.sql.hive.execution
 
 import java.sql.{Date, Timestamp}
 
-import scala.collection.JavaConverters._
-
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, FunctionRegistry}
-import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.hive.{HiveContext, MetastoreRelation}
@@ -1852,4 +1849,50 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       }
     }
   }
+
+  test(
+    "SPARK-14488 \"CREATE TEMPORARY TABLE ... USING ... AS SELECT ...\" " +
+    "shouldn't create persisted table"
+  ) {
+    withTempPath { dir =>
+      withTempTable("t1", "t2") {
+        val path = dir.getCanonicalPath
+        val ds = sqlContext.range(10)
+        ds.registerTempTable("t1")
+
+        sql(
+          s"""CREATE TEMPORARY TABLE t2
+             |USING PARQUET
+             |OPTIONS (PATH '$path')
+             |AS SELECT * FROM t1
+           """.stripMargin)
+
+        checkAnswer(
+          sqlContext.tables().select('isTemporary).filter('tableName === "t2"),
+          Row(true)
+        )
+
+        checkAnswer(table("t2"), table("t1"))
+      }
+    }
+  }
+
+  test(
+    "SPARK-14493 \"CREATE TEMPORARY TABLE ... USING ... AS SELECT ...\" " +
+    "shouldn always be used together with PATH data source option"
+  ) {
+    withTempTable("t") {
+      sqlContext.range(10).registerTempTable("t")
+
+      val message = intercept[IllegalArgumentException] {
+        sql(
+          s"""CREATE TEMPORARY TABLE t1
+             |USING PARQUET
+             |AS SELECT * FROM t
+           """.stripMargin)
+      }.getMessage
+
+      assert(message == "'path' is not specified")
+    }
+  }
 }

