Repository: spark
Updated Branches:
  refs/heads/branch-2.0 3bd7a89bd -> 749c29bc0


[SPARK-14933][SQL] Failed to create view out of a parquet or orc table

## What changes were proposed in this pull request?
#### Symptom
If a table is created as a Parquet or ORC table with Hive syntax DDL, such as
```SQL
create table t1 (c1 int, c2 string) stored as parquet
```
then the following command fails:
```SQL
create view v1 as select * from t1
```
#### Root Cause
Currently, `HiveMetaStoreCatalog` converts Parquet/ORC tables to 
`LogicalRelation` without providing any `tableIdentifier`. `SQLBuilder` expects 
the `LogicalRelation` to have an associated `tableIdentifier`, but the 
`LogicalRelation` created here has none, so `SQLBuilder.toSQL` cannot recognize 
the logical plan and throws an exception.

This PR assigns a `TableIdentifier` to the `LogicalRelation` when resolving 
Parquet or ORC tables in `HiveMetaStoreCatalog`.
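
In essence, the fix comes down to the following change inside 
`HiveMetaStoreCatalog` (a simplified sketch of the diff below; `relation` and 
`tableIdentifier` are the values already in scope at the call site):
```scala
// Before: the cached LogicalRelation carried no table identifier, so
// SQLBuilder could not map it back to a table name.
//   val created = LogicalRelation(relation)

// After (this PR): attach the metastore table identifier so that
// SQLBuilder.toSQL can render the relation as `database`.`table`.
val created = LogicalRelation(
  relation,
  metastoreTableIdentifier =
    Some(TableIdentifier(tableIdentifier.name, Some(tableIdentifier.database))))
```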

## How was this patch tested?
Test cases were added and `dev/run-tests` was run.

Author: xin Wu <xi...@us.ibm.com>

Closes #12716 from xwu0226/SPARK_14933.

(cherry picked from commit 427c20dd6d84cb9de1aac322183bc6e7b72ca25d)
Signed-off-by: Cheng Lian <l...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/749c29bc
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/749c29bc
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/749c29bc

Branch: refs/heads/branch-2.0
Commit: 749c29bc099c20aa6156b843cf7c9216315cd5a6
Parents: 3bd7a89
Author: xin Wu <xi...@us.ibm.com>
Authored: Wed May 11 22:17:59 2016 +0800
Committer: Cheng Lian <l...@databricks.com>
Committed: Wed May 11 22:21:40 2016 +0800

----------------------------------------------------------------------
 .../spark/sql/hive/HiveMetastoreCatalog.scala   | 10 +++++--
 .../sql/catalyst/LogicalPlanToSQLSuite.scala    | 24 ++++++++++++++++
 .../spark/sql/hive/execution/SQLViewSuite.scala | 30 ++++++++++++++++++++
 3 files changed, 62 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/749c29bc/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 7a799b6..607f0a1 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -293,7 +293,10 @@ private[hive] class HiveMetastoreCatalog(sparkSession: SparkSession) extends Log
           fileFormat = defaultSource,
           options = options)
 
-        val created = LogicalRelation(relation)
+        val created = LogicalRelation(
+          relation,
+          metastoreTableIdentifier =
+            Some(TableIdentifier(tableIdentifier.name, Some(tableIdentifier.database))))
         cachedDataSourceTables.put(tableIdentifier, created)
         created
       }
@@ -317,7 +320,10 @@ private[hive] class HiveMetastoreCatalog(sparkSession: SparkSession) extends Log
               userSpecifiedSchema = Some(metastoreRelation.schema),
               bucketSpec = bucketSpec,
               options = options,
-              className = fileType).resolveRelation())
+              className = fileType).resolveRelation(),
+              metastoreTableIdentifier =
+                Some(TableIdentifier(tableIdentifier.name, Some(tableIdentifier.database))))
+
 
         cachedDataSourceTables.put(tableIdentifier, created)
         created

http://git-wip-us.apache.org/repos/asf/spark/blob/749c29bc/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
index 9abefa5..4315197 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
@@ -741,4 +741,28 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   test("filter after subquery") {
     checkHiveQl("SELECT a FROM (SELECT key + 1 AS a FROM parquet_t1) t WHERE a > 5")
   }
+
+  test("SPARK-14933 - select parquet table") {
+    withTable("parquet_t") {
+      sql(
+        """
+          |create table parquet_t (c1 int, c2 string)
+          |stored as parquet select 1, 'abc'
+        """.stripMargin)
+
+      checkHiveQl("select * from parquet_t")
+    }
+  }
+
+  test("SPARK-14933 - select orc table") {
+    withTable("orc_t") {
+      sql(
+        """
+          |create table orc_t (c1 int, c2 string)
+          |stored as orc select 1, 'abc'
+        """.stripMargin)
+
+      checkHiveQl("select * from orc_t")
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/749c29bc/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
index f37037e..5c72ec5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
@@ -304,4 +304,34 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       }
     }
   }
+
+  test("SPARK-14933 - create view from hive parquet tabale") {
+    withTable("t_part") {
+      withView("v_part") {
+        sqlContext.sql(
+          """create table t_part (c1 int, c2 int)
+            |stored as parquet as select 1 as a, 2 as b
+          """.stripMargin)
+        sqlContext.sql("create view v_part as select * from t_part")
+        checkAnswer(
+          sql("select * from t_part"),
+          sql("select * from v_part"))
+      }
+    }
+  }
+
+  test("SPARK-14933 - create view from hive orc tabale") {
+    withTable("t_orc") {
+      withView("v_orc") {
+        sqlContext.sql(
+          """create table t_orc (c1 int, c2 int)
+            |stored as orc as select 1 as a, 2 as b
+          """.stripMargin)
+        sqlContext.sql("create view v_orc as select * from t_orc")
+        checkAnswer(
+          sql("select * from t_orc"),
+          sql("select * from v_orc"))
+      }
+    }
+  }
 }

