Repository: spark
Updated Branches:
  refs/heads/branch-2.0 e1bdf1e02 -> b52bd8070


[SPARK-16267][TEST] Replace deprecated `CREATE TEMPORARY TABLE ... USING` in test suites.

## What changes were proposed in this pull request?

After SPARK-15674, `DDLStrategy` prints the following deprecation messages in the test suites.

```
12:10:53.284 WARN org.apache.spark.sql.execution.SparkStrategies$DDLStrategy:
CREATE TEMPORARY TABLE normal_orc_source USING... is deprecated,
please use CREATE TEMPORARY VIEW viewName USING... instead
```

Total: 40
- JDBCWriteSuite: 14
- DDLSuite: 6
- TableScanSuite: 6
- ParquetSourceSuite: 5
- OrcSourceSuite: 2
- SQLQuerySuite: 2
- HiveCommandSuite: 2
- JsonSuite: 1
- PrunedScanSuite: 1
- FilteredScanSuite: 1

This PR replaces `CREATE TEMPORARY TABLE` with `CREATE TEMPORARY VIEW` in order to remove the deprecation messages from the above test suites, except for `DDLSuite`, `SQLQuerySuite`, and `HiveCommandSuite`.
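
For readers skimming the diff, the sketch below shows the substitution in isolation. It is a minimal example, not code taken from the test suites: the view name `people_json` and the path `/tmp/people.json` are hypothetical placeholders.

```scala
import org.apache.spark.sql.SparkSession

object TempViewSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("temp-view-sketch")
      .master("local[*]")
      .getOrCreate()

    // Deprecated form (since SPARK-15674): DDLStrategy logs the warning shown above.
    // spark.sql(
    //   """CREATE TEMPORARY TABLE people_json
    //     |USING org.apache.spark.sql.json
    //     |OPTIONS (path '/tmp/people.json')
    //   """.stripMargin)

    // Replacement form used throughout this PR. The path is a placeholder and
    // must point to an existing JSON file for the query below to succeed.
    spark.sql(
      """CREATE TEMPORARY VIEW people_json
        |USING org.apache.spark.sql.json
        |OPTIONS (path '/tmp/people.json')
      """.stripMargin)

    spark.sql("SELECT COUNT(*) FROM people_json").show()
    spark.stop()
  }
}
```

Note that `JDBCWriteSuite` uses `CREATE OR REPLACE TEMPORARY VIEW`, presumably so the view definitions can be re-run before each test without failing on an existing name.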

The Jenkins results show only the 10 remaining messages:

https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/61422/consoleFull

## How was this patch tested?

This is a test-suite-only change.

Author: Dongjoon Hyun <dongj...@apache.org>

Closes #13956 from dongjoon-hyun/SPARK-16267.

(cherry picked from commit 831a04f5d152d1839c0edfdf65bb728aa5957f16)
Signed-off-by: Reynold Xin <r...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b52bd807
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b52bd807
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b52bd807

Branch: refs/heads/branch-2.0
Commit: b52bd8070dc852b419283f8a14595e42c179d3d0
Parents: e1bdf1e
Author: Dongjoon Hyun <dongj...@apache.org>
Authored: Wed Jun 29 17:29:17 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Wed Jun 29 17:29:23 2016 -0700

----------------------------------------------------------------------
 .../sql/execution/datasources/json/JsonSuite.scala      |  2 +-
 .../org/apache/spark/sql/jdbc/JDBCWriteSuite.scala      |  4 ++--
 .../apache/spark/sql/sources/FilteredScanSuite.scala    |  2 +-
 .../org/apache/spark/sql/sources/PrunedScanSuite.scala  |  2 +-
 .../org/apache/spark/sql/sources/TableScanSuite.scala   | 12 ++++++------
 .../org/apache/spark/sql/hive/orc/OrcSourceSuite.scala  |  4 ++--
 .../scala/org/apache/spark/sql/hive/parquetSuites.scala | 10 +++++-----
 7 files changed, 18 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b52bd807/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index 9f35c02..6c72019 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -847,7 +847,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
 
     sql(
       s"""
-        |CREATE TEMPORARY TABLE jsonTableSQL
+        |CREATE TEMPORARY VIEW jsonTableSQL
         |USING org.apache.spark.sql.json
         |OPTIONS (
         |  path '$path'

http://git-wip-us.apache.org/repos/asf/spark/blob/b52bd807/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
index 48fa5f9..ff66f53 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
@@ -57,14 +57,14 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {
 
     sql(
       s"""
-        |CREATE TEMPORARY TABLE PEOPLE
+        |CREATE OR REPLACE TEMPORARY VIEW PEOPLE
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (url '$url1', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass')
       """.stripMargin.replaceAll("\n", " "))
 
     sql(
       s"""
-        |CREATE TEMPORARY TABLE PEOPLE1
+        |CREATE OR REPLACE TEMPORARY VIEW PEOPLE1
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (url '$url1', dbtable 'TEST.PEOPLE1', user 'testUser', password 'testPass')
       """.stripMargin.replaceAll("\n", " "))

http://git-wip-us.apache.org/repos/asf/spark/blob/b52bd807/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
index 45e737f..be56c96 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
@@ -139,7 +139,7 @@ class FilteredScanSuite extends DataSourceTest with SharedSQLContext with Predic
     super.beforeAll()
     sql(
       """
-        |CREATE TEMPORARY TABLE oneToTenFiltered
+        |CREATE TEMPORARY VIEW oneToTenFiltered
         |USING org.apache.spark.sql.sources.FilteredScanSource
         |OPTIONS (
         |  from '1',

http://git-wip-us.apache.org/repos/asf/spark/blob/b52bd807/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
index 207f89d..fb6123d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
@@ -62,7 +62,7 @@ class PrunedScanSuite extends DataSourceTest with SharedSQLContext {
     super.beforeAll()
     sql(
       """
-        |CREATE TEMPORARY TABLE oneToTenPruned
+        |CREATE TEMPORARY VIEW oneToTenPruned
         |USING org.apache.spark.sql.sources.PrunedScanSource
         |OPTIONS (
         |  from '1',

http://git-wip-us.apache.org/repos/asf/spark/blob/b52bd807/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
index 93116d8..0fa0706 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
@@ -137,7 +137,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
     super.beforeAll()
     sql(
       """
-        |CREATE TEMPORARY TABLE oneToTen
+        |CREATE TEMPORARY VIEW oneToTen
         |USING org.apache.spark.sql.sources.SimpleScanSource
         |OPTIONS (
         |  From '1',
@@ -149,7 +149,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
 
     sql(
       """
-        |CREATE TEMPORARY TABLE tableWithSchema (
+        |CREATE TEMPORARY VIEW tableWithSchema (
         |`string$%Field` stRIng,
         |binaryField binary,
         |`booleanField` boolean,
@@ -332,7 +332,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
   test("defaultSource") {
     sql(
       """
-        |CREATE TEMPORARY TABLE oneToTenDef
+        |CREATE TEMPORARY VIEW oneToTenDef
         |USING org.apache.spark.sql.sources
         |OPTIONS (
         |  from '1',
@@ -351,7 +351,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
     val schemaNotAllowed = intercept[Exception] {
       sql(
         """
-          |CREATE TEMPORARY TABLE relationProvierWithSchema (i int)
+          |CREATE TEMPORARY VIEW relationProvierWithSchema (i int)
           |USING org.apache.spark.sql.sources.SimpleScanSource
           |OPTIONS (
           |  From '1',
@@ -364,7 +364,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
     val schemaNeeded = intercept[Exception] {
       sql(
         """
-          |CREATE TEMPORARY TABLE schemaRelationProvierWithoutSchema
+          |CREATE TEMPORARY VIEW schemaRelationProvierWithoutSchema
           |USING org.apache.spark.sql.sources.AllDataTypesScanSource
           |OPTIONS (
           |  From '1',
@@ -378,7 +378,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
   test("SPARK-5196 schema field with comment") {
     sql(
       """
-       |CREATE TEMPORARY TABLE student(name string comment "SN", age int comment "SA", grade int)
+       |CREATE TEMPORARY VIEW student(name string comment "SN", age int comment "SA", grade int)
        |USING org.apache.spark.sql.sources.AllDataTypesScanSource
        |OPTIONS (
        |  from '1',

http://git-wip-us.apache.org/repos/asf/spark/blob/b52bd807/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
index 871b9e0..0f37cd7 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
@@ -153,7 +153,7 @@ class OrcSourceSuite extends OrcSuite {
     super.beforeAll()
 
     spark.sql(
-      s"""CREATE TEMPORARY TABLE normal_orc_source
+      s"""CREATE TEMPORARY VIEW normal_orc_source
          |USING org.apache.spark.sql.hive.orc
          |OPTIONS (
          |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
@@ -161,7 +161,7 @@ class OrcSourceSuite extends OrcSuite {
        """.stripMargin)
 
     spark.sql(
-      s"""CREATE TEMPORARY TABLE normal_orc_as_source
+      s"""CREATE TEMPORARY VIEW normal_orc_as_source
          |USING org.apache.spark.sql.hive.orc
          |OPTIONS (
          |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'

http://git-wip-us.apache.org/repos/asf/spark/blob/b52bd807/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
index 6af9976..fe7253d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
@@ -582,7 +582,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
       "normal_parquet")
 
     sql( s"""
-      create temporary table partitioned_parquet
+      CREATE TEMPORARY VIEW partitioned_parquet
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDir.getCanonicalPath}'
@@ -590,7 +590,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)
 
     sql( s"""
-      create temporary table partitioned_parquet_with_key
+      CREATE TEMPORARY VIEW partitioned_parquet_with_key
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDirWithKey.getCanonicalPath}'
@@ -598,7 +598,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)
 
     sql( s"""
-      create temporary table normal_parquet
+      CREATE TEMPORARY VIEW normal_parquet
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${new File(partitionedTableDir, "p=1").getCanonicalPath}'
@@ -606,7 +606,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)
 
     sql( s"""
-      CREATE TEMPORARY TABLE partitioned_parquet_with_key_and_complextypes
+      CREATE TEMPORARY VIEW partitioned_parquet_with_key_and_complextypes
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDirWithKeyAndComplexTypes.getCanonicalPath}'
@@ -614,7 +614,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)
 
     sql( s"""
-      CREATE TEMPORARY TABLE partitioned_parquet_with_complextypes
+      CREATE TEMPORARY VIEW partitioned_parquet_with_complextypes
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDirWithComplexTypes.getCanonicalPath}'

