Repository: spark
Updated Branches:
  refs/heads/master 589cce93c -> ee682fe29


[SPARK-15534][SPARK-15535][SQL] Truncate table fixes

## What changes were proposed in this pull request?

Two changes:
- When `TRUNCATE TABLE` fails (e.g. the target table does not exist, or is a 
temporary table), it currently just returns an empty result and logs an error. 
Instead, we should throw an exception, as sketched below.
- Remove the `TRUNCATE TABLE ... COLUMNS` syntax, which was never supported by 
either Spark or Hive.
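
A minimal sketch of the intended behavior after this patch, as seen from a 
`spark-shell`-style session (the table names are invented for illustration):

```scala
// Previously: empty result plus a logged error. Now: an exception.
spark.sql("TRUNCATE TABLE no_such_table")
// throws org.apache.spark.sql.AnalysisException (table does not exist)

// The COLUMNS clause is gone from the grammar entirely:
spark.sql("TRUNCATE TABLE some_table COLUMNS (col1)")
// fails at parse time, since the syntax no longer exists
```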

## How was this patch tested?
Jenkins.

Author: Andrew Or <and...@databricks.com>

Closes #13302 from andrewor14/truncate-table.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ee682fe2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ee682fe2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ee682fe2

Branch: refs/heads/master
Commit: ee682fe293b47988056b540ee46ca49861309982
Parents: 589cce9
Author: Andrew Or <and...@databricks.com>
Authored: Wed May 25 15:08:39 2016 -0700
Committer: Andrew Or <and...@databricks.com>
Committed: Wed May 25 15:08:39 2016 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/catalyst/parser/SqlBase.g4     |  3 +--
 .../org/apache/spark/sql/execution/SparkSqlParser.scala |  7 +------
 .../org/apache/spark/sql/execution/command/tables.scala |  7 ++++---
 .../spark/sql/hive/execution/HiveCommandSuite.scala     | 12 ------------
 4 files changed, 6 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ee682fe2/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 403191a..b0e71c7 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -115,8 +115,7 @@ statement
     | CLEAR CACHE                                                     #clearCache
     | LOAD DATA LOCAL? INPATH path=STRING OVERWRITE? INTO TABLE
         tableIdentifier partitionSpec?                                #loadData
-    | TRUNCATE TABLE tableIdentifier partitionSpec?
-        (COLUMNS identifierList)?                                     #truncateTable
+    | TRUNCATE TABLE tableIdentifier partitionSpec?                   #truncateTable
     | op=(ADD | LIST) identifier .*?                                  #manageResource
     | SET ROLE .*?                                                    #failNativeCommand
     | SET .*?                                                         #setConfiguration

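Because the `(COLUMNS identifierList)?` alternative is removed, the clause is now 
rejected at parse time rather than by the explicit check in the AST builder (see 
the parser change below). A small sketch of what that looks like from a session, 
with a made-up table name:

    import org.apache.spark.sql.catalyst.parser.ParseException

    // A COLUMNS clause now fails at parse time; following the pattern of the
    // (removed) suite checks below, one could observe:
    try {
      spark.sql("TRUNCATE TABLE t COLUMNS (a, b)")
    } catch {
      case e: ParseException => println("rejected by the grammar: " + e.getMessage)
    }
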
http://git-wip-us.apache.org/repos/asf/spark/blob/ee682fe2/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 57f534c..cfebfc6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -368,17 +368,12 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
    * For example:
    * {{{
    *   TRUNCATE TABLE tablename [PARTITION (partcol1=val1, partcol2=val2 ...)]
-   *   [COLUMNS (col1, col2)]
    * }}}
    */
   override def visitTruncateTable(ctx: TruncateTableContext): LogicalPlan = withOrigin(ctx) {
-    if (ctx.identifierList != null) {
-      throw operationNotAllowed("TRUNCATE TABLE ... COLUMNS", ctx)
-    }
     TruncateTableCommand(
       visitTableIdentifier(ctx.tableIdentifier),
-      Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec)
-    )
+      Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec))
   }
 
   /**

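With the grammar no longer producing an `identifierList`, the visitor reduces to 
a direct parse-tree-to-command mapping. Roughly, `TRUNCATE TABLE t PARTITION 
(c='1', d='2')` now yields a plan like this (a simplified sketch, not exact 
runtime output):

    import org.apache.spark.sql.catalyst.TableIdentifier
    import org.apache.spark.sql.execution.command.TruncateTableCommand

    // The partition spec maps partition column names to string values.
    TruncateTableCommand(
      TableIdentifier("t"),
      Some(Map("c" -> "1", "d" -> "2")))
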
http://git-wip-us.apache.org/repos/asf/spark/blob/ee682fe2/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 13e63a1..bef4c92 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -278,7 +278,7 @@ case class LoadDataCommand(
  *
  * The syntax of this command is:
  * {{{
- *  TRUNCATE TABLE tablename [PARTITION (partcol1=val1, partcol2=val2 ...)]
+ *   TRUNCATE TABLE tablename [PARTITION (partcol1=val1, partcol2=val2 ...)]
  * }}}
  */
 case class TruncateTableCommand(
@@ -288,9 +288,10 @@ case class TruncateTableCommand(
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
     if (!catalog.tableExists(tableName)) {
-      logError(s"table '$tableName' in TRUNCATE TABLE does not exist.")
+      throw new AnalysisException(s"Table '$tableName' in TRUNCATE TABLE does not exist.")
     } else if (catalog.isTemporaryTable(tableName)) {
-      logError(s"table '$tableName' in TRUNCATE TABLE is a temporary table.")
+      throw new AnalysisException(
+        s"Operation not allowed: TRUNCATE TABLE on temporary tables: 
'$tableName'")
     } else {
       val locations = if (partitionSpec.isDefined) {
        catalog.listPartitions(tableName, partitionSpec).map(_.storage.locationUri)

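Both new error paths can be triggered from a session; a small sketch against the 
2.0-era API (the view name is invented for illustration):

    import org.apache.spark.sql.AnalysisException

    spark.range(10).createOrReplaceTempView("tmp_view")
    try {
      spark.sql("TRUNCATE TABLE tmp_view")
    } catch {
      // Message is along the lines of:
      //   Operation not allowed: TRUNCATE TABLE on temporary tables: 'tmp_view'
      case e: AnalysisException => println(e.getMessage)
    }
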
http://git-wip-us.apache.org/repos/asf/spark/blob/ee682fe2/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index df62ba0..6f374d7 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -289,10 +289,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
 
       val testResults = sql("SELECT * FROM non_part_table").collect()
 
-      intercept[ParseException] {
-        sql("TRUNCATE TABLE non_part_table COLUMNS (employeeID)")
-      }
-
       sql("TRUNCATE TABLE non_part_table")
       checkAnswer(sql("SELECT * FROM non_part_table"), Seq.empty[Row])
 
@@ -320,10 +316,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
         sql("SELECT employeeID, employeeName FROM part_table WHERE c = '2' AND 
d = '2'"),
         testResults)
 
-      intercept[ParseException] {
-        sql("TRUNCATE TABLE part_table PARTITION(c='1', d='1') COLUMNS 
(employeeID)")
-      }
-
       sql("TRUNCATE TABLE part_table PARTITION(c='1', d='1')")
       checkAnswer(
         sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND 
d = '1'"),
@@ -332,10 +324,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
         sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND 
d = '2'"),
         testResults)
 
-      intercept[ParseException] {
-        sql("TRUNCATE TABLE part_table PARTITION(c='1') COLUMNS (employeeID)")
-      }
-
       sql("TRUNCATE TABLE part_table PARTITION(c='1')")
       checkAnswer(
         sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1'"),

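The removed blocks intercepted a ParseException for syntax that no longer exists. 
If explicit coverage of the new exception-throwing paths were wanted instead, a 
test along these lines would fit the suite (a sketch only, reusing the suite's 
`sql` helper; the table name is invented):

    import org.apache.spark.sql.AnalysisException

    intercept[AnalysisException] {
      sql("TRUNCATE TABLE table_that_does_not_exist")
    }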
