This is an automated email from the ASF dual-hosted git repository.

wangzhen pushed a commit to branch branch-1.9
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/branch-1.9 by this push:
     new db0dd0186 [KYUUBI #6215] Improve DropIgnoreNonexistent rule for Spark 3.5
db0dd0186 is described below

commit db0dd01869fd164a560c79ca45492d023e2d86d3
Author: wforget <[email protected]>
AuthorDate: Fri Mar 29 10:51:46 2024 +0800

    [KYUUBI #6215] Improve DropIgnoreNonexistent rule for Spark 3.5
    
    # :mag: Description
    ## Issue References 🔗
    
    This pull request fixes #
    
    ## Describe Your Solution 🔧
    
    Improve the DropIgnoreNonexistent rule for Spark 3.5 so that DROP TABLE, DROP VIEW, and DROP (TEMPORARY) FUNCTION statements without IF EXISTS no longer fail when the target object does not exist.
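    
    The new cases cover both the v1 commands and the v2 logical plans. Below is a minimal sketch of the rewrite pattern, assuming only the stock Spark 3.5 Catalyst API; it is not the actual Kyuubi rule, which also matches the v1 commands and is gated behind a Kyuubi config:
    
    ```scala
    import org.apache.spark.sql.catalyst.plans.logical.{DropTable, DropView, LogicalPlan}
    import org.apache.spark.sql.catalyst.rules.Rule
    
    // Sketch only: force ifExists = true on the v2 DROP TABLE / DROP VIEW plans
    // so that dropping a missing object no longer fails analysis.
    object ForceIfExistsSketch extends Rule[LogicalPlan] {
      override def apply(plan: LogicalPlan): LogicalPlan = plan match {
        case d @ DropTable(_, false, _) => d.copy(ifExists = true)
        case d @ DropView(_, false) => d.copy(ifExists = true)
        case other => other
      }
    }
    ```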
    
    ## Types of changes :bookmark:
    
    - [ ] Bugfix (non-breaking change which fixes an issue)
    - [X] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing functionality to change)
    
    ## Test Plan 🧪
    
    #### Behavior Without This Pull Request :coffin:
    
    #### Behavior With This Pull Request :tada:
    
    #### Related Unit Tests
    
    DropIgnoreNonexistentSuite
    
    ---
    
    # Checklist 📝
    
    - [X] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    **Be nice. Be informative.**
    
    Closes #6215 from wForget/hotfix2.
    
    Closes #6215
    
    cb1d34de1 [wforget] Improve DropIgnoreNonexistent rule for spark 3.5
    
    Authored-by: wforget <[email protected]>
    Signed-off-by: wforget <[email protected]>
    (cherry picked from commit ad612349fb5e04ed067ac6cd853f200c2e0d8835)
    Signed-off-by: wforget <[email protected]>
---
 .../org/apache/kyuubi/sql/DropIgnoreNonexistent.scala     | 11 +++++++++--
 .../org/apache/spark/sql/DropIgnoreNonexistentSuite.scala | 15 ++++++++++++++-
 2 files changed, 23 insertions(+), 3 deletions(-)

diff --git a/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala b/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala
index e33632b8b..26b4b5b94 100644
--- a/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala
@@ -18,9 +18,9 @@ package org.apache.kyuubi.sql
 
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.analysis.{UnresolvedFunctionName, UnresolvedRelation}
-import org.apache.spark.sql.catalyst.plans.logical.{DropFunction, DropNamespace, LogicalPlan, NoopCommand, UncacheTable}
+import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.execution.command.{AlterTableDropPartitionCommand, DropTableCommand}
+import org.apache.spark.sql.execution.command.{AlterTableDropPartitionCommand, DropFunctionCommand, DropTableCommand}
 
 import org.apache.kyuubi.sql.KyuubiSQLConf._
 
@@ -33,8 +33,15 @@ case class DropIgnoreNonexistent(session: SparkSession) extends Rule[LogicalPlan
           i.copy(ifExists = true)
         case i @ DropTableCommand(_, false, _, _) =>
           i.copy(ifExists = true)
+        case i @ DropTable(_, false, _) =>
+          i.copy(ifExists = true)
         case i @ DropNamespace(_, false, _) =>
           i.copy(ifExists = true)
+        case i @ DropFunctionCommand(_, false, _) =>
+          i.copy(ifExists = true)
+        case i @ DropView(_, false) =>
+          i.copy(ifExists = true)
+        // refer: org.apache.spark.sql.catalyst.analysis.ResolveCommandsWithIfExists
         case UncacheTable(u: UnresolvedRelation, false, _) =>
           NoopCommand("UNCACHE TABLE", u.multipartIdentifier)
         case DropFunction(u: UnresolvedFunctionName, false) =>
diff --git a/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala b/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala
index bbc61fb44..1899a1ef7 100644
--- a/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql
 
-import org.apache.spark.sql.catalyst.plans.logical.{DropNamespace, NoopCommand}
+import org.apache.spark.sql.catalyst.plans.logical.{DropNamespace, DropTable, NoopCommand}
 import org.apache.spark.sql.execution.command._
 
 import org.apache.kyuubi.sql.KyuubiSQLConf
@@ -29,10 +29,23 @@ class DropIgnoreNonexistentSuite extends KyuubiSparkSQLExtensionTest {
       val df1 = sql("DROP DATABASE nonexistent_database")
       assert(df1.queryExecution.analyzed.asInstanceOf[DropNamespace].ifExists == true)
 
+      // drop nonexistent table
+      val df2 = sql("DROP TABLE nonexistent_table")
+      assert(df2.queryExecution.analyzed.asInstanceOf[DropTable].ifExists == true)
+
+      // drop nonexistent view
+      val df3 = sql("DROP VIEW nonexistent_view")
+      assert(df3.queryExecution.analyzed.asInstanceOf[DropTableCommand].isView == true &&
+        df3.queryExecution.analyzed.asInstanceOf[DropTableCommand].ifExists == true)
+
       // drop nonexistent function
       val df4 = sql("DROP FUNCTION nonexistent_function")
       assert(df4.queryExecution.analyzed.isInstanceOf[NoopCommand])
 
+      // drop nonexistent temporary function
+      val df5 = sql("DROP TEMPORARY FUNCTION nonexistent_temp_function")
+      assert(df5.queryExecution.analyzed.asInstanceOf[DropFunctionCommand].ifExists == true)
+
       // drop nonexistent PARTITION
       withTable("test") {
         sql("CREATE TABLE IF NOT EXISTS test(i int) PARTITIONED BY (p int)")

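For reference, a hedged usage sketch of the behavior this rule enables. The extension class name and config key below are assumptions based on the kyuubi-extension-spark-3-5 module, not taken from this commit; check KyuubiSQLConf.DROP_IGNORE_NONEXISTENT for the actual key in your Kyuubi version.

```scala
import org.apache.spark.sql.SparkSession

// Sketch only: assumed extension class and config key; verify against your
// Kyuubi version before use.
val spark = SparkSession.builder()
  .master("local[1]")
  .config("spark.sql.extensions", "org.apache.kyuubi.sql.KyuubiSparkSQLExtension")
  .config("spark.sql.optimizer.dropIgnoreNonexistent", "true")
  .getOrCreate()

// With the rule active, these statements (the ones exercised by
// DropIgnoreNonexistentSuite) no longer fail when the object does not exist.
spark.sql("DROP TABLE nonexistent_table")
spark.sql("DROP VIEW nonexistent_view")
spark.sql("DROP TEMPORARY FUNCTION nonexistent_temp_function")
```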