This is an automated email from the ASF dual-hosted git repository.

yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 358c69c913a [HUDI-8396] Bump Spark version from 3.5.1 to 3.5.3 (#12129)
358c69c913a is described below

commit 358c69c913ad7c830e56e2c8fd61252c86d5626d
Author: Zouxxyy <[email protected]>
AuthorDate: Sun Oct 20 05:08:06 2024 +0800

    [HUDI-8396] Bump Spark version from 3.5.1 to 3.5.3 (#12129)
---
 .../spark/sql/hudi/command/DropHoodieTableCommand.scala      |  7 ++-----
 .../org/apache/spark/sql/hudi/command/IndexCommands.scala    | 12 +++---------
 pom.xml                                                      |  2 +-
 3 files changed, 6 insertions(+), 15 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/DropHoodieTableCommand.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/DropHoodieTableCommand.scala
index 09e05ff7923..5151bb9996c 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/DropHoodieTableCommand.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/DropHoodieTableCommand.scala
@@ -24,7 +24,7 @@ import org.apache.hudi.common.util.ConfigUtils
 import org.apache.hudi.hadoop.fs.HadoopFSUtils
 import org.apache.hudi.storage.{HoodieStorageUtils, StoragePath}
 import org.apache.spark.sql._
-import org.apache.spark.sql.catalyst.{QualifiedTableName, TableIdentifier}
+import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog._
 
 /**
@@ -45,10 +45,7 @@ case class DropHoodieTableCommand(
     if (!sparkSession.catalog.tableExists(tableIdentifier.unquotedString)) {
       sparkSession.catalog.refreshTable(tableIdentifier.unquotedString)
     }
-    val qualifiedTableName = QualifiedTableName(
-      tableIdentifier.database.getOrElse(sparkSession.sessionState.catalog.getCurrentDatabase),
-      tableIdentifier.table)
-    sparkSession.sessionState.catalog.invalidateCachedTable(qualifiedTableName)
+    sparkSession.sessionState.catalog.invalidateCachedTable(tableIdentifier)
 
     dropTableInCatalog(sparkSession, tableIdentifier, ifExists, purge)
 
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/IndexCommands.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/IndexCommands.scala
index 2272bd2b394..14bb934cec4 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/IndexCommands.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/IndexCommands.scala
@@ -29,7 +29,7 @@ import org.apache.hudi.metadata.MetadataPartitionType
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.catalyst.catalog.CatalogTable
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.catalyst.{QualifiedTableName, TableIdentifier}
+import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.hudi.HoodieSqlCommonUtils.getTableLocation
 import org.apache.spark.sql.{Row, SparkSession}
 
@@ -62,10 +62,7 @@ case class CreateIndexCommand(table: CatalogTable,
 
     // Invalidate cached table for queries do not access related table
     // through {@code DefaultSource}
-    val qualifiedTableName = QualifiedTableName(
-      tableId.database.getOrElse(sparkSession.sessionState.catalog.getCurrentDatabase),
-      tableId.table)
-    sparkSession.sessionState.catalog.invalidateCachedTable(qualifiedTableName)
+    sparkSession.sessionState.catalog.invalidateCachedTable(tableId)
     Seq.empty
   }
 }
@@ -88,10 +85,7 @@ case class DropIndexCommand(table: CatalogTable,
 
     // Invalidate cached table for queries do not access related table
     // through {@code DefaultSource}
-    val qualifiedTableName = QualifiedTableName(
-      tableId.database.getOrElse(sparkSession.sessionState.catalog.getCurrentDatabase),
-      tableId.table)
-    sparkSession.sessionState.catalog.invalidateCachedTable(qualifiedTableName)
+    sparkSession.sessionState.catalog.invalidateCachedTable(tableId)
     Seq.empty
   }
 }
diff --git a/pom.xml b/pom.xml
index 58d48320d12..979f314602a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -165,7 +165,7 @@
     <rocksdbjni.version>7.5.3</rocksdbjni.version>
     <spark33.version>3.3.4</spark33.version>
     <spark34.version>3.4.3</spark34.version>
-    <spark35.version>3.5.1</spark35.version>
+    <spark35.version>3.5.3</spark35.version>
     <hudi.spark.module>hudi-spark3.5.x</hudi.spark.module>
     <hudi.spark.common.module>hudi-spark3-common</hudi.spark.common.module>
     <avro.version>1.8.2</avro.version>

Reply via email to