This is an automated email from the ASF dual-hosted git repository.

yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 243f404a0d2 [HUDI-9040] Set the correct table path when renaming 
tables (#12848)
243f404a0d2 is described below

commit 243f404a0d24d69018f571ceea977ae5fee0a9df
Author: Shawn Chang <[email protected]>
AuthorDate: Mon Feb 24 22:18:09 2025 -0800

    [HUDI-9040] Set the correct table path when renaming tables (#12848)
---
 .../command/AlterHoodieTableRenameCommand.scala    |  3 +-
 .../apache/spark/sql/hudi/ddl/TestAlterTable.scala | 72 +++++++++++-----------
 2 files changed, 38 insertions(+), 37 deletions(-)

diff --git 
a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/AlterHoodieTableRenameCommand.scala
 
b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/AlterHoodieTableRenameCommand.scala
index 34dcf131b1b..a87572dbf95 100644
--- 
a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/AlterHoodieTableRenameCommand.scala
+++ 
b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/AlterHoodieTableRenameCommand.scala
@@ -52,8 +52,7 @@ case class AlterHoodieTableRenameCommand(
       // update table properties path in every op
       if (hoodieCatalogTable.table.properties.contains("path")) {
         val catalogTable = 
sparkSession.sessionState.catalog.getTableMetadata(newName)
-        val path = catalogTable.storage.locationUri.get.getPath
-        AlterTableSetPropertiesCommand(newName, Map("path" -> path), 
isView).run(sparkSession)
+        AlterTableSetPropertiesCommand(newName, Map("path" -> 
catalogTable.location.toString), isView).run(sparkSession)
       }
 
     }
diff --git 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/ddl/TestAlterTable.scala
 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/ddl/TestAlterTable.scala
index a6c85837899..6bd084411cd 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/ddl/TestAlterTable.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/ddl/TestAlterTable.scala
@@ -310,40 +310,44 @@ class TestAlterTable extends HoodieSparkSqlTestBase {
   }
 
   test("Test Alter Rename Table") {
+    Seq("cow", "mor").foreach { tableType =>
+      val tableName = generateTableName
+      // Create table
+      spark.sql(
+        s"""
+           |create table $tableName (
+           |  id int,
+           |  name string,
+           |  price double,
+           |  ts long
+           |) using hudi
+           | tblproperties (
+           |  type = '$tableType',
+           |  primaryKey = 'id',
+           |  preCombineField = 'ts'
+           | )
+     """.stripMargin)
+
+      // alter table name.
+      val newTableName = s"${tableName}_1"
+      val oldLocation = spark.sessionState.catalog.getTableMetadata(new 
TableIdentifier(tableName)).properties.get("path")
+      spark.sql(s"alter table $tableName rename to $newTableName")
+      val newLocation = spark.sessionState.catalog.getTableMetadata(new 
TableIdentifier(newTableName)).properties.get("path")
+      // only hoodieCatalog will set path to tblp
+      if (oldLocation.nonEmpty) {
+        assertResult(false)(
+          newLocation.equals(oldLocation)
+        )
+      } else {
+        assertResult(None)(newLocation)
+      }
+    }
+  }
+
+  test("Test Alter Rename Table With Location") {
     withTempDir { tmp =>
       Seq("cow", "mor").foreach { tableType =>
         val tableName = generateTableName
-        // Create table
-        spark.sql(
-          s"""
-             |create table $tableName (
-             |  id int,
-             |  name string,
-             |  price double,
-             |  ts long
-             |) using hudi
-             | tblproperties (
-             |  type = '$tableType',
-             |  primaryKey = 'id',
-             |  preCombineField = 'ts'
-             | )
-       """.stripMargin)
-
-        // alter table name.
-        val newTableName = s"${tableName}_1"
-        val oldLocation = spark.sessionState.catalog.getTableMetadata(new 
TableIdentifier(tableName)).properties.get("path")
-        spark.sql(s"alter table $tableName rename to $newTableName")
-        val newLocation = spark.sessionState.catalog.getTableMetadata(new 
TableIdentifier(newTableName)).properties.get("path")
-        // only hoodieCatalog will set path to tblp
-        if (oldLocation.nonEmpty) {
-          assertResult(false)(
-            newLocation.equals(oldLocation)
-          )
-        } else {
-          assertResult(None) (newLocation)
-        }
-
-
         // Create table with location
         val locTableName = s"${tableName}_loc"
         val tablePath = s"${tmp.getCanonicalPath}/$locTableName"
@@ -371,11 +375,9 @@ class TestAlterTable extends HoodieSparkSqlTestBase {
         val newLocation2 = spark.sessionState.catalog.getTableMetadata(new 
TableIdentifier(newLocTableName))
           .properties.get("path")
         if (oldLocation2.nonEmpty) {
-          // Remove the impact of the schema.
-          val oldLocation2Path = new 
Path(oldLocation2.get.stripPrefix("file:"))
-          val newLocation2Path = new 
Path(newLocation2.get.stripPrefix("file:"))
+          // the scheme and authority need to match as well
           assertResult(true)(
-            newLocation2Path.equals(oldLocation2Path)
+            oldLocation2.get.equals(newLocation2.get)
           )
         } else {
           assertResult(None) (newLocation2)

Reply via email to the mailing list to comment on this commit.