This is an automated email from the ASF dual-hosted git repository.

jinsongzhou pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/amoro.git


The following commit(s) were added to refs/heads/master by this push:
     new 5ce6eea96 [AMORO-3387]fix: create table like using mixed_hive (#3387)
5ce6eea96 is described below

commit 5ce6eea96147c587239dc3b0670451b9a04b1c76
Author: Wang Tao <[email protected]>
AuthorDate: Mon Apr 14 14:09:21 2025 +0800

    [AMORO-3387]fix: create table like using mixed_hive (#3387)
    
    * fix: create table like using mixed_hive
    
    * fix: create table like using mixed_hive
    
    * fix: support create table like using mixed_hive
    
    * fix: support create table like using mixed_hive
    
    * fix: support create table like using mixed_hive
    
    * fix: support create table like using mixed_hive
    
    ---------
    
    Co-authored-by: ZhouJinsong <[email protected]>
---
 .../amoro-mixed-spark/amoro-mixed-spark-3-common/pom.xml     |  1 -
 .../sql/catalyst/analysis/RewriteMixedFormatCommand.scala    |  8 ++++----
 .../sql/catalyst/analysis/RewriteMixedFormatCommand.scala    | 12 +++++++-----
 .../sql/catalyst/analysis/RewriteMixedFormatCommand.scala    | 12 +++++++-----
 4 files changed, 18 insertions(+), 15 deletions(-)

diff --git 
a/amoro-format-mixed/amoro-mixed-spark/amoro-mixed-spark-3-common/pom.xml 
b/amoro-format-mixed/amoro-mixed-spark/amoro-mixed-spark-3-common/pom.xml
index e3ca63138..fa53c6122 100644
--- a/amoro-format-mixed/amoro-mixed-spark/amoro-mixed-spark-3-common/pom.xml
+++ b/amoro-format-mixed/amoro-mixed-spark/amoro-mixed-spark-3-common/pom.xml
@@ -33,7 +33,6 @@
     <url>https://amoro.apache.org</url>
 
     <properties>
-        <iceberg.version>1.4.3</iceberg.version>
         <spark.version>3.2.4</spark.version>
         <scala.version>2.12.15</scala.version>
     </properties>
diff --git 
a/amoro-format-mixed/amoro-mixed-spark/v3.2/amoro-mixed-spark-3.2/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
 
b/amoro-format-mixed/amoro-mixed-spark/v3.2/amoro-mixed-spark-3.2/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
index e91c31e34..7eda1ce6c 100644
--- 
a/amoro-format-mixed/amoro-mixed-spark/v3.2/amoro-mixed-spark-3.2/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
+++ 
b/amoro-format-mixed/amoro-mixed-spark/v3.2/amoro-mixed-spark-3.2/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.connector.catalog.TableCatalog
 import org.apache.spark.sql.execution.command.CreateTableLikeCommand
 
-import org.apache.amoro.spark.{MixedFormatSparkCatalog, 
MixedFormatSparkSessionCatalog}
+import org.apache.amoro.spark.{MixedFormatSparkCatalog, 
MixedFormatSparkSessionCatalog, SparkUnifiedCatalog, SparkUnifiedSessionCatalog}
 import org.apache.amoro.spark.mixed.MixedSessionCatalogBase
 import 
org.apache.amoro.spark.sql.MixedFormatExtensionUtils.buildCatalogAndIdentifier
 import 
org.apache.amoro.spark.sql.catalyst.plans.{AlterMixedFormatTableDropPartition, 
TruncateMixedFormatTable}
@@ -47,8 +47,8 @@ case class RewriteMixedFormatCommand(sparkSession: 
SparkSession) extends Rule[Lo
 
   private def isCreateMixedFormatTable(catalog: TableCatalog, provider: 
Option[String]): Boolean = {
     catalog match {
-      case _: MixedFormatSparkCatalog => true
-      case _: MixedFormatSparkSessionCatalog[_] =>
+      case _: MixedFormatSparkCatalog | _: MixedFormatSparkSessionCatalog[_]
+          | _: SparkUnifiedCatalog | _: SparkUnifiedSessionCatalog[_] =>
         provider.isDefined && 
MixedSessionCatalogBase.SUPPORTED_PROVIDERS.contains(
           provider.get.toLowerCase)
       case _ => false
@@ -95,7 +95,7 @@ case class RewriteMixedFormatCommand(sparkSession: 
SparkSession) extends Rule[Lo
                 
mixedSparkTable.table().asKeyedTable().primaryKeySpec().fieldNames()))
           case _ =>
         }
-        targetProperties += ("provider" -> "arctic")
+        targetProperties += ("provider" -> provider.get)
         CreateV2Table(
           targetCatalog,
           targetIdentifier,
diff --git 
a/amoro-format-mixed/amoro-mixed-spark/v3.3/amoro-mixed-spark-3.3/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
 
b/amoro-format-mixed/amoro-mixed-spark/v3.3/amoro-mixed-spark-3.3/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
index aa21253ab..6d35b1159 100644
--- 
a/amoro-format-mixed/amoro-mixed-spark/v3.3/amoro-mixed-spark-3.3/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
+++ 
b/amoro-format-mixed/amoro-mixed-spark/v3.3/amoro-mixed-spark-3.3/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.connector.catalog.TableCatalog
 import org.apache.spark.sql.execution.command.CreateTableLikeCommand
 
-import org.apache.amoro.spark.{MixedFormatSparkCatalog, 
MixedFormatSparkSessionCatalog}
+import org.apache.amoro.spark.{MixedFormatSparkCatalog, 
MixedFormatSparkSessionCatalog, SparkUnifiedCatalog, SparkUnifiedSessionCatalog}
 import org.apache.amoro.spark.mixed.MixedSessionCatalogBase
 import 
org.apache.amoro.spark.sql.MixedFormatExtensionUtils.buildCatalogAndIdentifier
 import 
org.apache.amoro.spark.sql.catalyst.plans.{AlterMixedFormatTableDropPartition, 
TruncateMixedFormatTable}
@@ -47,8 +47,8 @@ case class RewriteMixedFormatCommand(sparkSession: 
SparkSession) extends Rule[Lo
 
   private def isCreateMixedFormatTable(catalog: TableCatalog, provider: 
Option[String]): Boolean = {
     catalog match {
-      case _: MixedFormatSparkCatalog => true
-      case _: MixedFormatSparkSessionCatalog[_] =>
+      case _: MixedFormatSparkCatalog | _: MixedFormatSparkSessionCatalog[_]
+          | _: SparkUnifiedCatalog | _: SparkUnifiedSessionCatalog[_] =>
         provider.isDefined && 
MixedSessionCatalogBase.SUPPORTED_PROVIDERS.contains(
           provider.get.toLowerCase)
       case _ => false
@@ -88,7 +88,7 @@ case class RewriteMixedFormatCommand(sparkSession: 
SparkSession) extends Rule[Lo
         val (targetCatalog, targetIdentifier) = 
buildCatalogAndIdentifier(sparkSession, targetTable)
         val table = sourceCatalog.loadTable(sourceIdentifier)
         var targetProperties = properties
-        targetProperties += ("provider" -> "arctic")
+        targetProperties += ("provider" -> provider.get)
         table match {
           case keyedTable: MixedSparkTable =>
             keyedTable.table() match {
@@ -108,7 +108,9 @@ case class RewriteMixedFormatCommand(sparkSession: 
SparkSession) extends Rule[Lo
           comment = None,
           serde = None,
           external = false)
-        val seq: Seq[String] = Seq(targetTable.database.get, 
targetTable.identifier)
+        val seq: Seq[String] = Seq(
+          targetTable.database.getOrElse(sparkSession.catalog.currentDatabase),
+          targetTable.identifier)
         val name = ResolvedDBObjectName(targetCatalog, seq)
         CreateTable(name, table.schema(), table.partitioning(), tableSpec, 
ifNotExists)
       case _ => plan
diff --git 
a/amoro-format-mixed/amoro-mixed-spark/v3.5/amoro-mixed-spark-3.5/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
 
b/amoro-format-mixed/amoro-mixed-spark/v3.5/amoro-mixed-spark-3.5/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
index fb1b55fa9..b497a1ee0 100644
--- 
a/amoro-format-mixed/amoro-mixed-spark/v3.5/amoro-mixed-spark-3.5/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
+++ 
b/amoro-format-mixed/amoro-mixed-spark/v3.5/amoro-mixed-spark-3.5/src/main/scala/org/apache/amoro/spark/sql/catalyst/analysis/RewriteMixedFormatCommand.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog}
 import org.apache.spark.sql.execution.command.CreateTableLikeCommand
 
-import org.apache.amoro.spark.{MixedFormatSparkCatalog, 
MixedFormatSparkSessionCatalog}
+import org.apache.amoro.spark.{MixedFormatSparkCatalog, 
MixedFormatSparkSessionCatalog, SparkUnifiedCatalog, SparkUnifiedSessionCatalog}
 import org.apache.amoro.spark.mixed.MixedSessionCatalogBase
 import 
org.apache.amoro.spark.sql.MixedFormatExtensionUtils.buildCatalogAndIdentifier
 import 
org.apache.amoro.spark.sql.catalyst.plans.{AlterMixedFormatTableDropPartition, 
TruncateMixedFormatTable}
@@ -47,8 +47,8 @@ case class RewriteMixedFormatCommand(sparkSession: 
SparkSession) extends Rule[Lo
 
   private def isCreateMixedFormatTable(catalog: TableCatalog, provider: 
Option[String]): Boolean = {
     catalog match {
-      case _: MixedFormatSparkCatalog => true
-      case _: MixedFormatSparkSessionCatalog[_] =>
+      case _: MixedFormatSparkCatalog | _: MixedFormatSparkSessionCatalog[_]
+          | _: SparkUnifiedCatalog | _: SparkUnifiedSessionCatalog[_] =>
         provider.isDefined && 
MixedSessionCatalogBase.SUPPORTED_PROVIDERS.contains(
           provider.get.toLowerCase)
       case _ => false
@@ -90,7 +90,7 @@ case class RewriteMixedFormatCommand(sparkSession: 
SparkSession) extends Rule[Lo
         val (targetCatalog, targetIdentifier) = 
buildCatalogAndIdentifier(sparkSession, targetTable)
         val table = sourceCatalog.loadTable(sourceIdentifier)
         var targetProperties = properties
-        targetProperties += ("provider" -> "arctic")
+        targetProperties += ("provider" -> provider.get)
         table match {
           case keyedTable: MixedSparkTable =>
             keyedTable.table() match {
@@ -111,7 +111,9 @@ case class RewriteMixedFormatCommand(sparkSession: 
SparkSession) extends Rule[Lo
           serde = None,
           external = false)
         val identifier =
-          Identifier.of(Array.apply(targetTable.database.get), 
targetTable.identifier)
+          Identifier.of(
+            
Array.apply(targetTable.database.getOrElse(sparkSession.catalog.currentDatabase)),
+            targetTable.identifier)
         val name = ResolvedIdentifier(targetCatalog, identifier)
         CreateTable(name, table.schema(), table.partitioning(), tableSpec, 
ifNotExists)
       case _ => plan

Reply via email to