This is an automated email from the ASF dual-hosted git repository.

yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 4b2a633cddb [HUDI-8504] Fix missing database config when building Hudi configs in Spark (#12238)
4b2a633cddb is described below

commit 4b2a633cddb57234385ab3ef037b12df0107f9dc
Author: fhan <aaron.han.1...@gmail.com>
AuthorDate: Sat Nov 16 07:48:48 2024 +0800

    [HUDI-8504] Fix missing database config when building Hudi configs in Spark (#12238)
    
    Co-authored-by: fhan <yfhan...@jd.com>
---
 .../scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala   | 5 +++++
 .../org/apache/spark/sql/hudi/TestProvidesHoodieConfig.scala     | 9 +++++++--
 2 files changed, 12 insertions(+), 2 deletions(-)
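
For context, the change threads the catalog table's database through the Spark write options alongside the table name, which was already set. A minimal Scala sketch of the resulting pattern (the `opts` value and comments are illustrative; DATABASE_NAME, TBL_NAME, and hoodieCatalogTable come from the patch below):

    import org.apache.hudi.common.table.HoodieTableConfig.DATABASE_NAME
    import org.apache.hudi.config.HoodieWriteConfig.TBL_NAME

    // Previously only the table name reached the write config; the database
    // was dropped. The patch adds DATABASE_NAME to each overriding options map:
    val opts = Map(
      "path" -> hoodieCatalogTable.tableLocation,
      DATABASE_NAME.key -> hoodieCatalogTable.table.database, // e.g. "hudi_database"
      TBL_NAME.key -> hoodieCatalogTable.tableName)           // e.g. "hudi_table"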

diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala
index 545e220e89f..7656e611463 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala
@@ -26,6 +26,7 @@ import org.apache.hudi.common.model.{DefaultHoodieRecordPayload, HoodieRecordMer
 import org.apache.hudi.common.table.HoodieTableConfig
 import org.apache.hudi.common.util.{ReflectionUtils, StringUtils}
 import org.apache.hudi.config.{HoodieIndexConfig, HoodieInternalConfig, HoodieWriteConfig}
+import org.apache.hudi.common.table.HoodieTableConfig.DATABASE_NAME
 import org.apache.hudi.config.HoodieWriteConfig.TBL_NAME
 import org.apache.hudi.hive.{HiveSyncConfig, HiveSyncConfigHolder, MultiPartKeysValueExtractor}
 import org.apache.hudi.hive.ddl.HiveSyncMode
@@ -82,6 +83,7 @@ trait ProvidesHoodieConfig extends Logging {
     val overridingOpts = Map[String, String](
       "path" -> hoodieCatalogTable.tableLocation,
       RECORDKEY_FIELD.key -> hoodieCatalogTable.primaryKeys.mkString(","),
+      DATABASE_NAME.key -> hoodieCatalogTable.table.database,
       TBL_NAME.key -> hoodieCatalogTable.tableName,
       PRECOMBINE_FIELD.key -> preCombineField,
       HIVE_STYLE_PARTITIONING.key -> tableConfig.getHiveStylePartitioningEnable,
@@ -328,6 +330,7 @@ trait ProvidesHoodieConfig extends Logging {
     val overridingOpts = extraOptions ++ Map(
       "path" -> path,
       TABLE_TYPE.key -> tableType,
+      DATABASE_NAME.key -> hoodieCatalogTable.table.database,
       TBL_NAME.key -> hoodieCatalogTable.tableName,
       OPERATION.key -> operation,
       HIVE_STYLE_PARTITIONING.key -> hiveStylePartitioningEnable,
@@ -422,6 +425,7 @@ trait ProvidesHoodieConfig extends Logging {
     val overridingOpts = Map(
       "path" -> hoodieCatalogTable.tableLocation,
       TBL_NAME.key -> hoodieCatalogTable.tableName,
+      DATABASE_NAME.key -> hoodieCatalogTable.table.database,
       TABLE_TYPE.key -> hoodieCatalogTable.tableTypeName,
       OPERATION.key -> DataSourceWriteOptions.DELETE_PARTITION_OPERATION_OPT_VAL,
       PARTITIONS_TO_DELETE.key -> partitionsToDrop,
@@ -471,6 +475,7 @@ trait ProvidesHoodieConfig extends Logging {
       "path" -> path,
       RECORDKEY_FIELD.key -> hoodieCatalogTable.primaryKeys.mkString(","),
       TBL_NAME.key -> tableConfig.getTableName,
+      DATABASE_NAME.key -> hoodieCatalogTable.table.database,
       HIVE_STYLE_PARTITIONING.key -> tableConfig.getHiveStylePartitioningEnable,
       URL_ENCODE_PARTITIONING.key -> tableConfig.getUrlEncodePartitioning,
       OPERATION.key -> DataSourceWriteOptions.DELETE_OPERATION_OPT_VAL,
diff --git a/hudi-spark-datasource/hudi-spark-common/src/test/scala/org/apache/spark/sql/hudi/TestProvidesHoodieConfig.scala b/hudi-spark-datasource/hudi-spark-common/src/test/scala/org/apache/spark/sql/hudi/TestProvidesHoodieConfig.scala
index 23a96f52180..95f126e2cd9 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/test/scala/org/apache/spark/sql/hudi/TestProvidesHoodieConfig.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/test/scala/org/apache/spark/sql/hudi/TestProvidesHoodieConfig.scala
@@ -24,8 +24,9 @@ import org.apache.hudi.common.config.TypedProperties
 import org.apache.hudi.common.table.HoodieTableConfig
 import org.apache.hudi.hive.HiveSyncConfig
 import org.apache.hudi.keygen.{ComplexKeyGenerator, CustomKeyGenerator}
+import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.{RuntimeConfig, SQLContext, SparkSession}
-import org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable
+import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType, HoodieCatalogTable}
 import org.apache.spark.sql.internal.{SQLConf, SessionState, StaticSQLConf}
 import org.apache.spark.sql.types.StructType
 import org.junit.jupiter.api.Assertions.assertEquals
@@ -95,7 +96,11 @@ class TestProvidesHoodieConfig {
     when(mockCatalog.preCombineKey).thenCallRealMethod()
     when(mockCatalog.partitionSchema).thenReturn(StructType(Nil))
     when(mockCatalog.primaryKeys).thenReturn(Array("key"))
-    when(mockCatalog.tableName).thenReturn("hudi_table")
+    when(mockCatalog.table).thenReturn(CatalogTable.apply(
+      TableIdentifier.apply("hudi_table", Option.apply("hudi_database")),
+      CatalogTableType.EXTERNAL,
+      CatalogStorageFormat.empty,
+      StructType(Nil)))
     val props = new TypedProperties()
     props.setProperty(HoodieTableConfig.PRECOMBINE_FIELD.key, "segment")
     val mockTableConfig = spy(classOf[HoodieTableConfig])
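
The updated test backs the mocked HoodieCatalogTable with a real CatalogTable so that hoodieCatalogTable.table.database resolves to a concrete value. A self-contained sketch of that construction (the final assert is illustrative, not part of the patch):

    import org.apache.spark.sql.catalyst.TableIdentifier
    import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
    import org.apache.spark.sql.types.StructType

    // Minimal external table carrying both a database and a table name,
    // mirroring the mock setup in TestProvidesHoodieConfig.
    val table = CatalogTable(
      identifier = TableIdentifier("hudi_table", Some("hudi_database")),
      tableType = CatalogTableType.EXTERNAL,
      storage = CatalogStorageFormat.empty,
      schema = StructType(Nil))

    assert(table.database == "hudi_database") // the value DATABASE_NAME picks up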
