This is an automated email from the ASF dual-hosted git repository.

codope pushed a commit to branch release-0.12.0
in repository https://gitbox.apache.org/repos/asf/hudi.git

commit 9091202a5f04a6e766953cfe51500dbb7ad53930
Author: KnightChess <[email protected]>
AuthorDate: Fri Aug 5 09:28:08 2022 +0800

    [HUDI-4530] fix default payloadclass in mor is different with cow (#6288)
---
 .../src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala | 3 +--
 .../test/scala/org/apache/spark/sql/hudi/TestHoodieOptionConfig.scala | 4 ++--
 .../src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala | 3 ++-
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
index 8a9486ea45..732367cf5a 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.hudi
 
 import org.apache.hudi.DataSourceWriteOptions
 import org.apache.hudi.avro.HoodieAvroUtils.getRootLevelFieldName
-import org.apache.hudi.common.model.DefaultHoodieRecordPayload
 import org.apache.hudi.common.table.HoodieTableConfig
 import org.apache.hudi.common.util.ValidationUtils
 import org.apache.spark.sql.SparkSession
@@ -67,7 +66,7 @@ object HoodieOptionConfig {
     .withSqlKey("payloadClass")
     .withHoodieKey(DataSourceWriteOptions.PAYLOAD_CLASS_NAME.key)
     .withTableConfigKey(HoodieTableConfig.PAYLOAD_CLASS_NAME.key)
-    .defaultValue(classOf[DefaultHoodieRecordPayload].getName)
+    .defaultValue(DataSourceWriteOptions.PAYLOAD_CLASS_NAME.defaultValue())
     .build()
 
   /**
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestHoodieOptionConfig.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestHoodieOptionConfig.scala
index 14c2245d5b..44c23d146c 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestHoodieOptionConfig.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestHoodieOptionConfig.scala
@@ -34,12 +34,12 @@ class TestHoodieOptionConfig extends SparkClientFunctionalTestHarness {
     assertTrue(with1.size == 3)
     assertTrue(with1("primaryKey") == "id")
     assertTrue(with1("type") == "cow")
-    assertTrue(with1("payloadClass") == classOf[DefaultHoodieRecordPayload].getName)
+    assertTrue(with1("payloadClass") == classOf[OverwriteWithLatestAvroPayload].getName)
 
     val ops2 = Map("primaryKey" -> "id",
       "preCombineField" -> "timestamp",
       "type" -> "mor",
-      "payloadClass" -> classOf[OverwriteWithLatestAvroPayload].getName
+      "payloadClass" -> classOf[DefaultHoodieRecordPayload].getName
     )
     val with2 = HoodieOptionConfig.withDefaultSqlOptions(ops2)
     assertTrue(ops2 == with2)
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala
index 58c808d28a..5e826973d2 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala
@@ -372,7 +372,8 @@ class TestMergeIntoTable extends HoodieSparkSqlTestBase {
              | tblproperties (
              |  type = '$tableType',
              |  primaryKey = 'id',
-             |  preCombineField = 'v'
+             |  preCombineField = 'v',
+             |  hoodie.compaction.payload.class = 'org.apache.hudi.common.model.DefaultHoodieRecordPayload'
              | )
              | partitioned by(dt)
              | location '${tmp.getCanonicalPath}/$tableName1'

Reply via email to