This is an automated email from the ASF dual-hosted git repository.

yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 2e2e39377e2 [HUDI-7056] Add config for merging using positions (#10022)
2e2e39377e2 is described below

commit 2e2e39377e2577bf357626d3347963bd0efd01ba
Author: Jon Vexler <jbvex...@gmail.com>
AuthorDate: Wed Nov 8 22:31:09 2023 -0500

    [HUDI-7056] Add config for merging using positions (#10022)
    
    Co-authored-by: Jonathan Vexler <=>
---
 .../java/org/apache/hudi/common/config/HoodieReaderConfig.java     | 7 +++++++
 .../main/scala/org/apache/hudi/HoodieHadoopFsRelationFactory.scala | 2 +-
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/hudi-common/src/main/java/org/apache/hudi/common/config/HoodieReaderConfig.java b/hudi-common/src/main/java/org/apache/hudi/common/config/HoodieReaderConfig.java
index 1738f75e9ec..c572cc21adc 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/config/HoodieReaderConfig.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/config/HoodieReaderConfig.java
@@ -58,4 +58,11 @@ public class HoodieReaderConfig extends HoodieConfig {
       .markAdvanced()
       .sinceVersion("1.0.0")
       .withDocumentation("Use engine agnostic file group reader if enabled");
+
+  public static final ConfigProperty<Boolean> MERGE_USE_RECORD_POSITIONS = ConfigProperty
+      .key("hoodie.merge.use.record.positions")
+      .defaultValue(false)
+      .markAdvanced()
+      .sinceVersion("1.0.0")
+      .withDocumentation("Whether to use positions in the block header for data blocks containing updates and delete blocks for merging.");
 }
diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieHadoopFsRelationFactory.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieHadoopFsRelationFactory.scala
index 50249d87d97..a49fee2b740 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieHadoopFsRelationFactory.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieHadoopFsRelationFactory.scala
@@ -171,7 +171,7 @@ abstract class HoodieBaseHadoopFsRelationFactory(val sqlContext: SQLContext,
 
   protected lazy val fileGroupReaderEnabled: Boolean = checkIfAConfigurationEnabled(HoodieReaderConfig.FILE_GROUP_READER_ENABLED)
 
-  protected lazy val shouldUseRecordPosition: Boolean = checkIfAConfigurationEnabled(HoodieWriteConfig.WRITE_RECORD_POSITIONS)
+  protected lazy val shouldUseRecordPosition: Boolean = checkIfAConfigurationEnabled(HoodieReaderConfig.MERGE_USE_RECORD_POSITIONS)
 
   protected def queryTimestamp: Option[String] =
     specifiedQueryTimestamp.orElse(toScalaOption(timeline.lastInstant()).map(_.getTimestamp))
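
For context, a minimal sketch (not part of this commit) of how the new reader-side config could be passed when reading a Hudi table through the Spark datasource. Only the key "hoodie.merge.use.record.positions" comes from the diff above; the Spark session setup and table path are placeholders for illustration.

    import org.apache.spark.sql.SparkSession

    // Hypothetical usage: enable position-based merging on read.
    // The app name and table path are placeholders, not from the commit.
    val spark = SparkSession.builder()
      .appName("hudi-position-merge-example")
      .getOrCreate()

    val df = spark.read
      .format("hudi")
      .option("hoodie.merge.use.record.positions", "true") // defaults to false
      .load("/tmp/hudi_table")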
