This is an automated email from the ASF dual-hosted git repository.

zhangyue19921010 pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/hudi-rs.git


The following commit(s) were added to refs/heads/main by this push:
     new f62c998a feat(core): add general config alias support with ordering 
fields as primary config (#598)
f62c998a is described below

commit f62c998acabc7a29479f3dcbfca7f59e9a065b14
Author: Shiyan Xu <[email protected]>
AuthorDate: Thu May 7 01:30:24 2026 -0500

    feat(core): add general config alias support with ordering fields as 
primary config (#598)
    
    * feat(core): add general config alias support with ordering fields fallback
    
    Add ConfigAlias mechanism to ConfigParser trait so any config can declare
    alternative keys. Use it to resolve hoodie.table.ordering.fields as a
    fallback for PrecombineField, fixing RecordMergeStrategy derivation for
    v9 Hudi tables.
    
    * refactor(core): make ordering fields the primary config with precombine 
as deprecated alias
    
    Swap alias direction so hoodie.table.ordering.fields is the canonical
    key and hoodie.table.precombine.field is a deprecated fallback. Add
    warn-once guard so deprecated alias warnings are not repeated. Handle
    comma-separated ordering fields by using the first value.
    
    * refactor(core): rename PrecombineField to OrderingField
    
    Align enum variant name with the new canonical config key
    hoodie.table.ordering.fields.
    
    * fix(core): rename OrderingField to OrderingFields for consistency
    
    * fix(core): reject multiple ordering fields with unsupported-config error
    
    Multiple comma-separated ordering fields require end-to-end support
    that is not yet implemented. Reject them explicitly instead of silently
    using only the first value.
    
    * feat(core): support multiple ordering fields for record merging
    
    OrderingFields config now returns a List of field names parsed from
    comma-separated values. Data-record lexsort uses all ordering fields
    for multi-column comparison. Event-time comparison (data vs deletes)
    uses the first ordering field, since Hudi delete records store a
    single orderingVal.
    
    * fix(core): reject multiple ordering fields until full support is added
    
    Keep the List parsing logic for OrderingFields config, but validate
    that only a single field is provided. Multiple ordering fields require
    end-to-end support across the merge, ordering, and delete paths.
    
    * test(core): add coverage tests for ConfigParser trait and ConfigAlias
    
    * refactor(core): use log-once crate for deprecated config alias warning
    
    * fix(core): remove needless borrow flagged by clippy
---
 Cargo.toml                                    |  1 +
 crates/core/Cargo.toml                        |  1 +
 crates/core/src/config/mod.rs                 | 98 +++++++++++++++++++++++++++
 crates/core/src/config/table.rs               | 90 +++++++++++++++++++-----
 crates/core/src/file_group/log_file/reader.rs |  2 +-
 crates/core/src/file_group/record_batches.rs  | 18 ++---
 crates/core/src/merge/ordering.rs             |  8 +--
 crates/core/src/merge/record_merger.rs        | 18 ++---
 crates/core/src/table/mod.rs                  | 14 ++--
 9 files changed, 205 insertions(+), 45 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index d64f2f1e..4ea73ec1 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -74,6 +74,7 @@ chrono = { version = "0.4" }
 chrono-tz = { version = "0.10" }
 lazy_static = { version = "1" }
 log = { version = "0.4" }
+log-once = { version = "0.4" }
 num-traits = { version = "0.2" }
 once_cell = { version = "1" }
 paste = { version = "1" }
diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml
index 2649dfde..b15f36fa 100644
--- a/crates/core/Cargo.toml
+++ b/crates/core/Cargo.toml
@@ -61,6 +61,7 @@ chrono = { workspace = true }
 chrono-tz = { workspace = true }
 lazy_static = { workspace = true }
 log = { workspace = true }
+log-once = { workspace = true }
 num-traits = { workspace = true }
 once_cell = { workspace = true }
 paste = { workspace = true }
diff --git a/crates/core/src/config/mod.rs b/crates/core/src/config/mod.rs
index 97bf489d..82e24d6a 100644
--- a/crates/core/src/config/mod.rs
+++ b/crates/core/src/config/mod.rs
@@ -40,6 +40,28 @@ pub use read_options::{QueryType, ReadOptions};
 
 pub const HUDI_CONF_DIR: &str = "HUDI_CONF_DIR";
 
+/// An alternative key for a configuration, optionally marked as deprecated.
+pub struct ConfigAlias {
+    pub key: &'static str,
+    pub deprecated: bool,
+}
+
+impl ConfigAlias {
+    pub const fn new(key: &'static str) -> Self {
+        Self {
+            key,
+            deprecated: false,
+        }
+    }
+
+    pub const fn deprecated(key: &'static str) -> Self {
+        Self {
+            key,
+            deprecated: true,
+        }
+    }
+}
+
 /// This defines some common APIs for working with configurations in Hudi.
 pub trait ConfigParser: AsRef<str> {
     /// Configuration value type.
@@ -52,11 +74,36 @@ pub trait ConfigParser: AsRef<str> {
         self.as_ref().to_string()
     }
 
+    /// Returns alternative keys for this configuration.
+    fn aliases(&self) -> &[ConfigAlias] {
+        &[]
+    }
+
     /// To indicate if the configuration is required or not, this helps in 
validation.
     fn is_required(&self) -> bool {
         false
     }
 
+    /// Resolve the raw string value from configs, checking the primary key 
first, then aliases.
+    fn resolve_raw_value<'a>(&self, configs: &'a HashMap<String, String>) -> 
Result<&'a str> {
+        if let Some(v) = configs.get(self.as_ref()) {
+            return Ok(v.as_str());
+        }
+        for alias in self.aliases() {
+            if let Some(v) = configs.get(alias.key) {
+                if alias.deprecated {
+                    log_once::warn_once!(
+                        "Config '{}' is deprecated; use '{}' instead",
+                        alias.key,
+                        self.as_ref()
+                    );
+                }
+                return Ok(v.as_str());
+            }
+        }
+        Err(NotFound(self.key()))
+    }
+
     /// Validate the configuration by parsing the given [String] value and 
check if it is required.
     fn validate(&self, configs: &HashMap<String, String>) -> Result<()> {
         match self.parse_value(configs) {
@@ -237,6 +284,57 @@ impl HudiConfigs {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use crate::config::table::HudiTableConfig;
+
+    #[test]
+    fn test_config_alias_constructors() {
+        let alias = ConfigAlias::new("key");
+        assert_eq!(alias.key, "key");
+        assert!(!alias.deprecated);
+
+        let alias = ConfigAlias::deprecated("old_key");
+        assert_eq!(alias.key, "old_key");
+        assert!(alias.deprecated);
+    }
+
+    #[test]
+    fn test_aliases_default_returns_empty() {
+        assert!(HudiTableConfig::TableName.aliases().is_empty());
+    }
+
+    #[test]
+    fn test_resolve_raw_value_primary_key() {
+        let mut configs = HashMap::new();
+        configs.insert("hoodie.table.name".to_string(), "trips".to_string());
+        let result = HudiTableConfig::TableName.resolve_raw_value(&configs);
+        assert_eq!(result.unwrap(), "trips");
+    }
+
+    #[test]
+    fn test_resolve_raw_value_deprecated_alias() {
+        let mut configs = HashMap::new();
+        configs.insert(
+            "hoodie.table.precombine.field".to_string(),
+            "ts".to_string(),
+        );
+        let result = 
HudiTableConfig::OrderingFields.resolve_raw_value(&configs);
+        assert_eq!(result.unwrap(), "ts");
+    }
+
+    #[test]
+    fn test_resolve_raw_value_not_found() {
+        let configs = HashMap::new();
+        let result = HudiTableConfig::TableName.resolve_raw_value(&configs);
+        assert!(matches!(result.unwrap_err(), ConfigError::NotFound(_)));
+    }
+
+    #[test]
+    fn test_try_get_returns_err_on_parse_failure() {
+        let hudi_configs =
+            HudiConfigs::new([(HudiTableConfig::PopulatesMetaFields.as_ref(), 
"not_a_bool")]);
+        let result = 
hudi_configs.try_get(HudiTableConfig::PopulatesMetaFields);
+        assert!(result.is_err());
+    }
 
     #[test]
     fn test_new_using_hashmap() {
diff --git a/crates/core/src/config/table.rs b/crates/core/src/config/table.rs
index bbe32ab7..f4d0559b 100644
--- a/crates/core/src/config/table.rs
+++ b/crates/core/src/config/table.rs
@@ -25,10 +25,8 @@ use strum_macros::{AsRefStr, EnumIter, IntoStaticStr};
 
 use crate::config::Result;
 use crate::config::error::ConfigError;
-use crate::config::error::ConfigError::{
-    InvalidValue, NotFound, ParseBool, ParseInt, UnsupportedValue,
-};
-use crate::config::{ConfigParser, HudiConfigValue};
+use crate::config::error::ConfigError::{InvalidValue, ParseBool, ParseInt, 
UnsupportedValue};
+use crate::config::{ConfigAlias, ConfigParser, HudiConfigValue};
 use crate::merge::RecordMergeStrategyValue;
 
 /// Configurations for Hudi tables, most of them are persisted in 
`hoodie.properties`.
@@ -86,9 +84,11 @@ pub enum HudiTableConfig {
     /// These fields also include the partition type which is used by custom 
key generators
     PartitionFields,
 
-    /// Field used in preCombining before actual write. By default, when two 
records have the same key value,
-    /// the largest value for the precombine field determined by 
Object.compareTo(..), is picked.
-    PrecombineField,
+    /// Fields used for ordering records during merge. When two records have 
the same key,
+    /// the record with the larger ordering field value is picked.
+    ///
+    /// Alias: `hoodie.table.precombine.field` (deprecated).
+    OrderingFields,
 
     /// When enabled, populates all meta fields. When disabled, no meta fields 
are populated
     /// and incremental queries will not be functional. This is only meant to 
be used for append only/immutable data for batch processing
@@ -161,7 +161,7 @@ impl AsRef<str> for HudiTableConfig {
             Self::KeyGeneratorClass => "hoodie.table.keygenerator.class",
             Self::KeyGeneratorType => "hoodie.table.keygenerator.type",
             Self::PartitionFields => "hoodie.table.partition.fields",
-            Self::PrecombineField => "hoodie.table.precombine.field",
+            Self::OrderingFields => "hoodie.table.ordering.fields",
             Self::PopulatesMetaFields => "hoodie.populate.meta.fields",
             Self::RecordKeyFields => "hoodie.table.recordkey.fields",
             Self::RecordMergeStrategy => "hoodie.table.record.merge.strategy",
@@ -211,15 +211,23 @@ impl ConfigParser for HudiTableConfig {
         }
     }
 
+    fn aliases(&self) -> &[ConfigAlias] {
+        match self {
+            Self::OrderingFields => {
+                const ALIASES: &[ConfigAlias] =
+                    
&[ConfigAlias::deprecated("hoodie.table.precombine.field")];
+                ALIASES
+            }
+            _ => &[],
+        }
+    }
+
     fn is_required(&self) -> bool {
         matches!(self, Self::TableName | Self::TableType | Self::TableVersion)
     }
 
     fn parse_value(&self, configs: &HashMap<String, String>) -> 
Result<Self::Output> {
-        let get_result = configs
-            .get(self.as_ref())
-            .map(|v| v.as_str())
-            .ok_or(NotFound(self.key()));
+        let get_result = self.resolve_raw_value(configs);
 
         match self {
             Self::BaseFileFormat => get_result
@@ -252,7 +260,15 @@ impl ConfigParser for HudiTableConfig {
             Self::KeyGeneratorType => get_result.map(|v| 
HudiConfigValue::String(v.to_string())),
             Self::PartitionFields => get_result
                 .map(|v| 
HudiConfigValue::List(v.split(',').map(str::to_string).collect())),
-            Self::PrecombineField => get_result.map(|v| 
HudiConfigValue::String(v.to_string())),
+            Self::OrderingFields => get_result.and_then(|v| {
+                let fields: Vec<String> = 
v.split(',').map(str::to_string).collect();
+                if fields.len() > 1 {
+                    return Err(UnsupportedValue(format!(
+                        "Multiple ordering fields '{v}' are not yet supported"
+                    )));
+                }
+                Ok(HudiConfigValue::List(fields))
+            }),
             Self::PopulatesMetaFields => get_result
                 .and_then(|v| {
                     bool::from_str(v).map_err(|e| ParseBool(self.key(), 
v.to_string(), e))
@@ -308,7 +324,10 @@ impl ConfigParser for HudiTableConfig {
                         );
                     }
 
-                    if 
!configs.contains_key(HudiTableConfig::PrecombineField.as_ref()) {
+                    if HudiTableConfig::OrderingFields
+                        .parse_value(configs)
+                        .is_err()
+                    {
                         // When precombine field is not available, we treat 
the table as append-only
                         return HudiConfigValue::String(
                             
RecordMergeStrategyValue::AppendOnly.as_ref().to_string(),
@@ -540,7 +559,7 @@ mod tests {
     fn test_derive_record_merger_strategy() {
         let hudi_configs = HudiConfigs::new(vec![
             (HudiTableConfig::PopulatesMetaFields, "false"),
-            (HudiTableConfig::PrecombineField, "ts"),
+            (HudiTableConfig::OrderingFields, "ts"),
         ]);
         let actual: String = hudi_configs
             .get_or_default(HudiTableConfig::RecordMergeStrategy)
@@ -563,7 +582,7 @@ mod tests {
 
         let hudi_configs = HudiConfigs::new(vec![
             (HudiTableConfig::PopulatesMetaFields, "true"),
-            (HudiTableConfig::PrecombineField, "ts"),
+            (HudiTableConfig::OrderingFields, "ts"),
         ]);
         let actual: String = hudi_configs
             .get_or_default(HudiTableConfig::RecordMergeStrategy)
@@ -573,4 +592,43 @@ mod tests {
             RecordMergeStrategyValue::OverwriteWithLatest.as_ref()
         );
     }
+
+    #[test]
+    fn test_precombine_field_deprecated_alias() {
+        let deprecated_key = HudiTableConfig::OrderingFields.aliases()[0].key;
+        assert_eq!(deprecated_key, "hoodie.table.precombine.field");
+
+        // Deprecated alias should still resolve
+        let hudi_configs = HudiConfigs::new(vec![
+            (HudiTableConfig::PopulatesMetaFields.as_ref(), "true"),
+            (deprecated_key, "ts"),
+        ]);
+        let actual: Vec<String> = hudi_configs
+            .get(HudiTableConfig::OrderingFields)
+            .unwrap()
+            .into();
+        assert_eq!(actual, vec!["ts"]);
+        let actual: String = hudi_configs
+            .get_or_default(HudiTableConfig::RecordMergeStrategy)
+            .into();
+        assert_eq!(
+            actual,
+            RecordMergeStrategyValue::OverwriteWithLatest.as_ref(),
+            "Should derive overwrite-with-latest from deprecated precombine 
field"
+        );
+    }
+
+    #[test]
+    fn test_ordering_fields_rejects_multiple() {
+        let hudi_configs = HudiConfigs::new(vec![
+            (HudiTableConfig::PopulatesMetaFields.as_ref(), "true"),
+            (HudiTableConfig::OrderingFields.as_ref(), "ts,seq"),
+        ]);
+        assert!(matches!(
+            hudi_configs
+                .get(HudiTableConfig::OrderingFields)
+                .unwrap_err(),
+            ConfigError::UnsupportedValue(_)
+        ));
+    }
 }
diff --git a/crates/core/src/file_group/log_file/reader.rs 
b/crates/core/src/file_group/log_file/reader.rs
index 8fdb61f1..1715a947 100644
--- a/crates/core/src/file_group/log_file/reader.rs
+++ b/crates/core/src/file_group/log_file/reader.rs
@@ -316,7 +316,7 @@ mod tests {
         file_name: &str,
     ) -> Result<LogFileReader<StorageReader>> {
         let dir_url = parse_uri(dir)?;
-        let hudi_configs = 
Arc::new(HudiConfigs::new([(HudiTableConfig::PrecombineField, "ts")]));
+        let hudi_configs = 
Arc::new(HudiConfigs::new([(HudiTableConfig::OrderingFields, "ts")]));
         let storage = Storage::new_with_base_url(dir_url)?;
         LogFileReader::new(hudi_configs, storage, file_name).await
     }
diff --git a/crates/core/src/file_group/record_batches.rs 
b/crates/core/src/file_group/record_batches.rs
index 9713cc6a..c407f2dd 100644
--- a/crates/core/src/file_group/record_batches.rs
+++ b/crates/core/src/file_group/record_batches.rs
@@ -115,7 +115,9 @@ impl RecordBatches {
         &self,
         hudi_configs: Arc<HudiConfigs>,
     ) -> Result<RecordBatch> {
-        let ordering_field: String = 
hudi_configs.get(HudiTableConfig::PrecombineField)?.into();
+        let ordering_fields: Vec<String> =
+            hudi_configs.get(HudiTableConfig::OrderingFields)?.into();
+        let ordering_field = &ordering_fields[0];
 
         if self.num_delete_rows == 0 {
             return 
Ok(RecordBatch::new_empty(SchemaRef::from(Schema::empty())));
@@ -123,7 +125,7 @@ impl RecordBatches {
 
         let mut delete_batches = Vec::with_capacity(self.delete_batches.len());
         for (batch, instant_time) in &self.delete_batches {
-            let batch = transform_delete_record_batch(batch, instant_time, 
&ordering_field)?;
+            let batch = transform_delete_record_batch(batch, instant_time, 
ordering_field)?;
             delete_batches.push(batch);
         }
 
@@ -506,7 +508,7 @@ mod tests {
     fn test_concat_delete_batches_transformed_empty() {
         let record_batches = RecordBatches::new();
         let hudi_configs = Arc::new(HudiConfigs::new([(
-            HudiTableConfig::PrecombineField.as_ref(),
+            HudiTableConfig::OrderingFields.as_ref(),
             "any_ordering_field",
         )]));
 
@@ -533,7 +535,7 @@ mod tests {
         record_batches.push_delete_batch(create_test_delete_batch(3), 
"20240101000000".to_string());
 
         let hudi_configs = Arc::new(HudiConfigs::new([(
-            HudiTableConfig::PrecombineField.as_ref(),
+            HudiTableConfig::OrderingFields.as_ref(),
             ordering_field,
         )]));
 
@@ -569,7 +571,7 @@ mod tests {
         record_batches.push_delete_batch(create_test_delete_batch(1), 
"20240103000000".to_string());
 
         let hudi_configs = Arc::new(HudiConfigs::new([(
-            HudiTableConfig::PrecombineField.as_ref(),
+            HudiTableConfig::OrderingFields.as_ref(),
             ordering_field,
         )]));
 
@@ -613,7 +615,7 @@ mod tests {
         record_batches.push_delete_batch(create_test_delete_batch(2), 
"20240101000000".to_string());
 
         let hudi_configs = Arc::new(HudiConfigs::new([(
-            HudiTableConfig::PrecombineField.as_ref(),
+            HudiTableConfig::OrderingFields.as_ref(),
             ordering_field,
         )]));
 
@@ -635,14 +637,14 @@ mod tests {
         record_batches.push_data_batch(create_test_data_batch(1));
         record_batches.push_delete_batch(create_test_delete_batch(1), 
"20240101000000".to_string());
 
-        // Create config without PrecombineField
+        // Create config without OrderingFields
         let hudi_configs = Arc::new(HudiConfigs::empty());
 
         // This should return an error
         let result = 
record_batches.concat_delete_batches_transformed(hudi_configs);
         match result {
             Err(CoreError::Config(ConfigError::NotFound(s))) => {
-                assert_eq!(s, HudiTableConfig::PrecombineField.as_ref());
+                assert_eq!(s, HudiTableConfig::OrderingFields.as_ref());
             }
             _ => panic!(
                 "Expected ConfigError::NotFound, got {:?}",
diff --git a/crates/core/src/merge/ordering.rs 
b/crates/core/src/merge/ordering.rs
index 9add39a0..b9d7f44e 100644
--- a/crates/core/src/merge/ordering.rs
+++ b/crates/core/src/merge/ordering.rs
@@ -62,11 +62,11 @@ pub fn process_batch_for_max_orderings(
         return Ok(());
     }
 
-    let ordering_field: String = 
hudi_configs.get(HudiTableConfig::PrecombineField)?.into();
+    let ordering_fields: Vec<String> = 
hudi_configs.get(HudiTableConfig::OrderingFields)?.into();
 
     let keys = extract_record_keys(key_converter, batch)?;
     let event_times =
-        extract_event_time_ordering_values(event_time_converter, batch, 
&ordering_field)?;
+        extract_event_time_ordering_values(event_time_converter, batch, 
&ordering_fields[0])?;
     let commit_times = 
extract_commit_time_ordering_values(commit_time_converter, batch)?;
     for i in 0..batch.num_rows() {
         let key = keys.row(i).owned();
@@ -340,7 +340,7 @@ mod tests {
     // Helper function to create test HudiConfigs
     fn create_test_hudi_configs() -> Arc<HudiConfigs> {
         Arc::new(HudiConfigs::new([(
-            HudiTableConfig::PrecombineField.as_ref(),
+            HudiTableConfig::OrderingFields.as_ref(),
             "an_ordering_field",
         )]))
     }
@@ -570,7 +570,7 @@ mod tests {
         let (key_converter, event_time_converter, commit_time_converter) =
             create_test_converters(schema);
 
-        // Create configs without PrecombineField
+        // Create configs without OrderingFields
         let hudi_configs = Arc::new(HudiConfigs::empty());
 
         let mut max_ordering = HashMap::new();
diff --git a/crates/core/src/merge/record_merger.rs 
b/crates/core/src/merge/record_merger.rs
index 5188a9c4..5e325069 100644
--- a/crates/core/src/merge/record_merger.rs
+++ b/crates/core/src/merge/record_merger.rs
@@ -21,7 +21,7 @@ use crate::config::HudiConfigs;
 use crate::config::error::ConfigError;
 use crate::config::error::Result as ConfigResult;
 use crate::config::table::HudiTableConfig::{
-    PopulatesMetaFields, PrecombineField, RecordMergeStrategy,
+    OrderingFields, PopulatesMetaFields, RecordMergeStrategy,
 };
 use crate::file_group::record_batches::RecordBatches;
 use crate::merge::RecordMergeStrategyValue;
@@ -64,7 +64,7 @@ impl RecordMerger {
             )));
         }
 
-        let precombine_field = hudi_configs.try_get(PrecombineField)?;
+        let precombine_field = hudi_configs.try_get(OrderingFields)?;
         if precombine_field.is_none()
             && merge_strategy == RecordMergeStrategyValue::OverwriteWithLatest
         {
@@ -72,7 +72,7 @@ impl RecordMerger {
                 "When {:?} is {:?}, {:?} must be set.",
                 RecordMergeStrategy,
                 RecordMergeStrategyValue::OverwriteWithLatest,
-                PrecombineField
+                OrderingFields
             )));
         }
 
@@ -101,18 +101,18 @@ impl RecordMerger {
                 }
 
                 // Use sorting fields to get sorted indices of the data batch 
(inserts and updates)
+                let ordering_fields: Vec<String> = 
self.hudi_configs.get(OrderingFields)?.into();
+                let ordering_field = &ordering_fields[0];
                 let key_array = 
data_batch.get_array(MetaField::RecordKey.as_ref())?;
-                let ordering_field: String = 
self.hudi_configs.get(PrecombineField)?.into();
-                let ordering_array = data_batch.get_array(&ordering_field)?;
+                let ordering_array = data_batch.get_array(ordering_field)?;
                 let commit_seqno_array = 
data_batch.get_array(MetaField::CommitSeqno.as_ref())?;
                 let desc_indices =
                     lexsort_to_indices(&[key_array, ordering_array, 
commit_seqno_array], true);
 
                 // Create shared converters for record keys and ordering values
                 let key_converter = 
create_record_key_converter(data_batch.schema())?;
-                let ordering_field: String = 
self.hudi_configs.get(PrecombineField)?.into();
                 let event_time_converter =
-                    create_event_time_ordering_converter(data_batch.schema(), 
&ordering_field)?;
+                    create_event_time_ordering_converter(data_batch.schema(), 
ordering_field)?;
                 let commit_time_converter =
                     
create_commit_time_ordering_converter(data_batch.schema())?;
 
@@ -143,7 +143,7 @@ impl RecordMerger {
                 let event_times = extract_event_time_ordering_values(
                     &event_time_converter,
                     &data_batch,
-                    &ordering_field,
+                    ordering_field,
                 )?;
                 let commit_times =
                     
extract_commit_time_ordering_values(&commit_time_converter, &data_batch)?;
@@ -204,7 +204,7 @@ mod tests {
             HudiConfigs::new([
                 (RecordMergeStrategy, strategy.to_string()),
                 (PopulatesMetaFields, populates_meta_fields.to_string()),
-                (PrecombineField, precombine.to_string()),
+                (OrderingFields, precombine.to_string()),
             ])
         } else {
             HudiConfigs::new([
diff --git a/crates/core/src/table/mod.rs b/crates/core/src/table/mod.rs
index 7ca640fb..5e1d3cb9 100644
--- a/crates/core/src/table/mod.rs
+++ b/crates/core/src/table/mod.rs
@@ -950,8 +950,8 @@ mod tests {
     use crate::config::table::BaseFileFormatValue;
     use crate::config::table::HudiTableConfig::{
         BaseFileFormat, Checksum, DatabaseName, DropsPartitionFields, 
IsHiveStylePartitioning,
-        IsPartitionPathUrlencoded, KeyGeneratorClass, PartitionFields, 
PopulatesMetaFields,
-        PrecombineField, RecordKeyFields, TableName, TableType, TableVersion,
+        IsPartitionPathUrlencoded, KeyGeneratorClass, OrderingFields, 
PartitionFields,
+        PopulatesMetaFields, RecordKeyFields, TableName, TableType, 
TableVersion,
         TimelineLayoutVersion, TimelineTimezone,
     };
     use crate::config::util::empty_options;
@@ -1227,7 +1227,7 @@ mod tests {
             "non-required config is missing"
         );
         assert!(
-            configs.validate(PrecombineField).is_ok(),
+            configs.validate(OrderingFields).is_ok(),
             "non-required config is missing"
         );
         assert!(
@@ -1266,7 +1266,7 @@ mod tests {
         assert!(configs.get(IsPartitionPathUrlencoded).is_err());
         assert!(configs.get(KeyGeneratorClass).is_err());
         assert!(configs.get(PartitionFields).is_err());
-        assert!(configs.get(PrecombineField).is_err());
+        assert!(configs.get(OrderingFields).is_err());
         assert!(configs.get(PopulatesMetaFields).is_err());
         assert!(configs.get(RecordKeyFields).is_err());
         assert!(configs.get(TableName).is_err());
@@ -1294,7 +1294,7 @@ mod tests {
         assert!(panic::catch_unwind(|| 
configs.get_or_default(KeyGeneratorClass)).is_err());
         let actual: Vec<String> = 
configs.get_or_default(PartitionFields).into();
         assert!(actual.is_empty());
-        assert!(panic::catch_unwind(|| 
configs.get_or_default(PrecombineField)).is_err());
+        assert!(panic::catch_unwind(|| 
configs.get_or_default(OrderingFields)).is_err());
         let actual: bool = configs.get_or_default(PopulatesMetaFields).into();
         assert!(actual);
         assert!(panic::catch_unwind(|| 
configs.get_or_default(RecordKeyFields)).is_err());
@@ -1328,8 +1328,8 @@ mod tests {
         assert_eq!(actual, "org.apache.hudi.keygen.SimpleKeyGenerator");
         let actual: Vec<String> = configs.get(PartitionFields).unwrap().into();
         assert_eq!(actual, vec!["city"]);
-        let actual: String = configs.get(PrecombineField).unwrap().into();
-        assert_eq!(actual, "ts");
+        let actual: Vec<String> = configs.get(OrderingFields).unwrap().into();
+        assert_eq!(actual, vec!["ts"]);
         let actual: bool = configs.get(PopulatesMetaFields).unwrap().into();
         assert!(actual);
         let actual: Vec<String> = configs.get(RecordKeyFields).unwrap().into();

Reply via email to the mailing list.