This is an automated email from the ASF dual-hosted git repository.
danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 58191bdf5ff [HUDI-9524] Fix include in DFSPropertiesConfiguration (#13433)
58191bdf5ff is described below
commit 58191bdf5ffbf563386b69a1ac159e1436d47e57
Author: Alex R <[email protected]>
AuthorDate: Sat Jun 14 03:16:03 2025 -0700
[HUDI-9524] Fix include in DFSPropertiesConfiguration (#13433)
* fix include= in DFSPropertiesConfiguration
* fix storage path
---
.../common/config/DFSPropertiesConfiguration.java | 2 +-
.../util/TestDFSPropertiesConfiguration.java | 22 ++++++++++++++++++++++
2 files changed, 23 insertions(+), 1 deletion(-)
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
index a8e537eea36..3f74727a4fa 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
@@ -90,7 +90,7 @@ public class DFSPropertiesConfiguration extends PropertiesConfig {
}
public DFSPropertiesConfiguration() {
- this.hadoopConfig = null;
+ this.hadoopConfig = new Configuration();
this.mainFilePath = null;
this.hoodieConfig = new HoodieConfig();
this.visitedFilePaths = new HashSet<>();
diff --git a/hudi-hadoop-common/src/test/java/org/apache/hudi/common/util/TestDFSPropertiesConfiguration.java b/hudi-hadoop-common/src/test/java/org/apache/hudi/common/util/TestDFSPropertiesConfiguration.java
index e4f316170a9..1521580fe9e 100644
--- a/hudi-hadoop-common/src/test/java/org/apache/hudi/common/util/TestDFSPropertiesConfiguration.java
+++ b/hudi-hadoop-common/src/test/java/org/apache/hudi/common/util/TestDFSPropertiesConfiguration.java
@@ -208,4 +208,26 @@ public class TestDFSPropertiesConfiguration {
assertEquals("BLOOM", DFSPropertiesConfiguration.getGlobalProps().get("hoodie.index.type"));
assertEquals("true", DFSPropertiesConfiguration.getGlobalProps().get("hoodie.metadata.enable"));
}
+
+ @Test
+ public void testDefaultConstructorHandlesIncludes() {
+ // Use default ctor (hadoopConfig should be non-null internally)
+ DFSPropertiesConfiguration cfg = new DFSPropertiesConfiguration();
+
+ // Should load t3.props (which includes t2.props which includes t1.props) without NPE
+ cfg.addPropsFromFile(new StoragePath(dfsBasePath + "/t3.props"));
+ TypedProperties props = cfg.getProps();
+
+ // Values from t1, t2 and t3 should be resolved in order
+ assertEquals(123, props.getInteger("int.prop"));
+ assertEquals(243.4, props.getDouble("double.prop"), 0.001);
+ assertTrue(props.getBoolean("boolean.prop"));
+ assertEquals("t3.value", props.getString("string.prop"));
+ assertEquals(1354354354L, props.getLong("long.prop"));
+
+ // And a self include still triggers the loop detection
+ assertThrows(IllegalStateException.class, () -> {
+ cfg.addPropsFromFile(new StoragePath(dfsBasePath + "/t4.props"));
+ });
+ }
}