This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new ee7d3a79a0a [HUDI-8218] Change the properties to be loaded from both
default path and environment variable (#10835)
ee7d3a79a0a is described below
commit ee7d3a79a0a0a519e9b25b3daabace0fa2bd2e0f
Author: Amarjeet Singh <[email protected]>
AuthorDate: Sat Sep 21 11:45:38 2024 +0530
[HUDI-8218] Change the properties to be loaded from both default path and
environment variable (#10835)
Co-authored-by: Zupee <[email protected]>
Co-authored-by: Y Ethan Guo <[email protected]>
---
.../main/java/org/apache/hudi/common/config/ConfigGroups.java | 9 ++++++---
.../apache/hudi/common/config/DFSPropertiesConfiguration.java | 11 +++++------
2 files changed, 11 insertions(+), 9 deletions(-)
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/config/ConfigGroups.java
b/hudi-common/src/main/java/org/apache/hudi/common/config/ConfigGroups.java
index ac19a33f64c..f4b6bffd808 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/config/ConfigGroups.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/config/ConfigGroups.java
@@ -111,9 +111,12 @@ public class ConfigGroups {
+ "job configurations/tunings, so all the jobs on your cluster can
utilize it. "
+ "It also works with Spark SQL DML/DDL, and helps avoid having to
pass configs "
+ "inside the SQL statements.\n\n"
- + "By default, Hudi would load the configuration file under
`/etc/hudi/conf` "
- + "directory. You can specify a different configuration directory
location by "
- + "setting the `HUDI_CONF_DIR` environment variable.";
+ + "Hudi always loads the configuration file under the default
directory "
+ + "`file:/etc/hudi/conf`, if it exists, to set the default configs. "
+ + "In addition, you can specify another configuration "
+ + "directory location by setting the `HUDI_CONF_DIR` environment
variable. "
+ + "The configs stored in `HUDI_CONF_DIR/hudi-defaults.conf` are
loaded, "
+ + "overriding any configs already set by the config file in the
default directory.";
break;
case SPARK_DATASOURCE:
description = "These configs control the Hudi Spark Datasource, "
diff --git
a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
index 3941c28e570..5632825d32a 100644
---
a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
+++
b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
@@ -117,15 +117,14 @@ public class DFSPropertiesConfiguration extends
PropertiesConfig {
}
}
// Try loading the external config file from local file system
+ try {
+ conf.addPropsFromFile(DEFAULT_PATH);
+ } catch (Exception e) {
+ LOG.warn("Cannot load default config file: " + DEFAULT_PATH, e);
+ }
Option<StoragePath> defaultConfPath = getConfPathFromEnv();
if (defaultConfPath.isPresent()) {
conf.addPropsFromFile(defaultConfPath.get());
- } else {
- try {
- conf.addPropsFromFile(DEFAULT_PATH);
- } catch (Exception e) {
- LOG.warn("Cannot load default config file: " + DEFAULT_PATH, e);
- }
}
return conf.getProps();
}