This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new e4b56b090fd [MINOR] Added condition to check default value to fix extracting password from credential store (#11246)
e4b56b090fd is described below
commit e4b56b090fdcb76416c60bd7ddd4247f0955c152
Author: Aditya Goenka <[email protected]>
AuthorDate: Fri May 17 21:17:07 2024 +0530
    [MINOR] Added condition to check default value to fix extracting password from credential store (#11246)
---
.../src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala
index e852445283c..3c28b1a2e0a 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala
@@ -878,7 +878,7 @@ class HoodieSparkSqlWriterInternal {
     properties.put(HoodieSyncConfig.META_SYNC_SPARK_VERSION.key, SPARK_VERSION)
     properties.put(HoodieSyncConfig.META_SYNC_USE_FILE_LISTING_FROM_METADATA.key, hoodieConfig.getBoolean(HoodieMetadataConfig.ENABLE))
     if ((fs.getConf.get(HiveConf.ConfVars.METASTOREPWD.varname) == null || fs.getConf.get(HiveConf.ConfVars.METASTOREPWD.varname).isEmpty) &&
-      (properties.get(HiveSyncConfigHolder.HIVE_PASS.key()) == null || properties.get(HiveSyncConfigHolder.HIVE_PASS.key()).toString.isEmpty)){
+      (properties.get(HiveSyncConfigHolder.HIVE_PASS.key()) == null || properties.get(HiveSyncConfigHolder.HIVE_PASS.key()).toString.isEmpty || properties.get(HiveSyncConfigHolder.HIVE_PASS.key()).toString.equalsIgnoreCase(HiveSyncConfigHolder.HIVE_PASS.defaultValue()))){
       try {
         val passwd = ShimLoader.getHadoopShims.getPassword(spark.sparkContext.hadoopConfiguration, HiveConf.ConfVars.METASTOREPWD.varname)
         if (passwd != null && !passwd.isEmpty) {