This is an automated email from the ASF dual-hosted git repository.

danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new b02477abed62 feat: support extract hadoop conf from Flink runtime (#13259)
b02477abed62 is described below

commit b02477abed624897b08ee2caccfe5821ea441635
Author: Peter Huang <[email protected]>
AuthorDate: Sun Dec 21 22:41:37 2025 -0800

    feat: support extract hadoop conf from Flink runtime (#13259)
---
 hudi-client/hudi-flink-client/pom.xml              | 12 +++++
 .../java/org/apache/hudi/util/FlinkClientUtil.java | 23 ++++++++++
 .../org/apache/hudi/util/TestFlinkClientUtil.java  | 52 ++++++++++++++++++++++
 3 files changed, 87 insertions(+)

diff --git a/hudi-client/hudi-flink-client/pom.xml b/hudi-client/hudi-flink-client/pom.xml
index 08e55ca40e32..ba445d038eda 100644
--- a/hudi-client/hudi-flink-client/pom.xml
+++ b/hudi-client/hudi-flink-client/pom.xml
@@ -252,6 +252,18 @@
                 <groupId>org.apache.rat</groupId>
                 <artifactId>apache-rat-plugin</artifactId>
             </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <configuration>
+                    <environmentVariables>
+                        <FLINK_CONF_DIR>test-path</FLINK_CONF_DIR>
+                    </environmentVariables>
+                    <forkCount>1</forkCount>
+                    <reuseForks>false</reuseForks>
+                </configuration>
+            </plugin>
         </plugins>
 
         <resources>
diff --git a/hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/util/FlinkClientUtil.java b/hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/util/FlinkClientUtil.java
index a429a42b16b4..63844216f40e 100644
--- a/hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/util/FlinkClientUtil.java
+++ b/hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/util/FlinkClientUtil.java
@@ -23,18 +23,27 @@ import org.apache.hudi.common.config.TypedProperties;
 import org.apache.hudi.common.table.HoodieTableConfig;
 import org.apache.hudi.common.table.HoodieTableMetaClient;
 import org.apache.hudi.config.HoodieWriteConfig;
+import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.hadoop.fs.HadoopFSUtils;
 
 import org.apache.flink.api.java.hadoop.mapred.utils.HadoopUtils;
 import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.ConfigurationUtils;
+import org.apache.flink.configuration.GlobalConfiguration;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
+import java.util.Map;
 
 /**
  * Utilities for Hoodie Flink client.
  */
 public class FlinkClientUtil {
+  private static final Logger LOG = LoggerFactory.getLogger(FlinkClientUtil.class);
+  public static String FLINK_HADOOP_CONFIG_PREFIX = "flink.hadoop";
+  public static String FLINK_CONF_DIR = "FLINK_CONF_DIR";
 
   /**
    * Creates the meta client.
@@ -60,6 +69,20 @@ public class FlinkClientUtil {
     if (hadoopConf == null) {
       hadoopConf = new org.apache.hadoop.conf.Configuration();
     }
+
+    String confDir = System.getenv(FLINK_CONF_DIR);
+    if (!StringUtils.isNullOrEmpty(confDir)) {
+      LOG.info("Flink conf dir: {}", confDir);
+      try {
+        Configuration configuration = GlobalConfiguration.loadConfiguration(confDir);
+        for (Map.Entry<String, String> kv : ConfigurationUtils.getPrefixedKeyValuePairs(FLINK_HADOOP_CONFIG_PREFIX, configuration).entrySet()) {
+          hadoopConf.set(kv.getKey(), kv.getValue());
+        }
+      } catch (Exception e) {
+        LOG.warn("Fail to load flink configuration from path {}", confDir, e);
+      }
+    }
+
     return hadoopConf;
   }
 
diff --git a/hudi-client/hudi-flink-client/src/test/java/org/apache/hudi/util/TestFlinkClientUtil.java b/hudi-client/hudi-flink-client/src/test/java/org/apache/hudi/util/TestFlinkClientUtil.java
new file mode 100644
index 000000000000..6d807f589989
--- /dev/null
+++ b/hudi-client/hudi-flink-client/src/test/java/org/apache/hudi/util/TestFlinkClientUtil.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hudi.util;
+
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.GlobalConfiguration;
+import org.junit.jupiter.api.Test;
+import org.mockito.MockedStatic;
+import org.mockito.Mockito;
+
+import static org.apache.hudi.util.FlinkClientUtil.FLINK_HADOOP_CONFIG_PREFIX;
+import static org.mockito.ArgumentMatchers.anyString;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+/**
+ * Test cases for {@link FlinkClientUtil}
+ */
+public class TestFlinkClientUtil {
+  private static final String FLINK_CRYPTO_RETRIEVER_KEY = FLINK_HADOOP_CONFIG_PREFIX + "parquet.crypto.encryptor.decryptor.retriever.class";
+  private static final String CRYPTO_RETRIEVER_KEY = "parquet.crypto.encryptor.decryptor.retriever.class";
+  private static final String CRYPTO_RETRIEVER_CLASS = "org.apache.parquet.crypto.CryptoMetadataRetriever";
+
+  @Test
+  public void testGetHadoopConf() throws Exception {
+    try (MockedStatic<GlobalConfiguration> mockedStatic = Mockito.mockStatic(GlobalConfiguration.class)) {
+      Configuration flinkConf = new Configuration();
+      flinkConf.setString(FLINK_CRYPTO_RETRIEVER_KEY, CRYPTO_RETRIEVER_CLASS);
+      mockedStatic.when(() -> GlobalConfiguration.loadConfiguration(anyString())).thenReturn(flinkConf);
+      org.apache.hadoop.conf.Configuration hadoopConf =
+          FlinkClientUtil.getHadoopConf();
+
+      assertEquals(CRYPTO_RETRIEVER_CLASS, hadoopConf.get(CRYPTO_RETRIEVER_KEY));
+    }
+  }
+}

Reply via email to