This is an automated email from the ASF dual-hosted git repository.

yuzelin pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/paimon-trino.git


The following commit(s) were added to refs/heads/main by this push:
     new cf37c4a  Add support for reading hadoop configuration files again (#81)
cf37c4a is described below

commit cf37c4a4fc2ae32f1011de68256bdde49b2d77df
Author: tsreaper <[email protected]>
AuthorDate: Fri Aug 30 17:06:54 2024 +0800

    Add support for reading hadoop configuration files again (#81)
---
 .../apache/paimon/trino/TrinoConnectorFactory.java | 57 ++++++++++++++++++++++
 1 file changed, 57 insertions(+)

diff --git 
a/paimon-trino-common/src/main/java/org/apache/paimon/trino/TrinoConnectorFactory.java
 
b/paimon-trino-common/src/main/java/org/apache/paimon/trino/TrinoConnectorFactory.java
index f6ad6a8..ad1d500 100644
--- 
a/paimon-trino-common/src/main/java/org/apache/paimon/trino/TrinoConnectorFactory.java
+++ 
b/paimon-trino-common/src/main/java/org/apache/paimon/trino/TrinoConnectorFactory.java
@@ -18,6 +18,8 @@
 
 package org.apache.paimon.trino;
 
+import org.apache.paimon.utils.StringUtils;
+
 import com.google.inject.Binder;
 import com.google.inject.Injector;
 import com.google.inject.Module;
@@ -38,14 +40,30 @@ import io.trino.spi.connector.Connector;
 import io.trino.spi.connector.ConnectorContext;
 import io.trino.spi.connector.ConnectorFactory;
 import io.trino.spi.type.TypeManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import javax.xml.parsers.DocumentBuilderFactory;
 
+import java.io.File;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
+import java.util.HashMap;
 import java.util.Map;
 
 /** Trino {@link ConnectorFactory}. */
 public class TrinoConnectorFactory implements ConnectorFactory {
 
    private static final Logger LOG = LoggerFactory.getLogger(TrinoConnectorFactory.class);

    // Comma-separated list of hadoop XML configuration files, reusing the Hive connector's
    // property name; see
    // https://trino.io/docs/current/connector/hive.html#hive-general-configuration-properties
    private static final String HADOOP_CONF_FILES_KEY = "hive.config.resources";

    // Prefix under which hadoop options are passed to Paimon; see
    // org.apache.paimon.utils.HadoopUtils
    private static final String HADOOP_CONF_PREFIX = "hadoop.";
+
    /** Returns the connector name ({@code "paimon"}) used to select this factory. */
    @Override
    public String getName() {
        return "paimon";
    }
@@ -62,6 +80,19 @@ public class TrinoConnectorFactory implements 
ConnectorFactory {
             Map<String, String> config,
             ConnectorContext context,
             Module module) {
+        config = new HashMap<>(config);
+        if (config.containsKey(HADOOP_CONF_FILES_KEY)) {
+            for (String hadoopXml : 
config.get(HADOOP_CONF_FILES_KEY).split(",")) {
+                try {
+                    readHadoopXml(hadoopXml, config);
+                } catch (Exception e) {
+                    LOG.warn(
+                            "Failed to read hadoop xml file " + hadoopXml + ", 
skipping this file.",
+                            e);
+                }
+            }
+        }
+
         ClassLoader classLoader = TrinoConnectorFactory.class.getClassLoader();
         try (ThreadContextClassLoader ignored = new 
ThreadContextClassLoader(classLoader)) {
             Bootstrap app =
@@ -112,6 +143,32 @@ public class TrinoConnectorFactory implements 
ConnectorFactory {
         }
     }
 
+    private static void readHadoopXml(String path, Map<String, String> config) 
throws Exception {
+        path = path.trim();
+        if (path.isEmpty()) {
+            return;
+        }
+
+        File xmlFile = new File(path);
+        NodeList propertyNodes =
+                DocumentBuilderFactory.newInstance()
+                        .newDocumentBuilder()
+                        .parse(xmlFile)
+                        .getElementsByTagName("property");
+        for (int i = 0; i < propertyNodes.getLength(); i++) {
+            Node propertyNode = propertyNodes.item(i);
+            if (propertyNode.getNodeType() == 1) {
+                Element propertyElement = (Element) propertyNode;
+                String key = 
propertyElement.getElementsByTagName("name").item(0).getTextContent();
+                String value =
+                        
propertyElement.getElementsByTagName("value").item(0).getTextContent();
+                if (!StringUtils.isNullOrWhitespaceOnly(value)) {
+                    config.putIfAbsent(HADOOP_CONF_PREFIX + key, value);
+                }
+            }
+        }
+    }
+
     /** Empty module for paimon connector factory. */
     public static class EmptyModule implements Module {
         @Override

Reply via email to