This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-paimon-trino.git
The following commit(s) were added to refs/heads/main by this push:
new 45e003a Support reading hadoop configuration files (#32)
45e003a is described below
commit 45e003a1180fd959f59d11037677e742139f15fe
Author: tsreaper <[email protected]>
AuthorDate: Thu Sep 14 15:16:40 2023 +0800
Support reading hadoop configuration files (#32)
---
.../paimon/trino/TrinoConnectorFactoryBase.java | 57 ++++++++++++++++++++++
1 file changed, 57 insertions(+)
diff --git a/paimon-trino-common/src/main/java/org/apache/paimon/trino/TrinoConnectorFactoryBase.java b/paimon-trino-common/src/main/java/org/apache/paimon/trino/TrinoConnectorFactoryBase.java
index cc55371..3808d28 100644
--- a/paimon-trino-common/src/main/java/org/apache/paimon/trino/TrinoConnectorFactoryBase.java
+++ b/paimon-trino-common/src/main/java/org/apache/paimon/trino/TrinoConnectorFactoryBase.java
@@ -19,15 +19,33 @@
 package org.apache.paimon.trino;
 
 import org.apache.paimon.options.Options;
+import org.apache.paimon.utils.StringUtils;
 
 import io.trino.spi.connector.Connector;
 import io.trino.spi.connector.ConnectorContext;
 import io.trino.spi.connector.ConnectorFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
 
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import java.io.File;
+import java.util.HashMap;
 import java.util.Map;
 
 /** Trino {@link ConnectorFactory}. */
 public abstract class TrinoConnectorFactoryBase implements ConnectorFactory {
+
+    private static final Logger LOG = LoggerFactory.getLogger(TrinoConnectorFactoryBase.class);
+
+    // see https://trino.io/docs/current/connector/hive.html#hive-general-configuration-properties
+    private static final String HADOOP_CONF_FILES_KEY = "hive.config.resources";
+    // see org.apache.paimon.utils.HadoopUtils
+    private static final String HADOOP_CONF_PREFIX = "hadoop.";
+
     @Override
     public String getName() {
         return "paimon";
@@ -36,9 +54,48 @@ public abstract class TrinoConnectorFactoryBase implements ConnectorFactory {
     @Override
     public Connector create(
             String catalogName, Map<String, String> config, ConnectorContext context) {
+        config = new HashMap<>(config);
+        if (config.containsKey(HADOOP_CONF_FILES_KEY)) {
+            for (String hadoopXml : config.get(HADOOP_CONF_FILES_KEY).split(",")) {
+                try {
+                    readHadoopXml(hadoopXml, config);
+                } catch (Exception e) {
+                    LOG.warn(
+                            "Failed to read hadoop xml file " + hadoopXml + ", skipping this file.",
+                            e);
+                }
+            }
+        }
+
         return new TrinoConnector(
                 new TrinoMetadata(Options.fromMap(config)),
                 new TrinoSplitManager(),
                 new TrinoPageSourceProvider());
     }
+
+    private void readHadoopXml(String path, Map<String, String> config) throws Exception {
+        path = path.trim();
+        if (path.isEmpty()) {
+            return;
+        }
+
+        File xmlFile = new File(path);
+        NodeList propertyNodes =
+                DocumentBuilderFactory.newInstance()
+                        .newDocumentBuilder()
+                        .parse(xmlFile)
+                        .getElementsByTagName("property");
+        for (int i = 0; i < propertyNodes.getLength(); i++) {
+            Node propertyNode = propertyNodes.item(i);
+            if (propertyNode.getNodeType() == 1) {
+                Element propertyElement = (Element) propertyNode;
+                String key = propertyElement.getElementsByTagName("name").item(0).getTextContent();
+                String value =
+                        propertyElement.getElementsByTagName("value").item(0).getTextContent();
+                if (!StringUtils.isNullOrWhitespaceOnly(value)) {
+                    config.putIfAbsent(HADOOP_CONF_PREFIX + key, value);
+                }
+            }
+        }
+    }
 }
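A minimal usage sketch (the file paths and the warehouse value below are illustrative assumptions, not part of this commit): with this change, a Trino catalog properties file for the Paimon connector can point hive.config.resources at one or more comma-separated Hadoop XML files, for example

    connector.name=paimon
    warehouse=hdfs://namenode:8020/paimon/warehouse
    hive.config.resources=/etc/hadoop/conf/core-site.xml,/etc/hadoop/conf/hdfs-site.xml

Given a core-site.xml entry such as

    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://namenode:8020</value>
    </property>

the connector would add hadoop.fs.defaultFS=hdfs://namenode:8020 to the catalog options. Because the code uses putIfAbsent and skips blank values, an explicit hadoop.* key in the properties file still takes precedence, and a file that cannot be read is logged with a warning and skipped.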