This is an automated email from the ASF dual-hosted git repository.
wuchunfu pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/seatunnel-web.git
The following commit(s) were added to refs/heads/main by this push:
new d110b97e [Improvement][Seatunnel-web][Hive-JDBC] Add Hive-JDBC into
seatunnel-web (#218)
d110b97e is described below
commit d110b97eddf8d93bc60704b328e97179824ee5a7
Author: Mohammad Arshad <[email protected]>
AuthorDate: Thu Sep 26 10:57:01 2024 +0530
[Improvement][Seatunnel-web][Hive-JDBC] Add Hive-JDBC into seatunnel-web
(#218)
---
.../classloader/DatasourceLoadConfig.java | 5 ++
.../datasource-hive/pom.xml | 3 +-
.../datasource-jdbc-hive/pom.xml | 12 +++
.../plugin/hive/jdbc/HiveJdbcConstants.java | 5 +-
.../hive/jdbc/HiveJdbcDataSourceChannel.java | 98 ++++++++++++++--------
.../hive/jdbc/HiveJdbcDataSourceFactory.java | 6 +-
.../plugin/hive/jdbc/HiveJdbcOptionRule.java | 55 +++++++++++-
.../seatunnel-datasource-plugins/pom.xml | 4 +
.../seatunnel/app/bean/engine/EngineDataType.java | 3 +-
.../impl/HiveJdbcDataSourceConfigSwitcher.java | 88 +++++++++++++++++++
.../resources/connector-datasource-mapper.yaml | 11 +++
11 files changed, 242 insertions(+), 48 deletions(-)
diff --git
a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/classloader/DatasourceLoadConfig.java
b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/classloader/DatasourceLoadConfig.java
index 431261bf..af8399e9 100644
---
a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/classloader/DatasourceLoadConfig.java
+++
b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/classloader/DatasourceLoadConfig.java
@@ -73,6 +73,9 @@ public class DatasourceLoadConfig {
classLoaderFactoryName.put(
"JDBC-TIDB",
"org.apache.seatunnel.datasource.plugin.tidb.jdbc.TidbJdbcDataSourceFactory");
+ classLoaderFactoryName.put(
+ "JDBC-HIVE",
+
"org.apache.seatunnel.datasource.plugin.hive.jdbc.HiveJdbcDataSourceFactory");
classLoaderFactoryName.put(
"KAFKA",
"org.apache.seatunnel.datasource.plugin.kafka.KafkaDataSourceFactory");
classLoaderFactoryName.put(
@@ -124,6 +127,7 @@ public class DatasourceLoadConfig {
classLoaderJarName.put("JDBC-STARROCKS", "datasource-jdbc-starrocks-");
classLoaderJarName.put("MONGODB", "datasource-mongodb-");
classLoaderJarName.put("JDBC-DB2", "datasource-jdbc-db2-");
+ classLoaderJarName.put("JDBC-HIVE", "datasource-jdbc-hive-");
classLoaderJarName.put("FAKESOURCE", "datasource-fakesource-");
classLoaderJarName.put("CONSOLE", "datasource-console-");
}
@@ -138,6 +142,7 @@ public class DatasourceLoadConfig {
"JDBC-Postgres",
"JDBC-SQLServer",
"JDBC-TiDB",
+ "JDBC-Hive",
"Kafka",
"MySQL-CDC",
"S3",
diff --git
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-hive/pom.xml
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-hive/pom.xml
index 2344fa71..e209dee3 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-hive/pom.xml
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-hive/pom.xml
@@ -25,7 +25,6 @@
<artifactId>datasource-hive</artifactId>
<properties>
- <hive.exec.version>3.1.3</hive.exec.version>
<guava.version>24.1-jre</guava.version>
</properties>
@@ -51,7 +50,7 @@
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
- <version>${hive.exec.version}</version>
+ <version>${hive.version}</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
diff --git
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/pom.xml
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/pom.xml
index f24fdb7c..2ad03297 100644
---
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/pom.xml
+++
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/pom.xml
@@ -31,6 +31,18 @@
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-jdbc</artifactId>
+ <version>${hive.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>provided</scope>
+ </dependency>
</dependencies>
<build>
diff --git
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcConstants.java
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcConstants.java
index 8b133ac7..8b857c52 100644
---
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcConstants.java
+++
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcConstants.java
@@ -22,8 +22,7 @@ import com.google.common.collect.Sets;
import java.util.Set;
public class HiveJdbcConstants {
-
+ public static final String PLUGIN_NAME = "JDBC-Hive";
public static final Set<String> HIVE_SYSTEM_DATABASES =
- Sets.newHashSet(
- "information_schema", "mysql", "performance_schema",
"sys", "test", "hivedb");
+ Sets.newHashSet("information_schema", "performance_schema", "sys");
}
diff --git
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java
index 2fa5b196..1e62fa59 100644
---
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java
+++
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java
@@ -24,13 +24,13 @@ import
org.apache.seatunnel.datasource.plugin.api.model.TableField;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.Socket;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
@@ -41,6 +41,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Properties;
@Slf4j
public class HiveJdbcDataSourceChannel implements DataSourceChannel {
@@ -61,15 +62,15 @@ public class HiveJdbcDataSourceChannel implements
DataSourceChannel {
Map<String, String> requestParams,
String database,
Map<String, String> option) {
- return getTables(pluginName, requestParams, database, option);
+ return getTableNames(requestParams, database);
}
@Override
public List<String> getDatabases(
@NonNull String pluginName, @NonNull Map<String, String>
requestParams) {
try {
- return getDataBaseNames(pluginName, requestParams);
- } catch (SQLException e) {
+ return getDataBaseNames(requestParams);
+ } catch (SQLException | IOException e) {
log.error("Query Hive databases error, request params is {}",
requestParams, e);
throw new DataSourcePluginException("Query Hive databases error,",
e);
}
@@ -104,7 +105,7 @@ public class HiveJdbcDataSourceChannel implements
DataSourceChannel {
}
protected boolean checkJdbcConnectivity(Map<String, String> requestParams)
{
- try (Connection ignored = init(requestParams)) {
+ try (Connection ignored = getHiveConnection(requestParams)) {
return true;
} catch (Exception e) {
throw new DataSourcePluginException(
@@ -112,25 +113,61 @@ public class HiveJdbcDataSourceChannel implements
DataSourceChannel {
}
}
- protected Connection init(Map<String, String> requestParams) throws
SQLException {
+ protected Connection getHiveConnection(Map<String, String> requestParams)
+ throws IOException, SQLException {
if (MapUtils.isEmpty(requestParams)) {
throw new DataSourcePluginException(
"Hive jdbc request params is null, please check your
config");
}
- String url = requestParams.get(HiveJdbcOptionRule.URL.key());
- return DriverManager.getConnection(url);
+ String driverClass =
+ requestParams.getOrDefault(
+ HiveJdbcOptionRule.DRIVER.key(),
"org.apache.hive.jdbc.HiveDriver");
+ try {
+ Class.forName(driverClass);
+ } catch (ClassNotFoundException e) {
+ throw new DataSourcePluginException(
+ "Hive jdbc driver " + driverClass + " not found", e);
+ }
+ Properties connProps = new Properties();
+ boolean isKerberosEnabled =
+
Boolean.parseBoolean(requestParams.get(HiveJdbcOptionRule.USE_KERBEROS.key()));
+ if (isKerberosEnabled) {
+ String krb5ConfPath =
requestParams.get(HiveJdbcOptionRule.KRB5_PATH.key());
+ if (StringUtils.isNotEmpty(krb5ConfPath)) {
+ System.setProperty("java.security.krb5.conf", krb5ConfPath);
+ }
+ Configuration conf = new Configuration();
+ conf.set("hadoop.security.authentication", "Kerberos");
+ UserGroupInformation.setConfiguration(conf);
+ String principal =
requestParams.get(HiveJdbcOptionRule.KERBEROS_PRINCIPAL.key());
+ connProps.setProperty("principal", principal);
+ String keytabPath =
requestParams.get(HiveJdbcOptionRule.KERBEROS_KEYTAB_PATH.key());
+ UserGroupInformation.loginUserFromKeytab(principal, keytabPath);
+ }
+
+ String user = requestParams.get(HiveJdbcOptionRule.USER.key());
+ String password = requestParams.get(HiveJdbcOptionRule.PASSWORD.key());
+ if (StringUtils.isNotEmpty(user)) {
+ connProps.setProperty("user", user);
+ }
+ if (StringUtils.isNotEmpty(password)) {
+ connProps.setProperty("password", password);
+ }
+
+ String jdbcUrl = requestParams.get(HiveJdbcOptionRule.URL.key());
+ return DriverManager.getConnection(jdbcUrl, connProps);
}
- protected List<String> getDataBaseNames(String pluginName, Map<String,
String> requestParams)
- throws SQLException {
+ protected List<String> getDataBaseNames(Map<String, String> requestParams)
+ throws SQLException, IOException {
List<String> dbNames = new ArrayList<>();
- try (Connection connection = init(requestParams);
- Statement statement = connection.createStatement(); ) {
- ResultSet re = statement.executeQuery("SHOW DATABASES;");
+ try (Connection connection = getHiveConnection(requestParams);
+ Statement statement = connection.createStatement()) {
+ ResultSet re = statement.executeQuery("SHOW DATABASES");
// filter system databases
while (re.next()) {
- String dbName = re.getString("database");
- if (StringUtils.isNotBlank(dbName) &&
isNotSystemDatabase(pluginName, dbName)) {
+ String dbName = re.getString("database_name");
+ if (StringUtils.isNotBlank(dbName) &&
isNotSystemDatabase(dbName)) {
dbNames.add(dbName);
}
}
@@ -140,9 +177,11 @@ public class HiveJdbcDataSourceChannel implements
DataSourceChannel {
protected List<String> getTableNames(Map<String, String> requestParams,
String dbName) {
List<String> tableNames = new ArrayList<>();
- try (Connection connection = init(requestParams); ) {
+ try (Connection connection = getHiveConnection(requestParams)) {
ResultSet resultSet =
- connection.getMetaData().getTables(dbName, null, null, new
String[] {"TABLE"});
+ connection
+ .getMetaData()
+ .getTables(dbName, dbName, null, new String[]
{"TABLE"});
while (resultSet.next()) {
String tableName = resultSet.getString("TABLE_NAME");
if (StringUtils.isNotBlank(tableName)) {
@@ -150,7 +189,7 @@ public class HiveJdbcDataSourceChannel implements
DataSourceChannel {
}
}
return tableNames;
- } catch (SQLException e) {
+ } catch (SQLException | IOException e) {
throw new DataSourcePluginException("get table names failed", e);
}
}
@@ -158,7 +197,7 @@ public class HiveJdbcDataSourceChannel implements
DataSourceChannel {
protected List<TableField> getTableFields(
Map<String, String> requestParams, String dbName, String
tableName) {
List<TableField> tableFields = new ArrayList<>();
- try (Connection connection = init(requestParams); ) {
+ try (Connection connection = getHiveConnection(requestParams)) {
DatabaseMetaData metaData = connection.getMetaData();
String primaryKey = getPrimaryKey(metaData, dbName, tableName);
ResultSet resultSet = metaData.getColumns(dbName, null, tableName,
null);
@@ -177,7 +216,7 @@ public class HiveJdbcDataSourceChannel implements
DataSourceChannel {
tableField.setNullable(isNullable);
tableFields.add(tableField);
}
- } catch (SQLException e) {
+ } catch (SQLException | IOException e) {
throw new DataSourcePluginException("get table fields failed", e);
}
return tableFields;
@@ -186,25 +225,14 @@ public class HiveJdbcDataSourceChannel implements
DataSourceChannel {
private String getPrimaryKey(DatabaseMetaData metaData, String dbName,
String tableName)
throws SQLException {
ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%",
tableName);
- while (primaryKeysInfo.next()) {
+ if (primaryKeysInfo.next()) {
return primaryKeysInfo.getString("COLUMN_NAME");
}
return null;
}
- @SuppressWarnings("checkstyle:MagicNumber")
- private static boolean checkHostConnectable(String host, int port) {
- try (Socket socket = new Socket()) {
- socket.connect(new InetSocketAddress(host, port), 1000);
- return true;
- } catch (IOException e) {
- return false;
- }
- }
-
- private boolean isNotSystemDatabase(String pluginName, String dbName) {
- // FIXME,filters system databases
- return true;
+ private boolean isNotSystemDatabase(String dbName) {
+ return
!HiveJdbcConstants.HIVE_SYSTEM_DATABASES.contains(dbName.toLowerCase());
}
private boolean convertToBoolean(Object value) {
diff --git
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceFactory.java
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceFactory.java
index b149cc3b..83eca41f 100644
---
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceFactory.java
+++
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceFactory.java
@@ -28,17 +28,17 @@ import java.util.Set;
public class HiveJdbcDataSourceFactory implements DataSourceFactory {
@Override
public String factoryIdentifier() {
- return "Hive-JDBC";
+ return HiveJdbcConstants.PLUGIN_NAME;
}
@Override
public Set<DataSourcePluginInfo> supportedDataSources() {
DataSourcePluginInfo dataSourcePluginInfo =
DataSourcePluginInfo.builder()
- .name("Hive-JDBC")
+ .name(HiveJdbcConstants.PLUGIN_NAME)
.type(DatasourcePluginTypeEnum.DATABASE.getCode())
.version("1.0.0")
- .icon("Hive-JDBC")
+ .icon(HiveJdbcConstants.PLUGIN_NAME)
.supportVirtualTables(false)
.build();
Set<DataSourcePluginInfo> dataSourceInfos = new HashSet<>();
diff --git
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcOptionRule.java
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcOptionRule.java
index fb004b69..9d31cb58 100644
---
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcOptionRule.java
+++
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcOptionRule.java
@@ -25,18 +25,65 @@ public class HiveJdbcOptionRule {
public static final Option<String> URL =
Options.key("url")
+ .stringType()
+ .defaultValue("jdbc:hive2://localhost:10000/default")
+ .withDescription(
+ "The URL of the JDBC connection. Refer to a case:
jdbc:hive2://localhost:10000/default");
+
+ public static final Option<String> DRIVER =
+ Options.key("driver")
+ .stringType()
+ .defaultValue("org.apache.hive.jdbc.HiveDriver")
+ .withDescription(
+ "The jdbc class name used to connect to the remote
data source");
+
+ public static final Option<String> USER =
+
Options.key("user").stringType().noDefaultValue().withDescription("user");
+
+ public static final Option<String> PASSWORD =
+
Options.key("password").stringType().noDefaultValue().withDescription("password");
+
+ public static final Option<Boolean> USE_KERBEROS =
+ Options.key("use_kerberos")
+ .booleanType()
+ .defaultValue(false)
+ .withDescription("Whether to enable Kerberos, default is
false.");
+
+ public static final Option<String> KERBEROS_PRINCIPAL =
+ Options.key("kerberos_principal")
+ .stringType()
+ .noDefaultValue()
+ .withDescription(
+ "When use kerberos, we should set kerberos
principal such as 'test_user@xxx'. ");
+
+ public static final Option<String> KERBEROS_KEYTAB_PATH =
+ Options.key("kerberos_keytab_path")
.stringType()
.noDefaultValue()
.withDescription(
- "jdbc url, eg:"
- +
"jdbc:hive2://localhost:10000/default?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
+ "When use kerberos, we should set kerberos
principal file path such as '/home/test/test_user.keytab'. ");
+
+ public static final Option<String> KRB5_PATH =
+ Options.key("krb5_path")
+ .stringType()
+ .defaultValue("/etc/krb5.conf")
+                        .withDescription(
+                                "When use kerberos, we should set krb5 path file
path such as '/seatunnel/krb5.conf' or use the default path '/etc/krb5.conf'");
public static OptionRule optionRule() {
- return OptionRule.builder().required(URL).build();
+ return OptionRule.builder()
+ .required(URL)
+ .required(DRIVER)
+ .optional(USER)
+ .optional(PASSWORD)
+ .optional(USE_KERBEROS)
+ .optional(KERBEROS_PRINCIPAL)
+ .optional(KERBEROS_KEYTAB_PATH)
+ .optional(KRB5_PATH)
+ .build();
}
public static OptionRule metadataRule() {
- // todo
return OptionRule.builder().build();
}
}
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml
b/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml
index 7a456858..64dda5d2 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml
@@ -49,5 +49,9 @@
<module>datasource-fakesource</module>
<module>datasource-console</module>
</modules>
+ <properties>
+ <hive.version>3.1.3</hive.version>
+ <hadoop.version>3.1.0</hadoop.version>
+ </properties>
</project>
diff --git
a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/engine/EngineDataType.java
b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/engine/EngineDataType.java
index 86fa0c6b..f7fae731 100644
---
a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/engine/EngineDataType.java
+++
b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/engine/EngineDataType.java
@@ -28,6 +28,7 @@ import lombok.Getter;
import java.util.Arrays;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -85,7 +86,7 @@ public class EngineDataType {
@Override
public SeaTunnelDataType<?> toSeaTunnelType(String field, String
connectorDataType) {
- return DATA_TYPE_MAP.get(connectorDataType).getRawType();
+ return
DATA_TYPE_MAP.get(connectorDataType.toLowerCase(Locale.ROOT)).getRawType();
}
@Override
diff --git
a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thirdparty/datasource/impl/HiveJdbcDataSourceConfigSwitcher.java
b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thirdparty/datasource/impl/HiveJdbcDataSourceConfigSwitcher.java
new file mode 100644
index 00000000..114ffc63
--- /dev/null
+++
b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thirdparty/datasource/impl/HiveJdbcDataSourceConfigSwitcher.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.thirdparty.datasource.impl;
+
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+
+import org.apache.seatunnel.api.configuration.Option;
+import org.apache.seatunnel.api.configuration.util.OptionRule;
+import org.apache.seatunnel.api.configuration.util.RequiredOption;
+import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
+import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
+import org.apache.seatunnel.app.domain.request.job.SelectTableFields;
+import
org.apache.seatunnel.app.domain.response.datasource.VirtualTableDetailRes;
+import org.apache.seatunnel.app.dynamicforms.FormStructure;
+import org.apache.seatunnel.app.thirdparty.datasource.DataSourceConfigSwitcher;
+import org.apache.seatunnel.common.constants.PluginType;
+
+import com.google.auto.service.AutoService;
+
+import java.util.Collections;
+import java.util.List;
+
+@AutoService(DataSourceConfigSwitcher.class)
+public class HiveJdbcDataSourceConfigSwitcher extends
BaseJdbcDataSourceConfigSwitcher {
+
+ @Override
+ public String getDataSourceName() {
+ return "JDBC-HIVE";
+ }
+
+ @Override
+ public FormStructure filterOptionRule(
+ String connectorName,
+ OptionRule dataSourceOptionRule,
+ OptionRule virtualTableOptionRule,
+ BusinessMode businessMode,
+ PluginType pluginType,
+ OptionRule connectorOptionRule,
+ List<RequiredOption> addRequiredOptions,
+ List<Option<?>> addOptionalOptions,
+ List<String> excludedKeys) {
+ return super.filterOptionRule(
+ connectorName,
+ dataSourceOptionRule,
+ virtualTableOptionRule,
+ businessMode,
+ pluginType,
+ connectorOptionRule,
+ addRequiredOptions,
+ addOptionalOptions,
+ Collections.emptyList());
+ }
+
+ @Override
+ public Config mergeDatasourceConfig(
+ Config dataSourceInstanceConfig,
+ VirtualTableDetailRes virtualTableDetail,
+ DataSourceOption dataSourceOption,
+ SelectTableFields selectTableFields,
+ BusinessMode businessMode,
+ PluginType pluginType,
+ Config connectorConfig) {
+
+ return super.mergeDatasourceConfig(
+ dataSourceInstanceConfig,
+ virtualTableDetail,
+ dataSourceOption,
+ selectTableFields,
+ businessMode,
+ pluginType,
+ connectorConfig);
+ }
+}
diff --git
a/seatunnel-server/seatunnel-app/src/main/resources/connector-datasource-mapper.yaml
b/seatunnel-server/seatunnel-app/src/main/resources/connector-datasource-mapper.yaml
index 9dabf7b6..96e277a6 100644
---
a/seatunnel-server/seatunnel-app/src/main/resources/connector-datasource-mapper.yaml
+++
b/seatunnel-server/seatunnel-app/src/main/resources/connector-datasource-mapper.yaml
@@ -26,6 +26,7 @@ connector-datasource-mapper:
- JDBC-KingBase
- JDBC-Oracle
- JDBC-Db2
+ - JDBC-Hive
MySQL-CDC:
dataSources:
@@ -152,6 +153,11 @@ connector-datasource-mapper:
- DATA_INTEGRATION
sceneMode:
- SINGLE_TABLE
+ JDBC-Hive:
+ businessMode:
+ - DATA_INTEGRATION
+ sceneMode:
+ - SINGLE_TABLE
FakeSource:
businessMode:
- DATA_INTEGRATION
@@ -250,6 +256,11 @@ connector-datasource-mapper:
- DATA_INTEGRATION
sceneMode:
- SINGLE_TABLE
+ JDBC-Hive:
+ businessMode:
+ - DATA_INTEGRATION
+ sceneMode:
+ - SINGLE_TABLE
Console:
businessMode: