This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.0 by this push:
new 1eca457ba91 [Enhancement](jdbc catalog) Change Jdbc connection pool to hikari (#34045) (#34312)
1eca457ba91 is described below
commit 1eca457ba9132c3dd70eb20626f6241766477cfe
Author: zy-kkk <[email protected]>
AuthorDate: Tue Apr 30 12:24:20 2024 +0800
[Enhancement](jdbc catalog) Change Jdbc connection pool to hikari (#34045) (#34312)
pick #34045
---
fe/be-java-extensions/jdbc-scanner/pom.xml | 4 +-
.../java/org/apache/doris/jdbc/JdbcDataSource.java | 12 ++--
.../java/org/apache/doris/jdbc/JdbcExecutor.java | 77 +++++++++++-----------
fe/be-java-extensions/preload-extensions/pom.xml | 4 +-
fe/fe-core/pom.xml | 5 +-
.../doris/datasource/jdbc/client/JdbcClient.java | 56 ++++++++--------
fe/pom.xml | 10 +--
7 files changed, 84 insertions(+), 84 deletions(-)
diff --git a/fe/be-java-extensions/jdbc-scanner/pom.xml
b/fe/be-java-extensions/jdbc-scanner/pom.xml
index 0f3eace7dda..a37b5e0f62f 100644
--- a/fe/be-java-extensions/jdbc-scanner/pom.xml
+++ b/fe/be-java-extensions/jdbc-scanner/pom.xml
@@ -46,8 +46,8 @@ under the License.
<scope>provided</scope>
</dependency>
<dependency>
- <groupId>com.alibaba</groupId>
- <artifactId>druid</artifactId>
+ <groupId>com.zaxxer</groupId>
+ <artifactId>HikariCP</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
index 3c8ac38cf7d..447566596a7 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
@@ -17,7 +17,7 @@
package org.apache.doris.jdbc;
-import com.alibaba.druid.pool.DruidDataSource;
+import com.zaxxer.hikari.HikariDataSource;
import org.apache.log4j.Logger;
import java.util.Map;
@@ -30,7 +30,7 @@ import java.util.concurrent.TimeUnit;
public class JdbcDataSource {
private static final Logger LOG = Logger.getLogger(JdbcDataSource.class);
private static final JdbcDataSource jdbcDataSource = new JdbcDataSource();
-    private final Map<String, DruidDataSource> sourcesMap = new ConcurrentHashMap<>();
+    private final Map<String, HikariDataSource> sourcesMap = new ConcurrentHashMap<>();
     private final Map<String, Long> lastAccessTimeMap = new ConcurrentHashMap<>();
     private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
private long cleanupInterval = 8 * 60 * 60 * 1000; // 8 hours
@@ -44,17 +44,17 @@ public class JdbcDataSource {
return jdbcDataSource;
}
- public DruidDataSource getSource(String cacheKey) {
+ public HikariDataSource getSource(String cacheKey) {
lastAccessTimeMap.put(cacheKey, System.currentTimeMillis());
return sourcesMap.get(cacheKey);
}
- public void putSource(String cacheKey, DruidDataSource ds) {
+ public void putSource(String cacheKey, HikariDataSource ds) {
sourcesMap.put(cacheKey, ds);
lastAccessTimeMap.put(cacheKey, System.currentTimeMillis());
}
- public Map<String, DruidDataSource> getSourcesMap() {
+ public Map<String, HikariDataSource> getSourcesMap() {
return sourcesMap;
}
@@ -72,7 +72,7 @@ public class JdbcDataSource {
long now = System.currentTimeMillis();
lastAccessTimeMap.forEach((key, lastAccessTime) -> {
if (now - lastAccessTime > cleanupInterval) {
- DruidDataSource ds = sourcesMap.remove(key);
+ HikariDataSource ds = sourcesMap.remove(key);
if (ds != null) {
ds.close();
}
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutor.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutor.java
index 6f15600eddc..614418ad491 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutor.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutor.java
@@ -28,7 +28,6 @@ import org.apache.doris.thrift.TJdbcExecutorCtorParams;
import org.apache.doris.thrift.TJdbcOperation;
import org.apache.doris.thrift.TOdbcTableType;
-import com.alibaba.druid.pool.DruidDataSource;
import com.clickhouse.data.value.UnsignedByte;
import com.clickhouse.data.value.UnsignedInteger;
import com.clickhouse.data.value.UnsignedLong;
@@ -36,6 +35,7 @@ import com.clickhouse.data.value.UnsignedShort;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.MoreExecutors;
import com.vesoft.nebula.client.graph.data.ValueWrapper;
+import com.zaxxer.hikari.HikariDataSource;
import org.apache.log4j.Logger;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
@@ -84,8 +84,8 @@ public class JdbcExecutor {
private int batchSizeNum = 0;
private int curBlockRows = 0;
private static final byte[] emptyBytes = new byte[0];
- private DruidDataSource druidDataSource = null;
- private final byte[] druidDataSourceLock = new byte[0];
+ private HikariDataSource hikariDataSource = null;
+ private final byte[] hikariDataSourceLock = new byte[0];
private TOdbcTableType tableType;
private JdbcDataSourceConfig config;
@@ -143,10 +143,10 @@ public class JdbcExecutor {
closeResources(resultSet, stmt, conn);
}
} finally {
-            if (config.getConnectionPoolMinSize() == 0 && druidDataSource != null) {
-                druidDataSource.close();
+            if (config.getConnectionPoolMinSize() == 0 && hikariDataSource != null) {
+                hikariDataSource.close();
                 JdbcDataSource.getDataSource().getSourcesMap().remove(config.createCacheKey());
-                druidDataSource = null;
+                hikariDataSource = null;
}
}
}
@@ -182,10 +182,10 @@ public class JdbcExecutor {
     public void cleanDataSource() {
-        if (druidDataSource != null) {
-            druidDataSource.close();
+        if (hikariDataSource != null) {
+            hikariDataSource.close();
             JdbcDataSource.getDataSource().getSourcesMap().remove(config.createCacheKey());
-            druidDataSource = null;
+            hikariDataSource = null;
         }
     }
@@ -469,7 +469,8 @@ public class JdbcExecutor {
}
     private void init(JdbcDataSourceConfig config, String sql) throws UdfRuntimeException {
-        String druidDataSourceKey = config.createCacheKey();
+        ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
+        String hikariDataSourceKey = config.createCacheKey();
try {
if (isNebula()) {
batchSizeNum = config.getBatchSize();
@@ -480,36 +481,29 @@ public class JdbcExecutor {
} else {
ClassLoader parent = getClass().getClassLoader();
ClassLoader classLoader =
UdfUtils.getClassLoader(config.getJdbcDriverUrl(), parent);
-                druidDataSource = JdbcDataSource.getDataSource().getSource(druidDataSourceKey);
-                if (druidDataSource == null) {
-                    synchronized (druidDataSourceLock) {
-                        druidDataSource = JdbcDataSource.getDataSource().getSource(druidDataSourceKey);
-                        if (druidDataSource == null) {
+                hikariDataSource = JdbcDataSource.getDataSource().getSource(hikariDataSourceKey);
+                if (hikariDataSource == null) {
+                    synchronized (hikariDataSourceLock) {
+                        hikariDataSource = JdbcDataSource.getDataSource().getSource(hikariDataSourceKey);
+                        if (hikariDataSource == null) {
                             long start = System.currentTimeMillis();
-                            DruidDataSource ds = new DruidDataSource();
-                            ds.setDriverClassLoader(classLoader);
+                            Thread.currentThread().setContextClassLoader(classLoader);
+                            HikariDataSource ds = new HikariDataSource();
                             ds.setDriverClassName(config.getJdbcDriverClass());
-                            ds.setUrl(config.getJdbcUrl());
+                            ds.setJdbcUrl(config.getJdbcUrl());
                             ds.setUsername(config.getJdbcUser());
                             ds.setPassword(config.getJdbcPassword());
-                            ds.setMinIdle(config.getConnectionPoolMinSize()); // default 1
-                            ds.setInitialSize(config.getConnectionPoolMinSize()); // default 1
-                            ds.setMaxActive(config.getConnectionPoolMaxSize()); // default 10
-                            ds.setMaxWait(config.getConnectionPoolMaxWaitTime()); // default 5000
-                            ds.setTestWhileIdle(true);
-                            ds.setTestOnBorrow(false);
+                            ds.setMinimumIdle(config.getConnectionPoolMinSize()); // default 1
+                            ds.setMaximumPoolSize(config.getConnectionPoolMaxSize()); // default 10
+                            ds.setConnectionTimeout(config.getConnectionPoolMaxWaitTime()); // default 5000
+                            ds.setMaxLifetime(config.getConnectionPoolMaxLifeTime()); // default 30 min
+                            ds.setIdleTimeout(config.getConnectionPoolMaxLifeTime() / 2L); // default 15 min
                             setValidationQuery(ds, config.getTableType());
-                            // default 3 min
-                            ds.setTimeBetweenEvictionRunsMillis(config.getConnectionPoolMaxLifeTime() / 10L);
-                            // default 15 min
-                            ds.setMinEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime() / 2L);
-                            // default 30 min
-                            ds.setMaxEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime());
-                            ds.setKeepAlive(config.isConnectionPoolKeepAlive());
-                            // default 6 min
-                            ds.setKeepAliveBetweenTimeMillis(config.getConnectionPoolMaxLifeTime() / 5L);
-                            druidDataSource = ds;
-                            JdbcDataSource.getDataSource().putSource(druidDataSourceKey, ds);
+                            if (config.isConnectionPoolKeepAlive()) {
+                                ds.setKeepaliveTime(config.getConnectionPoolMaxLifeTime() / 5L); // default 6 min
+                            }
+                            hikariDataSource = ds;
+                            JdbcDataSource.getDataSource().putSource(hikariDataSourceKey, ds);
                             LOG.info("JdbcClient set"
                                     + " ConnectionPoolMinSize = " + config.getConnectionPoolMinSize()
                                     + ", ConnectionPoolMaxSize = " + config.getConnectionPoolMaxSize()
@@ -521,8 +515,9 @@ public class JdbcExecutor {
}
}
}
+
                 long start = System.currentTimeMillis();
-                conn = druidDataSource.getConnection();
+                conn = hikariDataSource.getConnection();
                 LOG.info("get connection [" + (config.getJdbcUrl() + config.getJdbcUser()) + "] cost: " + (
                         System.currentTimeMillis() - start)
                         + " ms");
@@ -549,16 +544,18 @@ public class JdbcExecutor {
throw new UdfRuntimeException("FileNotFoundException failed: ", e);
} catch (Exception e) {
throw new UdfRuntimeException("Initialize datasource failed: ", e);
+ } finally {
+ Thread.currentThread().setContextClassLoader(oldClassLoader);
}
}
-    private void setValidationQuery(DruidDataSource ds, TOdbcTableType tableType) {
+    private void setValidationQuery(HikariDataSource ds, TOdbcTableType tableType) {
         if (tableType == TOdbcTableType.ORACLE || tableType == TOdbcTableType.OCEANBASE_ORACLE) {
- ds.setValidationQuery("SELECT 1 FROM dual");
+ ds.setConnectionTestQuery("SELECT 1 FROM dual");
} else if (tableType == TOdbcTableType.SAP_HANA) {
- ds.setValidationQuery("SELECT 1 FROM DUMMY");
+ ds.setConnectionTestQuery("SELECT 1 FROM DUMMY");
} else {
- ds.setValidationQuery("SELECT 1");
+ ds.setConnectionTestQuery("SELECT 1");
}
}
diff --git a/fe/be-java-extensions/preload-extensions/pom.xml
b/fe/be-java-extensions/preload-extensions/pom.xml
index ae557fc9e87..bf7fef49d32 100644
--- a/fe/be-java-extensions/preload-extensions/pom.xml
+++ b/fe/be-java-extensions/preload-extensions/pom.xml
@@ -198,8 +198,8 @@ under the License.
<artifactId>ojdbc8</artifactId>
</dependency>
<dependency>
- <groupId>com.alibaba</groupId>
- <artifactId>druid</artifactId>
+ <groupId>com.zaxxer</groupId>
+ <artifactId>HikariCP</artifactId>
</dependency>
<dependency>
<groupId>com.clickhouse</groupId>
diff --git a/fe/fe-core/pom.xml b/fe/fe-core/pom.xml
index a9b9df42c44..65d52ffbabf 100644
--- a/fe/fe-core/pom.xml
+++ b/fe/fe-core/pom.xml
@@ -658,10 +658,9 @@ under the License.
<version>${antlr4.version}</version>
</dependency>
- <!-- https://mvnrepository.com/artifact/com.alibaba/druid -->
<dependency>
- <groupId>com.alibaba</groupId>
- <artifactId>druid</artifactId>
+ <groupId>com.zaxxer</groupId>
+ <artifactId>HikariCP</artifactId>
</dependency>
<!-- for aliyun dlf -->
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
index 1db27ef6eaf..fe685625016 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
@@ -24,8 +24,8 @@ import org.apache.doris.catalog.Type;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.util.Util;
-import com.alibaba.druid.pool.DruidDataSource;
import com.google.common.collect.Lists;
+import com.zaxxer.hikari.HikariDataSource;
import lombok.Data;
import lombok.Getter;
import org.apache.logging.log4j.LogManager;
@@ -56,8 +56,8 @@ public abstract class JdbcClient {
private String catalog;
protected String dbType;
protected String jdbcUser;
- protected URLClassLoader classLoader = null;
- protected DruidDataSource dataSource = null;
+ protected ClassLoader classLoader = null;
+ protected HikariDataSource dataSource = null;
protected boolean isOnlySpecifiedDatabase;
protected boolean isLowerCaseTableNames;
protected String oceanbaseMode = "";
@@ -108,53 +108,53 @@ public abstract class JdbcClient {
Optional.ofNullable(jdbcClientConfig.getExcludeDatabaseMap()).orElse(Collections.emptyMap());
String jdbcUrl = jdbcClientConfig.getJdbcUrl();
this.dbType = parseDbType(jdbcUrl);
+ initializeClassLoader(jdbcClientConfig);
initializeDataSource(jdbcClientConfig);
}
-    // Initialize DruidDataSource
+    // Initialize DataSource
     private void initializeDataSource(JdbcClientConfig config) {
         ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
         try {
-            // TODO(ftw): The problem here is that the jar package is handled by FE
-            // and URLClassLoader may load the jar package directly into memory
-            URL[] urls = {new URL(JdbcResource.getFullDriverUrl(config.getDriverUrl()))};
-            // set parent ClassLoader to null, we can achieve class loading isolation.
-            ClassLoader parent = getClass().getClassLoader();
-            ClassLoader classLoader = URLClassLoader.newInstance(urls, parent);
-            LOG.debug("parent ClassLoader: {}, old ClassLoader: {}, class Loader: {}.",
-                    parent, oldClassLoader, classLoader);
-            Thread.currentThread().setContextClassLoader(classLoader);
-            dataSource = new DruidDataSource();
-            dataSource.setDriverClassLoader(classLoader);
+            Thread.currentThread().setContextClassLoader(this.classLoader);
+            dataSource = new HikariDataSource();
             dataSource.setDriverClassName(config.getDriverClass());
-            dataSource.setUrl(config.getJdbcUrl());
+            dataSource.setJdbcUrl(config.getJdbcUrl());
             dataSource.setUsername(config.getUser());
             dataSource.setPassword(config.getPassword());
-            dataSource.setMinIdle(config.getConnectionPoolMinSize()); // default 1
-            dataSource.setInitialSize(config.getConnectionPoolMinSize()); // default 1
-            dataSource.setMaxActive(config.getConnectionPoolMaxSize()); // default 10
+            dataSource.setMinimumIdle(config.getConnectionPoolMinSize()); // default 1
+            dataSource.setMaximumPoolSize(config.getConnectionPoolMaxSize()); // default 10
             // set connection timeout to 5s.
             // The default is 30s, which is too long.
             // Because when querying information_schema db, BE will call thrift rpc(default timeout is 30s)
             // to FE to get schema info, and may create connection here, if we set it too long and the url is invalid,
             // it may cause the thrift rpc timeout.
-            dataSource.setMaxWait(config.getConnectionPoolMaxWaitTime()); // default 5000
-            dataSource.setTimeBetweenEvictionRunsMillis(config.getConnectionPoolMaxLifeTime() / 10L); // default 3 min
-            dataSource.setMinEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime() / 2L); // default 15 min
-            dataSource.setMaxEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime()); // default 30 min
+            dataSource.setConnectionTimeout(config.getConnectionPoolMaxWaitTime()); // default 5000
+            dataSource.setMaxLifetime(config.getConnectionPoolMaxLifeTime()); // default 30 min
+            dataSource.setIdleTimeout(config.getConnectionPoolMaxLifeTime() / 2L); // default 15 min
             LOG.info("JdbcClient set"
                     + " ConnectionPoolMinSize = " + config.getConnectionPoolMinSize()
                     + ", ConnectionPoolMaxSize = " + config.getConnectionPoolMaxSize()
                     + ", ConnectionPoolMaxWaitTime = " + config.getConnectionPoolMaxWaitTime()
                     + ", ConnectionPoolMaxLifeTime = " + config.getConnectionPoolMaxLifeTime());
-        } catch (MalformedURLException e) {
-            throw new JdbcClientException("MalformedURLException to load class about " + config.getDriverUrl(), e);
+        } catch (Exception e) {
+            throw new JdbcClientException(e.getMessage());
         } finally {
             Thread.currentThread().setContextClassLoader(oldClassLoader);
         }
     }
-    private static String parseDbType(String jdbcUrl) {
+    private void initializeClassLoader(JdbcClientConfig config) {
+        try {
+            URL[] urls = {new URL(JdbcResource.getFullDriverUrl(config.getDriverUrl()))};
+            ClassLoader parent = getClass().getClassLoader();
+            this.classLoader = URLClassLoader.newInstance(urls, parent);
+        } catch (MalformedURLException e) {
+            throw new RuntimeException("Error loading JDBC driver.", e);
+        }
+    }
+
+    public static String parseDbType(String jdbcUrl) {
try {
return JdbcResource.parseDbType(jdbcUrl);
} catch (DdlException e) {
@@ -167,13 +167,17 @@ public abstract class JdbcClient {
}
     public Connection getConnection() throws JdbcClientException {
+        ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
         Connection conn;
         try {
+            Thread.currentThread().setContextClassLoader(this.classLoader);
             conn = dataSource.getConnection();
         } catch (Exception e) {
             String errorMessage = String.format("Can not connect to jdbc due to error: %s, Catalog name: %s",
                     e.getMessage(), this.getCatalog());
             throw new JdbcClientException(errorMessage, e);
+        } finally {
+            Thread.currentThread().setContextClassLoader(oldClassLoader);
         }
return conn;
}
diff --git a/fe/pom.xml b/fe/pom.xml
index e4d9761c9ab..cd4cfdd11f0 100644
--- a/fe/pom.xml
+++ b/fe/pom.xml
@@ -221,7 +221,7 @@ under the License.
<doris.home>${fe.dir}/../</doris.home>
<revision>1.2-SNAPSHOT</revision>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <doris.hive.catalog.shade.version>2.0.0</doris.hive.catalog.shade.version>
+        <doris.hive.catalog.shade.version>2.0.1</doris.hive.catalog.shade.version>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<!--plugin parameters-->
@@ -251,7 +251,7 @@ under the License.
<commons-io.version>2.7</commons-io.version>
<json-simple.version>1.1.1</json-simple.version>
<junit.version>5.8.2</junit.version>
- <druid.version>1.2.5</druid.version>
+ <hikaricp.version>4.0.3</hikaricp.version>
<clickhouse.version>0.6.0</clickhouse.version>
<thrift.version>0.16.0</thrift.version>
<tomcat-embed-core.version>8.5.86</tomcat-embed-core.version>
@@ -1736,9 +1736,9 @@ under the License.
</exclusions>
</dependency>
<dependency>
- <groupId>com.alibaba</groupId>
- <artifactId>druid</artifactId>
- <version>${druid.version}</version>
+ <groupId>com.zaxxer</groupId>
+ <artifactId>HikariCP</artifactId>
+ <version>${hikaricp.version}</version>
</dependency>
<dependency>
<groupId>com.clickhouse</groupId>
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]