This is an automated email from the ASF dual-hosted git repository.

boroknagyz pushed a commit to branch branch-4.4.0
in repository https://gitbox.apache.org/repos/asf/impala.git

commit 01401a0368cb8f19c86dc3fab764ee4b5732f2f6
Author: wzhou-code <wz...@cloudera.com>
AuthorDate: Thu Apr 11 09:37:33 2024 -0700

    IMPALA-12910: Support running TPCH/TPCDS queries for JDBC tables
    
    This patch adds a script to create external JDBC tables for the datasets
    of TPCH and TPCDS, and adds unit tests to run TPCH and TPCDS queries for
    external JDBC tables with Impala-Impala federation. Note that JDBC
    tables are mapping tables; they don't take additional disk space.
    It fixes a race condition in the caching of SQL DataSource objects by
    using a new DataSourceObjectCache class, which checks the reference
    count before closing a SQL DataSource.
    Adds a new query option 'clean_dbcp_ds_cache' with a default value of
    true. When it's set to false, a SQL DataSource object will not be closed
    when its reference count equals 0; instead it will be kept in the cache
    until the SQL DataSource has been idle for more than 5 minutes. The flag
    variable 'dbcp_data_source_idle_timeout_s' is added to make the duration
    configurable.
    java.sql.Connection.close() fails to remove a closed connection from
    connection pool sometimes, which causes JDBC working threads to wait
    for available connections from the connection pool for a long time.
    The workaround is to call the BasicDataSource.invalidateConnection() API
    to close a connection.
    Two flag variables are added for DBCP configuration properties
    'maxTotal' and 'maxWaitMillis'. Note that 'maxActive' and 'maxWait'
    properties are renamed to 'maxTotal' and 'maxWaitMillis' respectively
    in apache.commons.dbcp v2.
    Fixes a bug in database type comparison: the type strings specified by
    the user could be lower case or a mix of upper and lower case, but the
    code compares the types against an upper case string.
    Fixes an issue to properly close the SQL DataSource object in
    JdbcDataSource.open() and JdbcDataSource.getNext() when errors are
    returned from DBCP APIs or JDBC drivers.
    
    testdata/bin/create-tpc-jdbc-tables.py supports creating JDBC tables
    for Impala-Impala, Postgres and MySQL.
    The following sample commands create TPCDS JDBC tables for Impala-Impala
    federation with a remote coordinator running at 10.19.10.86, and for a
    Postgres server running at 10.19.10.86:
      ${IMPALA_HOME}/testdata/bin/create-tpc-jdbc-tables.py \
        --jdbc_db_name=tpcds_jdbc --workload=tpcds \
        --database_type=IMPALA --database_host=10.19.10.86 --clean
    
      ${IMPALA_HOME}/testdata/bin/create-tpc-jdbc-tables.py \
        --jdbc_db_name=tpcds_jdbc --workload=tpcds \
        --database_type=POSTGRES --database_host=10.19.10.86 \
        --database_name=tpcds --clean
    
    TPCDS tests for JDBC tables run only for release/exhaustive builds.
    TPCH tests for JDBC tables run for core and exhaustive builds, except
    Dockerized builds.
    
    Remaining Issues:
     - tpcds-decimal_v2-q80a failed with returned rows not matching expected
       results for some decimal values. This will be fixed in IMPALA-13018.
    
    Testing:
     - Passed core tests.
     - Passed query_test/test_tpcds_queries.py in release/exhaustive build.
     - Manually verified that only one SQL DataSource object was created for
       test_tpcds_queries.py::TestTpcdsQueryForJdbcTables since query option
       'clean_dbcp_ds_cache' was set as false, and the SQL DataSource object
       was closed by cleanup thread.
    
    Change-Id: I44e8c1bb020e90559c7f22483a7ab7a151b8f48a
    Reviewed-on: http://gerrit.cloudera.org:8080/21304
    Reviewed-by: Abhishek Rawat <ara...@cloudera.com>
    Tested-by: Impala Public Jenkins <impala-public-jenk...@cloudera.com>
    (cherry picked from commit 08f8a300250df7b4f9a517cdb6bab48c379b7e03)
---
 be/src/exec/data-source-scan-node.cc               |   1 +
 be/src/service/frontend.cc                         |  13 +
 be/src/service/query-options.cc                    |   4 +
 be/src/service/query-options.h                     |   3 +-
 be/src/util/backend-gflag-util.cc                  |   6 +
 common/thrift/BackendGflags.thrift                 |   6 +
 common/thrift/ExternalDataSource.thrift            |   5 +
 common/thrift/ImpalaService.thrift                 |   5 +
 common/thrift/Query.thrift                         |   2 +
 .../impala/extdatasource/jdbc/JdbcDataSource.java  |  55 +-
 .../jdbc/conf/JdbcStorageConfigManager.java        |   2 +-
 .../jdbc/dao/DataSourceObjectCache.java            | 211 ++++++
 .../extdatasource/jdbc/dao/DatabaseAccessor.java   |   3 +-
 .../jdbc/dao/GenericJdbcDatabaseAccessor.java      | 118 ++-
 .../extdatasource/jdbc/dao/JdbcRecordIterator.java |   4 +-
 .../org/apache/impala/service/BackendConfig.java   |  12 +
 testdata/bin/create-load-data.sh                   |   7 +-
 testdata/bin/create-tpc-jdbc-tables.py             | 182 +++++
 .../datasets/tpcds/tpcds_jdbc_schema_template.sql  | 804 +++++++++++++++++++++
 .../datasets/tpch/tpch_jdbc_schema_template.sql    | 200 +++++
 tests/common/skip.py                               |   2 +
 tests/query_test/test_tpcds_queries.py             | 373 +++++++++-
 tests/query_test/test_tpch_queries.py              |  29 +-
 23 files changed, 1948 insertions(+), 99 deletions(-)

diff --git a/be/src/exec/data-source-scan-node.cc 
b/be/src/exec/data-source-scan-node.cc
index df2eb7f5a..71275fdfa 100644
--- a/be/src/exec/data-source-scan-node.cc
+++ b/be/src/exec/data-source-scan-node.cc
@@ -127,6 +127,7 @@ Status DataSourceScanNode::Open(RuntimeState* state) {
   params.__set_row_schema(row_schema);
   params.__set_batch_size(FLAGS_data_source_batch_size);
   params.__set_predicates(data_src_node_.accepted_predicates);
+  params.__set_clean_dbcp_ds_cache(state->query_options().clean_dbcp_ds_cache);
   TOpenResult result;
   RETURN_IF_ERROR(data_source_executor_->Open(params, &result));
   RETURN_IF_ERROR(Status(result.status));
diff --git a/be/src/service/frontend.cc b/be/src/service/frontend.cc
index 6cc00a710..aff23809f 100644
--- a/be/src/service/frontend.cc
+++ b/be/src/service/frontend.cc
@@ -96,6 +96,19 @@ DEFINE_bool(allow_catalog_cache_op_from_masked_users, false, 
"Whether to allow t
     "operations are blocked since such users are considered read-only users. 
Note that "
     "checking column masking policies requires loading column info of the 
table, which "
     "could slow down simple commands like INVALIDATE METADATA <table>");
+DEFINE_int32(dbcp_max_conn_pool_size, 8,
+    "The maximum number of active connections that can be allocated from a 
DBCP "
+    "connection pool at the same time, or -1 for no limit. DBCP connection 
pools are "
+    "created when accessing remote RDBMS for external JDBC tables. This 
setting applies "
+    "to all DBCP connection pools created on the coordinator.");
+DEFINE_int32(dbcp_max_wait_millis_for_conn, -1,
+    "The maximum number of milliseconds that DBCP connection pool will wait 
(when "
+    "there are no available connections) for a connection to be returned 
before "
+    "throwing an exception, or -1 to wait indefinitely. 0 means immediately 
throwing "
+    "exception if there are no available connections in the pool.");
+DEFINE_int32(dbcp_data_source_idle_timeout_s, 300,
+    "Timeout value in seconds for idle DBCP DataSource objects in cache. It 
only takes "
+    "effect when query option 'clean_dbcp_ds_cache' is set as false.");
 
 Frontend::Frontend() {
   JniMethodDescriptor methods[] = {
diff --git a/be/src/service/query-options.cc b/be/src/service/query-options.cc
index 7f52e943c..49aa2fc74 100644
--- a/be/src/service/query-options.cc
+++ b/be/src/service/query-options.cc
@@ -1289,6 +1289,10 @@ Status impala::SetQueryOption(TImpalaQueryOptions::type 
option, const string& va
         query_options->__set_slot_count_strategy(enum_type);
         break;
       }
+      case TImpalaQueryOptions::CLEAN_DBCP_DS_CACHE: {
+        query_options->__set_clean_dbcp_ds_cache(IsTrue(value));
+        break;
+      }
       default:
         string key = to_string(option);
         if (IsRemovedQueryOption(key)) {
diff --git a/be/src/service/query-options.h b/be/src/service/query-options.h
index e9e23a38a..aa6d73ffb 100644
--- a/be/src/service/query-options.h
+++ b/be/src/service/query-options.h
@@ -52,7 +52,7 @@ typedef std::unordered_map<string, 
beeswax::TQueryOptionLevel::type>
 // time we add or remove a query option to/from the enum TImpalaQueryOptions.
 #define QUERY_OPTS_TABLE                                                       
          \
   DCHECK_EQ(_TImpalaQueryOptions_VALUES_TO_NAMES.size(),                       
          \
-      TImpalaQueryOptions::SLOT_COUNT_STRATEGY + 1);                           
          \
+      TImpalaQueryOptions::CLEAN_DBCP_DS_CACHE + 1);                           
          \
   REMOVED_QUERY_OPT_FN(abort_on_default_limit_exceeded, 
ABORT_ON_DEFAULT_LIMIT_EXCEEDED) \
   QUERY_OPT_FN(abort_on_error, ABORT_ON_ERROR, TQueryOptionLevel::REGULAR)     
          \
   REMOVED_QUERY_OPT_FN(allow_unsupported_formats, ALLOW_UNSUPPORTED_FORMATS)   
          \
@@ -331,6 +331,7 @@ typedef std::unordered_map<string, 
beeswax::TQueryOptionLevel::type>
   QUERY_OPT_FN(runtime_filter_ids_to_skip,                                     
          \
       RUNTIME_FILTER_IDS_TO_SKIP, TQueryOptionLevel::DEVELOPMENT)              
          \
   QUERY_OPT_FN(slot_count_strategy, SLOT_COUNT_STRATEGY, 
TQueryOptionLevel::ADVANCED)    \
+  QUERY_OPT_FN(clean_dbcp_ds_cache, CLEAN_DBCP_DS_CACHE, 
TQueryOptionLevel::ADVANCED)    \
   ;
 
 /// Enforce practical limits on some query options to avoid undesired query 
state.
diff --git a/be/src/util/backend-gflag-util.cc 
b/be/src/util/backend-gflag-util.cc
index 18bd639a8..f1b94562b 100644
--- a/be/src/util/backend-gflag-util.cc
+++ b/be/src/util/backend-gflag-util.cc
@@ -125,6 +125,9 @@ DECLARE_bool(enable_workload_mgmt);
 DECLARE_string(query_log_table_name);
 DECLARE_string(default_skipped_hms_event_types);
 DECLARE_string(common_hms_event_types);
+DECLARE_int32(dbcp_max_conn_pool_size);
+DECLARE_int32(dbcp_max_wait_millis_for_conn);
+DECLARE_int32(dbcp_data_source_idle_timeout_s);
 
 // HS2 SAML2.0 configuration
 // Defined here because TAG_FLAG caused issues in global-flags.cc
@@ -480,6 +483,9 @@ Status PopulateThriftBackendGflags(TBackendGflags& cfg) {
   cfg.__set_query_cpu_root_factor(FLAGS_query_cpu_root_factor);
   
cfg.__set_default_skipped_hms_event_types(FLAGS_default_skipped_hms_event_types);
   cfg.__set_common_hms_event_types(FLAGS_common_hms_event_types);
+  cfg.__set_dbcp_max_conn_pool_size(FLAGS_dbcp_max_conn_pool_size);
+  cfg.__set_dbcp_max_wait_millis_for_conn(FLAGS_dbcp_max_wait_millis_for_conn);
+  
cfg.__set_dbcp_data_source_idle_timeout(FLAGS_dbcp_data_source_idle_timeout_s);
   return Status::OK();
 }
 
diff --git a/common/thrift/BackendGflags.thrift 
b/common/thrift/BackendGflags.thrift
index 285316a3c..e51b6bad4 100644
--- a/common/thrift/BackendGflags.thrift
+++ b/common/thrift/BackendGflags.thrift
@@ -298,4 +298,10 @@ struct TBackendGflags {
   133: required string default_skipped_hms_event_types
 
   134: required string common_hms_event_types
+
+  135: required i32 dbcp_max_conn_pool_size
+
+  136: required i32 dbcp_max_wait_millis_for_conn
+
+  137: required i32 dbcp_data_source_idle_timeout
 }
diff --git a/common/thrift/ExternalDataSource.thrift 
b/common/thrift/ExternalDataSource.thrift
index 20fa9ec0f..7ec0d8079 100644
--- a/common/thrift/ExternalDataSource.thrift
+++ b/common/thrift/ExternalDataSource.thrift
@@ -139,6 +139,11 @@ struct TOpenParams {
 
   // The query limit, if specified.
   8: optional i64 limit
+
+  // Indicate if external JDBC table handler should clean DBCP DataSource 
object from
+  // cache when its reference count equals 0. Note that the reference count is 
tracked
+  // across all queries for a given data source in the coordinator.
+  9: optional bool clean_dbcp_ds_cache
 }
 
 // Returned by open().
diff --git a/common/thrift/ImpalaService.thrift 
b/common/thrift/ImpalaService.thrift
index d1a5ebad0..c83f46cdb 100644
--- a/common/thrift/ImpalaService.thrift
+++ b/common/thrift/ImpalaService.thrift
@@ -935,6 +935,11 @@ enum TImpalaQueryOptions {
   // Default to number of instances of largest query fragment 
(LARGEST_FRAGMENT).
   // See TSlotCountStrategy in Query.thrift for documentation of its possible 
values.
   SLOT_COUNT_STRATEGY = 177
+
+  // Indicate if external JDBC table handler should clean DBCP DataSource 
object from
+  // cache when its reference count equals 0. By caching DBCP DataSource 
objects, we can
+  // avoid to reload JDBC driver.
+  CLEAN_DBCP_DS_CACHE = 178
 }
 
 // The summary of a DML statement.
diff --git a/common/thrift/Query.thrift b/common/thrift/Query.thrift
index 6160c4a2f..075cf2739 100644
--- a/common/thrift/Query.thrift
+++ b/common/thrift/Query.thrift
@@ -727,6 +727,8 @@ struct TQueryOptions {
   // See comment in ImpalaService.thrift
   178: optional TSlotCountStrategy slot_count_strategy =
     TSlotCountStrategy.LARGEST_FRAGMENT
+
+  179: optional bool clean_dbcp_ds_cache = true;
 }
 
 // Impala currently has three types of sessions: Beeswax, HiveServer2 and 
external
diff --git 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/JdbcDataSource.java 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/JdbcDataSource.java
index 3cf978a9b..1c335d593 100644
--- a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/JdbcDataSource.java
+++ b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/JdbcDataSource.java
@@ -17,7 +17,7 @@
 
 package org.apache.impala.extdatasource.jdbc;
 
-
+import java.sql.Connection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -68,11 +68,6 @@ public class JdbcDataSource implements ExternalDataSource {
 
   private final static Logger LOG = 
LoggerFactory.getLogger(JdbcDataSource.class);
 
-  /**
-   * @see org.apache.impala.extdatasource.ExternalDataSourceExecutor
-   */
-  private final static String CACHE_CLASS_PREFIX = "CACHE_CLASS::";
-
   private static final TStatus STATUS_OK =
           new TStatus(TErrorCode.OK, Lists.newArrayList());
 
@@ -85,10 +80,10 @@ public class JdbcDataSource implements ExternalDataSource {
   // In getNext() and close() APIs, compare this value with the handle value 
passed in
   // input parameters to make sure the object is valid.
   private String scanHandle_;
-  // Set to true if initString started with "CACHE_CLASS::".
+  // Set as value of query option 'clean_dbcp_ds_cache'.
   // It is passed to DatabaseAccessor::close() to indicate if dataSourceCache 
should be
-  // cleaned when DatabaseAccessor object is closed.
-  private boolean cacheClass_ = false;
+  // cleaned when its reference count equals 0.
+  private boolean cleanDbcpDSCache_ = true;
 
   // Properties of external jdbc table, converted from initString which is 
specified in
   // create table statement.
@@ -145,6 +140,9 @@ public class JdbcDataSource implements ExternalDataSource {
     state_ = DataSourceState.OPENED;
     batchSize_ = params.getBatch_size();
     schema_ = params.getRow_schema();
+    if (params.isSetClean_dbcp_ds_cache()) {
+      cleanDbcpDSCache_ = params.isClean_dbcp_ds_cache();
+    }
     // 1. Check init string again because the call in prepare() was from
     // the frontend and used a different instance of this JdbcDataSource class.
     if (!convertInitStringToConfiguration(params.getInit_string())) {
@@ -158,6 +156,10 @@ public class JdbcDataSource implements ExternalDataSource {
       dbAccessor_ = DatabaseAccessorFactory.getAccessor(tableConfig_);
       buildQueryAndExecute(params);
     } catch (JdbcDatabaseAccessException e) {
+      if (dbAccessor_ != null) {
+        dbAccessor_.close(null, cleanDbcpDSCache_);
+        dbAccessor_ = null;
+      }
       return new TOpenResult(
           new TStatus(TErrorCode.RUNTIME_ERROR, 
Lists.newArrayList(e.getMessage())));
     }
@@ -190,6 +192,21 @@ public class JdbcDataSource implements ExternalDataSource {
           ++numRows;
         }
       } catch (UnsupportedOperationException e) {
+        try {
+          Connection connToBeClosed = null;
+          if (iterator_ != null) {
+            Preconditions.checkNotNull(dbAccessor_);
+            connToBeClosed = iterator_.getConnection();
+            iterator_.close();
+          }
+          if (dbAccessor_ != null) {
+            dbAccessor_.close(connToBeClosed, cleanDbcpDSCache_);
+          }
+          iterator_ = null;
+          dbAccessor_ = null;
+        } catch (JdbcDatabaseAccessException e2) {
+          LOG.warn("Failed to close connection or DataSource", e2);
+        }
         return new TGetNextResult(new TStatus(
             TErrorCode.JDBC_CONFIGURATION_ERROR, 
Lists.newArrayList(e.getMessage())));
       } catch (Exception e) {
@@ -211,8 +228,20 @@ public class JdbcDataSource implements ExternalDataSource {
     Preconditions.checkState(state_ == DataSourceState.OPENED);
     Preconditions.checkArgument(params.getScan_handle().equals(scanHandle_));
     try {
-      if (iterator_ != null) iterator_.close();
-      if (dbAccessor_ != null) dbAccessor_.close(!cacheClass_);
+      // JdbcRecordIterator.close() call java.sql.Connection.close() to close 
connection.
+      // But that API does not effectively add closed connection back to 
connection pool.
+      // We should call GenericJdbcDatabaseAccessor.close() to close a 
connection since
+      // the function call BasicDataSource.invalidateConnection() to close 
connection
+      // which is more efficient than java.sql.Connection.close().
+      Connection connToBeClosed = null;
+      if (iterator_ != null) {
+        Preconditions.checkNotNull(dbAccessor_);
+        connToBeClosed = iterator_.getConnection();
+        iterator_.close();
+      }
+      if (dbAccessor_ != null) {
+        dbAccessor_.close(connToBeClosed, cleanDbcpDSCache_);
+      }
       state_ = DataSourceState.CLOSED;
       return new TCloseResult(STATUS_OK);
     } catch (Exception e) {
@@ -228,10 +257,6 @@ public class JdbcDataSource implements ExternalDataSource {
         TypeReference<HashMap<String, String>> typeRef
             = new TypeReference<HashMap<String, String>>() {
         };
-        if (initString.startsWith(CACHE_CLASS_PREFIX)) {
-          initString = initString.substring(CACHE_CLASS_PREFIX.length());
-          cacheClass_ = true;
-        }
         // Replace '\n' with single space character so that one property 
setting in
         // initString can be broken into multiple lines for better readability.
         initString = initString.replace('\n', ' ');
diff --git 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/conf/JdbcStorageConfigManager.java
 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/conf/JdbcStorageConfigManager.java
index 280fbccd5..1463330bd 100644
--- 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/conf/JdbcStorageConfigManager.java
+++ 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/conf/JdbcStorageConfigManager.java
@@ -99,7 +99,7 @@ public class JdbcStorageConfigManager {
 
     try {
       String dbTypeName = 
props.get(JdbcStorageConfig.DATABASE_TYPE.getPropertyName());
-      DatabaseType.valueOf(dbTypeName);
+      DatabaseType.valueOf(dbTypeName.toUpperCase());
     } catch (Exception e) {
       throw new IllegalArgumentException("Unknown database type.", e);
     }
diff --git 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/DataSourceObjectCache.java
 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/DataSourceObjectCache.java
new file mode 100644
index 000000000..5937a0757
--- /dev/null
+++ 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/DataSourceObjectCache.java
@@ -0,0 +1,211 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.impala.extdatasource.jdbc.dao;
+
+import java.io.File;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.sql.SQLException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.dbcp2.BasicDataSource;
+import org.apache.commons.dbcp2.BasicDataSourceFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.impala.common.FileSystemUtil;
+import 
org.apache.impala.extdatasource.jdbc.exception.JdbcDatabaseAccessException;
+import org.apache.impala.service.BackendConfig;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+
+/**
+ * Thread safe cache for storing SQL DataSource object with reference count.
+ */
+public class DataSourceObjectCache {
+
+  private static final Logger LOG = 
LoggerFactory.getLogger(DataSourceObjectCache.class);
+  private static final long CLEANUP_INTERVAL_MS = 10000;
+  private static final long DEFAULT_IDLE_DATA_SOURCE_TIMEOUT_MS = 300000;
+
+  /**
+   * Map entry object with reference count.
+   */
+  class Entry {
+    private DataSource dbcpDataSource_;
+    private String driverFilePath_;
+    private int referenceCount_;
+    private long idleTimeStamp_;
+
+    public Entry(DataSource dataSource, String driverFilePath) {
+      dbcpDataSource_ = dataSource;
+      driverFilePath_ = driverFilePath;
+      referenceCount_ = 1;
+      idleTimeStamp_ = 0;
+    }
+    public DataSource getDataSource() { return dbcpDataSource_; }
+    public int getReference() { return referenceCount_; }
+    public String getDriverFilePath() { return driverFilePath_; }
+    public void incrementReference() { ++referenceCount_; }
+    public int decrementReference() { return --referenceCount_; }
+    public long getIdleTimeStamp() { return idleTimeStamp_; }
+    public void setIdleTimeStamp(long timeStamp) { idleTimeStamp_ = timeStamp; 
}
+  }
+
+  private final Map<String, Entry> cacheMap_ = new HashMap<>();
+  private Thread cleanupThread_;
+
+  public DataSourceObjectCache() {
+    cleanupThread_ = new Thread(new Runnable() {
+      @Override
+      public void run() {
+        cleanup();
+      }
+    });
+    cleanupThread_.setDaemon(true);
+    cleanupThread_.setName("DataSourceObjectCache daemon thread");
+    cleanupThread_.start();
+  }
+
+  /*
+   * Return DataSource for the given cache-key. The cache-key is generated as
+   * jdbc-url + username, like 
'jdbc:impala://10.96.132.138:21050/tpcds.impala_user'.
+   */
+  public DataSource get(String cacheKey, Properties props)
+      throws JdbcDatabaseAccessException {
+    synchronized (this) {
+      Entry entry = cacheMap_.get(cacheKey);
+      if (entry != null) {
+        entry.incrementReference();
+        return entry.getDataSource();
+      }
+
+      LOG.info("Datasource for '{}' was not cached. Loading now.", cacheKey);
+      String driverUrl = props.getProperty("driverUrl");
+      try {
+        BasicDataSource dbcpDs = 
BasicDataSourceFactory.createDataSource(props);
+        // Copy jdbc driver to local file system.
+        String driverLocalPath = 
FileSystemUtil.copyFileFromUriToLocal(driverUrl);
+        // Create class loader for jdbc driver and set it for the
+        // BasicDataSource object so that the driver class could be loaded
+        // from jar file without searching classpath.
+        URL driverJarUrl = new File(driverLocalPath).toURI().toURL();
+        URLClassLoader driverLoader = URLClassLoader.newInstance(
+            new URL[] { driverJarUrl }, getClass().getClassLoader());
+        dbcpDs.setDriverClassLoader(driverLoader);
+        entry = new Entry(dbcpDs, driverLocalPath);
+        cacheMap_.put(cacheKey, entry);
+        return dbcpDs;
+      } catch (Exception e) {
+        throw new JdbcDatabaseAccessException(String.format(
+            "Unable to fetch jdbc driver jar from location '%s'. ", 
driverUrl));
+      }
+    }
+  }
+
+  /*
+   * Release DataSource object for the given cache-key.
+   */
+  public void remove(String cacheKey, boolean cleanDbcpDSCache) {
+    if (Strings.isNullOrEmpty(cacheKey)) return;
+    Entry entry = null;
+    synchronized (this) {
+      entry = cacheMap_.get(cacheKey);
+      if (entry == null) return;
+      Preconditions.checkState(entry.getReference() > 0);
+      entry.decrementReference();
+      if (entry.getReference() > 0) {
+        return;
+      } else if (!cleanDbcpDSCache) {
+        entry.setIdleTimeStamp(System.currentTimeMillis());
+        return;
+      }
+      // Remove SQL DataSource from cache when reference count reaches 0
+      // and cleanDbcpDSCache is set as true.
+      cacheMap_.remove(cacheKey);
+    }
+    closeDataSource(cacheKey, entry);
+  }
+
+  /*
+   * This function is running on a working thread in the coordinator daemon.
+   * It cleans up idle DataSource objects in 10 seconds interval.
+   */
+  private void cleanup() {
+    long idleTimeoutInMS =
+        BackendConfig.INSTANCE.getDbcpDataSourceIdleTimeoutInSeconds() * 1000;
+    if (idleTimeoutInMS < 0) {
+      idleTimeoutInMS = DEFAULT_IDLE_DATA_SOURCE_TIMEOUT_MS;
+    }
+    while (true) {
+      try {
+        Thread.sleep((long) CLEANUP_INTERVAL_MS);
+      } catch (InterruptedException e) {
+        LOG.info("Thread.sleep failed, ", e);
+      }
+      long currTime = System.currentTimeMillis();
+      Map<String, Entry> entriesToBeClosed = new HashMap<>();
+      synchronized (this) {
+        Iterator<Map.Entry<String, Entry>> iter = 
cacheMap_.entrySet().iterator();
+        while (iter.hasNext()) {
+          Map.Entry<String, Entry> mapEntry = iter.next();
+          String cacheKey = mapEntry.getKey();
+          Entry entry = mapEntry.getValue();
+          if (entry.getReference() <= 0 &&
+              currTime - entry.getIdleTimeStamp() > idleTimeoutInMS) {
+            iter.remove();
+            entriesToBeClosed.put(cacheKey, entry);
+          }
+        }
+      }
+      for (Map.Entry<String, Entry> mapEntry : entriesToBeClosed.entrySet()) {
+        closeDataSource(mapEntry.getKey(), mapEntry.getValue());
+      }
+      entriesToBeClosed.clear();
+    }
+  }
+
+  /*
+   * Close DataSource object referenced by the given entry.
+   */
+  private void closeDataSource(String cacheKey, Entry entry) {
+    if (Strings.isNullOrEmpty(cacheKey) || entry == null) return;
+    DataSource ds = entry.getDataSource();
+    String driverFilePath = entry.getDriverFilePath();
+    Preconditions.checkState(ds instanceof BasicDataSource);
+    BasicDataSource dbcpDs = (BasicDataSource) ds;
+    try {
+      dbcpDs.close();
+      LOG.info("Close datasource for '{}'.", cacheKey);
+      if (driverFilePath != null) {
+        // Delete the jar file of jdbc driver from local file system.
+        Path localJarPath = new Path("file://" + driverFilePath);
+        FileSystemUtil.deleteIfExists(localJarPath);
+      }
+    } catch (SQLException e) {
+      LOG.warn("Caught exception during datasource cleanup.", e);
+    }
+  }
+}
diff --git 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/DatabaseAccessor.java
 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/DatabaseAccessor.java
index 941d2745d..e1af2af76 100644
--- 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/DatabaseAccessor.java
+++ 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/DatabaseAccessor.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.extdatasource.jdbc.dao;
 
+import java.sql.Connection;
 
 import org.apache.hadoop.conf.Configuration;
 import 
org.apache.impala.extdatasource.jdbc.exception.JdbcDatabaseAccessException;
@@ -29,7 +30,7 @@ public interface DatabaseAccessor {
   JdbcRecordIterator getRecordIterator(Configuration conf, int limit, int 
offset)
       throws JdbcDatabaseAccessException;
 
-  void close(boolean cleanCache);
+  void close(Connection connToBeClosed, boolean cleanDbcpDSCache);
 
   String getCaseSensitiveName(String name);
 
diff --git 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/GenericJdbcDatabaseAccessor.java
 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/GenericJdbcDatabaseAccessor.java
index f071ee173..0f7091d50 100644
--- 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/GenericJdbcDatabaseAccessor.java
+++ 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/GenericJdbcDatabaseAccessor.java
@@ -17,10 +17,7 @@
 
 package org.apache.impala.extdatasource.jdbc.dao;
 
-import java.io.File;
 import java.io.IOException;
-import java.net.URL;
-import java.net.URLClassLoader;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -39,13 +36,11 @@ import java.util.regex.Pattern;
 import javax.sql.DataSource;
 
 import org.apache.commons.dbcp2.BasicDataSource;
-import org.apache.commons.dbcp2.BasicDataSourceFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.impala.common.FileSystemUtil;
 import org.apache.impala.extdatasource.jdbc.conf.JdbcStorageConfig;
 import org.apache.impala.extdatasource.jdbc.conf.JdbcStorageConfigManager;
 import 
org.apache.impala.extdatasource.jdbc.exception.JdbcDatabaseAccessException;
+import org.apache.impala.service.BackendConfig;
 import org.apache.impala.service.FeSupport;
 import org.apache.impala.thrift.TCacheJarResult;
 import org.apache.impala.thrift.TErrorCode;
@@ -55,9 +50,6 @@ import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.RemovalListener;
 
 /**
  * A data accessor that should in theory work with all JDBC compliant database 
drivers.
@@ -69,30 +61,14 @@ public class GenericJdbcDatabaseAccessor implements 
DatabaseAccessor {
 
   protected static final String DBCP_CONFIG_PREFIX = "dbcp";
   protected static final int DEFAULT_FETCH_SIZE = 1000;
-  protected static final int CACHE_EXPIRE_TIMEOUT_S = 1800;
-  protected static final int CACHE_SIZE = 100;
-  protected String jdbcDriverLocalPath = null;
   protected static final long MILLI_SECONDS_PER_DAY = 86400000;
 
   protected DataSource dbcpDataSource = null;
+  protected String dataSourceCacheKey = null;
+
   // Cache datasource for sharing
-  protected static final Cache<String, DataSource> dataSourceCache = 
CacheBuilder
-      .newBuilder()
-      .removalListener((RemovalListener<String, DataSource>) notification -> {
-        DataSource ds = notification.getValue();
-        if (ds instanceof BasicDataSource) {
-          BasicDataSource dbcpDs = (BasicDataSource) ds;
-          try {
-            dbcpDs.close();
-            LOG.info("Close datasource for '{}'.", notification.getKey());
-          } catch (SQLException e) {
-            LOG.warn("Caught exception during datasource cleanup.", e);
-          }
-        }
-      })
-      .expireAfterAccess(CACHE_EXPIRE_TIMEOUT_S, TimeUnit.SECONDS)
-      .maximumSize(CACHE_SIZE)
-      .build();
+  private static final DataSourceObjectCache dataSourceCache =
+      new DataSourceObjectCache();
 
   @Override
   public int getTotalNumberOfRecords(Configuration conf)
@@ -162,17 +138,14 @@ public class GenericJdbcDatabaseAccessor implements 
DatabaseAccessor {
 
 
   @Override
-  public void close(boolean cleanCache) {
-    dbcpDataSource = null;
-    if (cleanCache) {
-      Preconditions.checkNotNull(dataSourceCache);
-      dataSourceCache.invalidateAll();
-    }
-    if (jdbcDriverLocalPath != null) {
-      // Delete the jar file of jdbc driver.
-      Path localJarPath = new Path("file://" + jdbcDriverLocalPath);
-      FileSystemUtil.deleteIfExists(localJarPath);
+  public void close(Connection connToBeClosed, boolean cleanDbcpDSCache) {
+    if (connToBeClosed != null) {
+      Preconditions.checkNotNull(dbcpDataSource);
+      invalidateConnection(connToBeClosed);
     }
+    Preconditions.checkNotNull(dataSourceCache);
+    dataSourceCache.remove(dataSourceCacheKey, cleanDbcpDSCache);
+    dbcpDataSource = null;
   }
 
   @Override
@@ -269,53 +242,39 @@ public class GenericJdbcDatabaseAccessor implements 
DatabaseAccessor {
       LOG.warn("Caught exception during statement cleanup.", e);
     }
 
+    if (conn != null) {
+      Preconditions.checkNotNull(dbcpDataSource);
+      // Call BasicDataSource.invalidateConnection() to close a connection 
since this API
+      // is more effective than Connection.close().
+      invalidateConnection(conn);
+    }
+  }
+
+  /*
+   * Manually invalidates a connection, effectively requesting the pool to try 
to close
+   * it, and reclaim pool capacity.
+   */
+  private void invalidateConnection(Connection conn) {
     try {
-      if (conn != null) {
-        conn.close();
-      }
-    } catch (SQLException e) {
+      Preconditions.checkState(dbcpDataSource instanceof BasicDataSource);
+      BasicDataSource basicDataSource = (BasicDataSource) dbcpDataSource;
+      basicDataSource.invalidateConnection(conn);
+    } catch (Exception e) {
       LOG.warn("Caught exception during connection cleanup.", e);
     }
   }
 
   protected void initializeDatabaseSource(Configuration conf)
-      throws ExecutionException {
+      throws JdbcDatabaseAccessException {
     if (dbcpDataSource == null) {
       synchronized (this) {
         if (dbcpDataSource == null) {
           Properties props = getConnectionPoolProperties(conf);
           String jdbcUrl = props.getProperty("url");
           String username = props.getProperty("username", "-");
-          String cacheMapKey = String.format("%s.%s", jdbcUrl, username);
+          dataSourceCacheKey = String.format("%s.%s", jdbcUrl, username);
           Preconditions.checkNotNull(dataSourceCache);
-          dbcpDataSource = dataSourceCache.get(cacheMapKey,
-              () -> {
-                LOG.info("Datasource for '{}' was not cached. "
-                    + "Loading now.", cacheMapKey);
-                BasicDataSource basicDataSource =
-                    BasicDataSourceFactory.createDataSource(props);
-                // Put jdbc driver to cache
-                String driverUrl = props.getProperty("driverUrl");
-                String driverLocalPath;
-                try {
-                  driverLocalPath =
-                    FileSystemUtil.copyFileFromUriToLocal(driverUrl);
-                } catch (IOException e) {
-                  throw new JdbcDatabaseAccessException(String.format(
-                      "Unable to fetch jdbc driver jar from location '%s'. ",
-                      driverUrl));
-                }
-                // Create class loader for jdbc driver and set it for the
-                // BasicDataSource object so that the driver class could be 
loaded
-                // from jar file without searching classpath.
-                jdbcDriverLocalPath = driverLocalPath;
-                URL driverJarUrl = new File(driverLocalPath).toURI().toURL();
-                URLClassLoader driverLoader =
-                    URLClassLoader.newInstance( new URL[] { driverJarUrl },
-                        getClass().getClassLoader());
-                basicDataSource.setDriverClassLoader(driverLoader);
-                return basicDataSource;
-              });
+          dbcpDataSource = dataSourceCache.get(dataSourceCacheKey, props);
         }
       }
     }
@@ -372,10 +331,15 @@ public class GenericJdbcDatabaseAccessor implements 
DatabaseAccessor {
     // BasicDataSourceFactory.createDataSource() before the class loader is set
     // by calling BasicDataSource.setDriverClassLoader.
     // props.put("initialSize", "1");
-    props.put("maxActive", "3");
-    props.put("maxIdle", "0");
-    props.put("maxWait", "10000");
-    props.put("timeBetweenEvictionRunsMillis", "30000");
+    // 'maxActive' and 'maxWait' properties are renamed as 'maxTotal' and 
'maxWaitMillis'
+    // respectively in org.apache.commons.dbcp2.
+    props.put("maxTotal",
+        String.valueOf(BackendConfig.INSTANCE.getDbcpMaxConnPoolSize()));
+    props.put("maxIdle",
+        String.valueOf(BackendConfig.INSTANCE.getDbcpMaxConnPoolSize()));
+    props.put("minIdle", "0");
+    props.put("maxWaitMillis",
+        String.valueOf(BackendConfig.INSTANCE.getDbcpMaxWaitMillisForConn()));
     return props;
   }
 
diff --git 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/JdbcRecordIterator.java
 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/JdbcRecordIterator.java
index 7db628d2c..602726ab6 100644
--- 
a/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/JdbcRecordIterator.java
+++ 
b/fe/src/main/java/org/apache/impala/extdatasource/jdbc/dao/JdbcRecordIterator.java
@@ -82,6 +82,8 @@ public class JdbcRecordIterator {
     LOGGER.debug("Iterator ColumnNames = {}", jdbcColumnNames);
   }
 
+  public Connection getConnection() { return conn; }
+
   public boolean hasNext() throws JdbcDatabaseAccessException {
     try {
       return rs.next();
@@ -189,7 +191,7 @@ public class JdbcRecordIterator {
     try {
       rs.close();
       ps.close();
-      conn.close();
+      // Connection is closed in GenericJdbcDatabaseAccessor.close().
     } catch (Exception e) {
       LOGGER.warn("Caught exception while trying to close database objects", 
e);
       throw new JdbcDatabaseAccessException(
diff --git a/fe/src/main/java/org/apache/impala/service/BackendConfig.java 
b/fe/src/main/java/org/apache/impala/service/BackendConfig.java
index 2add99a2f..a050fe016 100644
--- a/fe/src/main/java/org/apache/impala/service/BackendConfig.java
+++ b/fe/src/main/java/org/apache/impala/service/BackendConfig.java
@@ -506,4 +506,16 @@ public class BackendConfig {
   public String getCommonHmsEventTypes() {
     return backendCfg_.common_hms_event_types;
   }
+
+  public int getDbcpMaxConnPoolSize() {
+    return backendCfg_.dbcp_max_conn_pool_size;
+  }
+
+  public int getDbcpMaxWaitMillisForConn() {
+    return backendCfg_.dbcp_max_wait_millis_for_conn;
+  }
+
+  public int getDbcpDataSourceIdleTimeoutInSeconds() {
+    return backendCfg_.dbcp_data_source_idle_timeout;
+  }
 }
diff --git a/testdata/bin/create-load-data.sh b/testdata/bin/create-load-data.sh
index 011576495..07b2971a2 100755
--- a/testdata/bin/create-load-data.sh
+++ b/testdata/bin/create-load-data.sh
@@ -491,7 +491,12 @@ function copy-and-load-ext-data-source {
   ${IMPALA_HOME}/testdata/bin/load-ext-data-sources.sh
   # Create data sources table.
   ${IMPALA_HOME}/bin/impala-shell.sh -i ${IMPALAD} -f\
-    ${IMPALA_HOME}/testdata/bin/create-ext-data-source-table.sql
+      ${IMPALA_HOME}/testdata/bin/create-ext-data-source-table.sql
+  # Create external JDBC tables for TPCH/TPCDS queries.
+  ${IMPALA_HOME}/testdata/bin/create-tpc-jdbc-tables.py 
--jdbc_db_name=tpch_jdbc \
+      --workload=tpch --database_type=impala --clean
+  ${IMPALA_HOME}/testdata/bin/create-tpc-jdbc-tables.py 
--jdbc_db_name=tpcds_jdbc \
+      --workload=tpcds --database_type=impala --clean
 }
 
 function check-hdfs-health {
diff --git a/testdata/bin/create-tpc-jdbc-tables.py 
b/testdata/bin/create-tpc-jdbc-tables.py
new file mode 100755
index 000000000..d6bf226b8
--- /dev/null
+++ b/testdata/bin/create-tpc-jdbc-tables.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env impala-python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Script to create TPC-[H|DS] external JDBC tables.
+#
+# External JDBC tables are created in the specified 'jdbc_db_name' database.
+
+from __future__ import absolute_import, division, print_function
+import logging
+import os
+import sqlparse
+
+LOG = logging.getLogger(os.path.splitext(os.path.basename(__file__))[0])
+
+jdbc_db_name = None
+verbose = False
+workload = None
+database_type = None
+jdbc_url = None
+jdbc_driver = None
+jdbc_auth = None
+jdbc_properties = None
+dbcp_username = None
+dbcp_password = None
+
+
def drop_tables():
  """Delete the target JDBC database ('jdbc_db_name') and every table in it.

  Uses DROP DATABASE ... CASCADE so the mapping tables are removed along
  with the database itself.
  """
  stmt = "drop database if exists {0} cascade".format(jdbc_db_name)
  with cluster.impala.cursor() as impala:
    impala.execute(stmt)
+
+
def create_tables():
  """Create the 'jdbc_db_name' database and its external JDBC tables.

  Reads the workload's schema template file, substitutes the connection
  parameters into each statement, and executes the statements on Impala.
  """
  # Values substituted into the {placeholders} of the schema template.
  sql_params = {
      "jdbc_db_name": jdbc_db_name,
      "database_type": database_type,
      "jdbc_url": jdbc_url,
      "jdbc_driver": jdbc_driver,
      "driver_url": driver_url,
      "jdbc_auth": jdbc_auth,
      "jdbc_properties": jdbc_properties,
      "dbcp_username": dbcp_username,
      "dbcp_password": dbcp_password}

  with open(get_test_file_path(workload), "r") as template_file:
    statements = sqlparse.split(template_file.read())

  with cluster.impala.cursor() as impala:
    impala.create_db_if_not_exists(jdbc_db_name)
    impala.execute("USE %s" % jdbc_db_name)
    for stmt in statements:
      # Drop SQL comments and the trailing semicolon before formatting.
      stripped = sqlparse.format(stmt.rstrip(';'), strip_comments=True)
      query_str = stripped.format(**sql_params)
      if not query_str:
        continue
      if verbose:
        print(query_str)
      impala.execute(query_str)
+
+
def get_test_file_path(workload):
  """Return the path of the schema template file for *workload*, i.e.
  $IMPALA_HOME/testdata/datasets/<workload>/<workload>_jdbc_schema_template.sql.

  Raises an Exception when IMPALA_HOME is not set in the environment.
  """
  if "IMPALA_HOME" not in os.environ:
    raise Exception("IMPALA_HOME must be set")
  template_name = "%s_jdbc_schema_template.sql" % (workload)
  return os.path.join(os.environ["IMPALA_HOME"], "testdata", "datasets",
      workload, template_name)
+
+
if __name__ == "__main__":
  from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
  import tests.comparison.cli_options as cli_options

  parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
  cli_options.add_logging_options(parser)
  cli_options.add_cluster_options(parser)
  parser.add_argument("-t", "--jdbc_db_name", required=True,
      help="Target JDBC database name.")
  parser.add_argument("-w", "--workload", choices=['tpch', 'tpcds'],
      required=True)
  parser.add_argument("--database_type", required=True,
      help="Database type")
  parser.add_argument("--database_host", required=False,
      help="Hostname or IP address of RDBMS server")
  parser.add_argument("--database_port", required=False,
      help="TCP port of RDBMS server")
  parser.add_argument("--database_name", required=False,
      help="The name of database on RDBMS server")
  parser.add_argument("--jdbc_auth", required=False,
      help="The authentication method for RDBMS server")
  parser.add_argument("--jdbc_properties", required=False,
      help="Additional JDBC properties as comma separated name/value pairs")
  parser.add_argument("-v", "--verbose", action='store_true',
      help="Print the executed statements.")
  parser.add_argument("--clean", action='store_true',
      help="Drop all tables in the specified target JDBC database.")
  args = parser.parse_args()

  FILESYSTEM_PREFIX = os.getenv("FILESYSTEM_PREFIX")
  INTERNAL_LISTEN_HOST = os.getenv("INTERNAL_LISTEN_HOST")

  cli_options.configure_logging(args.log_level,
      debug_log_file=args.debug_log_file)
  cluster = cli_options.create_cluster(args)
  db_port = args.database_port
  jdbc_db_name = args.jdbc_db_name
  jdbc_auth = args.jdbc_auth
  jdbc_properties = args.jdbc_properties
  workload = args.workload
  # Normalize to upper case so that mixed-case values like 'Impala' match the
  # comparisons below.
  database_type = args.database_type.upper()
  if args.database_host:
    rdbms_db_host = args.database_host
  elif INTERNAL_LISTEN_HOST:
    rdbms_db_host = INTERNAL_LISTEN_HOST
  else:
    rdbms_db_host = 'localhost'
  if args.database_name:
    rdbms_db_name = args.database_name
  else:
    # By default the remote database is named after the workload (tpch/tpcds).
    rdbms_db_name = workload
  verbose = args.verbose

  if database_type == 'IMPALA':
    if not db_port:
      db_port = 21050
    jdbc_url = 'jdbc:impala://{0}:{1}/{2}'.format(rdbms_db_host, db_port,
        rdbms_db_name)
    jdbc_driver = 'com.cloudera.impala.jdbc.Driver'
    driver_url = '{0}/test-warehouse/data-sources/jdbc-drivers/{1}'.format(
        FILESYSTEM_PREFIX, 'ImpalaJDBC42.jar')
    if not jdbc_auth:
      jdbc_auth = 'AuthMech=0'
    if not jdbc_properties:
      jdbc_properties = ''
    dbcp_username = 'impala'
    dbcp_password = 'cloudera'
  elif database_type == 'POSTGRES':
    if not db_port:
      db_port = 5432
    jdbc_url = 'jdbc:postgresql://{0}:{1}/{2}'.format(
        rdbms_db_host, db_port, rdbms_db_name)
    jdbc_driver = 'org.postgresql.Driver'
    driver_url = '{0}/test-warehouse/data-sources/jdbc-drivers/{1}'.format(
        FILESYSTEM_PREFIX, 'postgresql-jdbc.jar')
    jdbc_auth = ''
    jdbc_properties = ''
    dbcp_username = 'hiveuser'
    dbcp_password = 'password'
  elif database_type == 'MYSQL':
    if not db_port:
      db_port = 3306
    jdbc_url = 'jdbc:mysql://{0}:{1}/{2}'.format(rdbms_db_host, db_port,
        rdbms_db_name)
    # Fixed: the class name previously contained a stray trailing double
    # quote ('com.mysql.cj.jdbc.Driver"'), which would break driver loading.
    jdbc_driver = 'com.mysql.cj.jdbc.Driver'
    driver_url = '{0}/test-warehouse/data-sources/jdbc-drivers/{1}'.format(
        FILESYSTEM_PREFIX, 'mysql-jdbc.jar')
    jdbc_auth = ''
    jdbc_properties = ''
    dbcp_username = 'hiveuser'
    dbcp_password = 'password'
  else:
    # TODO support other database servers.
    raise Exception('Unsupported database type: {0}'.format(database_type))

  if args.clean: drop_tables()
  create_tables()
diff --git a/testdata/datasets/tpcds/tpcds_jdbc_schema_template.sql 
b/testdata/datasets/tpcds/tpcds_jdbc_schema_template.sql
new file mode 100644
index 000000000..bb4f9c62c
--- /dev/null
+++ b/testdata/datasets/tpcds/tpcds_jdbc_schema_template.sql
@@ -0,0 +1,804 @@
+---- Licensed to the Apache Software Foundation (ASF) under one
+---- or more contributor license agreements.  See the NOTICE file
+---- distributed with this work for additional information
+---- regarding copyright ownership.  The ASF licenses this file
+---- to you under the Apache License, Version 2.0 (the
+---- "License"); you may not use this file except in compliance
+---- with the License.  You may obtain a copy of the License at
+----
+----   http://www.apache.org/licenses/LICENSE-2.0
+----
+---- Unless required by applicable law or agreed to in writing,
+---- software distributed under the License is distributed on an
+---- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+---- KIND, either express or implied.  See the License for the
+---- specific language governing permissions and limitations
+---- under the License.
+
+---- Template SQL statements to create external JDBC tables for TPCDS dataset
+---- in specified 'jdbc_db_name' JDBC database.
+
+--- store_sales
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.store_sales (
+  ss_sold_time_sk INT,
+  ss_item_sk BIGINT,
+  ss_customer_sk INT,
+  ss_cdemo_sk INT,
+  ss_hdemo_sk INT,
+  ss_addr_sk INT,
+  ss_store_sk INT,
+  ss_promo_sk INT,
+  ss_ticket_number BIGINT,
+  ss_quantity INT,
+  ss_wholesale_cost DECIMAL(7,2),
+  ss_list_price DECIMAL(7,2),
+  ss_sales_price DECIMAL(7,2),
+  ss_ext_discount_amt DECIMAL(7,2),
+  ss_ext_sales_price DECIMAL(7,2),
+  ss_ext_wholesale_cost DECIMAL(7,2),
+  ss_ext_list_price DECIMAL(7,2),
+  ss_ext_tax DECIMAL(7,2),
+  ss_coupon_amt DECIMAL(7,2),
+  ss_net_paid DECIMAL(7,2),
+  ss_net_paid_inc_tax DECIMAL(7,2),
+  ss_net_profit DECIMAL(7,2),
+  ss_sold_date_sk INT
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='store_sales');
+
+--- web_sales
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.web_sales (
+  ws_sold_date_sk INT,
+  ws_sold_time_sk INT,
+  ws_ship_date_sk INT,
+  ws_item_sk BIGINT,
+  ws_bill_customer_sk INT,
+  ws_bill_cdemo_sk INT,
+  ws_bill_hdemo_sk INT,
+  ws_bill_addr_sk INT,
+  ws_ship_customer_sk INT,
+  ws_ship_cdemo_sk INT,
+  ws_ship_hdemo_sk INT,
+  ws_ship_addr_sk INT,
+  ws_web_page_sk INT,
+  ws_web_site_sk INT,
+  ws_ship_mode_sk INT,
+  ws_warehouse_sk INT,
+  ws_promo_sk INT,
+  ws_order_number BIGINT,
+  ws_quantity INT,
+  ws_wholesale_cost DECIMAL(7,2),
+  ws_list_price DECIMAL(7,2),
+  ws_sales_price DECIMAL(7,2),
+  ws_ext_discount_amt DECIMAL(7,2),
+  ws_ext_sales_price DECIMAL(7,2),
+  ws_ext_wholesale_cost DECIMAL(7,2),
+  ws_ext_list_price DECIMAL(7,2),
+  ws_ext_tax DECIMAL(7,2),
+  ws_coupon_amt DECIMAL(7,2),
+  ws_ext_ship_cost DECIMAL(7,2),
+  ws_net_paid DECIMAL(7,2),
+  ws_net_paid_inc_tax DECIMAL(7,2),
+  ws_net_paid_inc_ship DECIMAL(7,2),
+  ws_net_paid_inc_ship_tax DECIMAL(7,2),
+  ws_net_profit DECIMAL(7,2)
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='web_sales');
+
+--- catalog_sales
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.catalog_sales (
+  cs_sold_date_sk INT,
+  cs_sold_time_sk INT,
+  cs_ship_date_sk INT,
+  cs_bill_customer_sk INT,
+  cs_bill_cdemo_sk INT,
+  cs_bill_hdemo_sk INT,
+  cs_bill_addr_sk INT,
+  cs_ship_customer_sk INT,
+  cs_ship_cdemo_sk INT,
+  cs_ship_hdemo_sk INT,
+  cs_ship_addr_sk INT,
+  cs_call_center_sk INT,
+  cs_catalog_page_sk INT,
+  cs_ship_mode_sk INT,
+  cs_warehouse_sk INT,
+  cs_item_sk BIGINT,
+  cs_promo_sk INT,
+  cs_order_number BIGINT,
+  cs_quantity INT,
+  cs_wholesale_cost DECIMAL(7,2),
+  cs_list_price DECIMAL(7,2),
+  cs_sales_price DECIMAL(7,2),
+  cs_ext_discount_amt DECIMAL(7,2),
+  cs_ext_sales_price DECIMAL(7,2),
+  cs_ext_wholesale_cost DECIMAL(7,2),
+  cs_ext_list_price DECIMAL(7,2),
+  cs_ext_tax DECIMAL(7,2),
+  cs_coupon_amt DECIMAL(7,2),
+  cs_ext_ship_cost DECIMAL(7,2),
+  cs_net_paid DECIMAL(7,2),
+  cs_net_paid_inc_tax DECIMAL(7,2),
+  cs_net_paid_inc_ship DECIMAL(7,2),
+  cs_net_paid_inc_ship_tax DECIMAL(7,2),
+  cs_net_profit DECIMAL(7,2)
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='catalog_sales');
+
+--- store_returns
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.store_returns (
+  sr_returned_date_sk INT,
+  sr_return_time_sk INT,
+  sr_item_sk BIGINT,
+  sr_customer_sk INT,
+  sr_cdemo_sk INT,
+  sr_hdemo_sk INT,
+  sr_addr_sk INT,
+  sr_store_sk INT,
+  sr_reason_sk INT,
+  sr_ticket_number BIGINT,
+  sr_return_quantity INT,
+  sr_return_amt DECIMAL(7,2),
+  sr_return_tax DECIMAL(7,2),
+  sr_return_amt_inc_tax DECIMAL(7,2),
+  sr_fee DECIMAL(7,2),
+  sr_return_ship_cost DECIMAL(7,2),
+  sr_refunded_cash DECIMAL(7,2),
+  sr_reversed_charge DECIMAL(7,2),
+  sr_store_credit DECIMAL(7,2),
+  sr_net_loss DECIMAL(7,2)
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='store_returns');
+
+--- web_returns
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.web_returns (
+  wr_returned_date_sk INT,
+  wr_returned_time_sk INT,
+  wr_item_sk BIGINT,
+  wr_refunded_customer_sk INT,
+  wr_refunded_cdemo_sk INT,
+  wr_refunded_hdemo_sk INT,
+  wr_refunded_addr_sk INT,
+  wr_returning_customer_sk INT,
+  wr_returning_cdemo_sk INT,
+  wr_returning_hdemo_sk INT,
+  wr_returning_addr_sk INT,
+  wr_web_page_sk INT,
+  wr_reason_sk INT,
+  wr_order_number BIGINT,
+  wr_return_quantity INT,
+  wr_return_amt DECIMAL(7,2),
+  wr_return_tax DECIMAL(7,2),
+  wr_return_amt_inc_tax DECIMAL(7,2),
+  wr_fee DECIMAL(7,2),
+  wr_return_ship_cost DECIMAL(7,2),
+  wr_refunded_cash DECIMAL(7,2),
+  wr_reversed_charge DECIMAL(7,2),
+  wr_account_credit DECIMAL(7,2),
+  wr_net_loss DECIMAL(7,2)
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='web_returns');
+
+--- catalog_returns
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.catalog_returns (
+  cr_returned_date_sk INT,
+  cr_returned_time_sk INT,
+  cr_item_sk BIGINT,
+  cr_refunded_customer_sk INT,
+  cr_refunded_cdemo_sk INT,
+  cr_refunded_hdemo_sk INT,
+  cr_refunded_addr_sk INT,
+  cr_returning_customer_sk INT,
+  cr_returning_cdemo_sk INT,
+  cr_returning_hdemo_sk INT,
+  cr_returning_addr_sk INT,
+  cr_call_center_sk INT,
+  cr_catalog_page_sk INT,
+  cr_ship_mode_sk INT,
+  cr_warehouse_sk INT,
+  cr_reason_sk INT,
+  cr_order_number BIGINT,
+  cr_return_quantity INT,
+  cr_return_amount DECIMAL(7,2),
+  cr_return_tax DECIMAL(7,2),
+  cr_return_amt_inc_tax DECIMAL(7,2),
+  cr_fee DECIMAL(7,2),
+  cr_return_ship_cost DECIMAL(7,2),
+  cr_refunded_cash DECIMAL(7,2),
+  cr_reversed_charge DECIMAL(7,2),
+  cr_store_credit DECIMAL(7,2),
+  cr_net_loss DECIMAL(7,2)
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='catalog_returns');
+
+--- inventory
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.inventory (
+  inv_date_sk INT,
+  inv_item_sk BIGINT,
+  inv_warehouse_sk INT,
+  inv_quantity_on_hand INT
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='inventory');
+
+--- customer
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.customer (
+  c_customer_sk INT,
+  c_customer_id STRING,
+  c_current_cdemo_sk INT,
+  c_current_hdemo_sk INT,
+  c_current_addr_sk INT,
+  c_first_shipto_date_sk INT,
+  c_first_sales_date_sk INT,
+  c_salutation STRING,
+  c_first_name STRING,
+  c_last_name STRING,
+  c_preferred_cust_flag STRING,
+  c_birth_day INT,
+  c_birth_month INT,
+  c_birth_year INT,
+  c_birth_country STRING,
+  c_login STRING,
+  c_email_address STRING,
+  c_last_review_date STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='customer');
+
+--- customer_address
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.customer_address (
+  ca_address_sk INT,
+  ca_address_id STRING,
+  ca_street_number STRING,
+  ca_street_name STRING,
+  ca_street_type STRING,
+  ca_suite_number STRING,
+  ca_city STRING,
+  ca_county STRING,
+  ca_state STRING,
+  ca_zip STRING,
+  ca_country STRING,
+  ca_gmt_offset DECIMAL(5,2),
+  ca_location_type STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='customer_address');
+
+--- customer_demographics
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.customer_demographics (
+  cd_demo_sk INT,
+  cd_gender STRING,
+  cd_marital_status STRING,
+  cd_education_status STRING,
+  cd_purchase_estimate INT,
+  cd_credit_rating STRING,
+  cd_dep_count INT,
+  cd_dep_employed_count INT,
+  cd_dep_college_count INT
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='customer_demographics');
+
+--- date_dim
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.date_dim (
+  d_date_sk INT,
+  d_date_id STRING,
+  d_date STRING,
+  d_month_seq INT,
+  d_week_seq INT,
+  d_quarter_seq INT,
+  d_year INT,
+  d_dow INT,
+  d_moy INT,
+  d_dom INT,
+  d_qoy INT,
+  d_fy_year INT,
+  d_fy_quarter_seq INT,
+  d_fy_week_seq INT,
+  d_day_name STRING,
+  d_quarter_name STRING,
+  d_holiday STRING,
+  d_weekend STRING,
+  d_following_holiday STRING,
+  d_first_dom INT,
+  d_last_dom INT,
+  d_same_day_ly INT,
+  d_same_day_lq INT,
+  d_current_day STRING,
+  d_current_week STRING,
+  d_current_month STRING,
+  d_current_quarter STRING,
+  d_current_year STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='date_dim');
+
+--- household_demographics
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.household_demographics (
+  hd_demo_sk INT,
+  hd_income_band_sk INT,
+  hd_buy_potential STRING,
+  hd_dep_count INT,
+  hd_vehicle_count INT
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='household_demographics');
+
+--- item
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.item (
+  i_item_sk BIGINT,
+  i_item_id STRING,
+  i_rec_start_date STRING,
+  i_rec_end_date STRING,
+  i_item_desc STRING,
+  i_current_price DECIMAL(7,2),
+  i_wholesale_cost DECIMAL(7,2),
+  i_brand_id INT,
+  i_brand STRING,
+  i_class_id INT,
+  i_class STRING,
+  i_category_id INT,
+  i_category STRING,
+  i_manufact_id INT,
+  i_manufact STRING,
+  i_size STRING,
+  i_formulation STRING,
+  i_color STRING,
+  i_units STRING,
+  i_container STRING,
+  i_manager_id INT,
+  i_product_name STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='item');
+
+--- promotion
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.promotion (
+  p_promo_sk INT,
+  p_promo_id STRING,
+  p_start_date_sk INT,
+  p_end_date_sk INT,
+  p_item_sk BIGINT,
+  p_cost DECIMAL(15,2),
+  p_response_target INT,
+  p_promo_name STRING,
+  p_channel_dmail STRING,
+  p_channel_email STRING,
+  p_channel_catalog STRING,
+  p_channel_tv STRING,
+  p_channel_radio STRING,
+  p_channel_press STRING,
+  p_channel_event STRING,
+  p_channel_demo STRING,
+  p_channel_details STRING,
+  p_purpose STRING,
+  p_discount_active STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='promotion');
+
+--- store
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.store (
+  s_store_sk INT,
+  s_store_id STRING,
+  s_rec_start_date STRING,
+  s_rec_end_date STRING,
+  s_closed_date_sk INT,
+  s_store_name STRING,
+  s_number_employees INT,
+  s_floor_space INT,
+  s_hours STRING,
+  s_manager STRING,
+  s_market_id INT,
+  s_geography_class STRING,
+  s_market_desc STRING,
+  s_market_manager STRING,
+  s_division_id INT,
+  s_division_name STRING,
+  s_company_id INT,
+  s_company_name STRING,
+  s_street_number STRING,
+  s_street_name STRING,
+  s_street_type STRING,
+  s_suite_number STRING,
+  s_city STRING,
+  s_county STRING,
+  s_state STRING,
+  s_zip STRING,
+  s_country STRING,
+  s_gmt_offset DECIMAL(5,2),
+  s_tax_precentage DECIMAL(5,2)
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='store');
+
+--- time_dim
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.time_dim (
+  t_time_sk INT,
+  t_time_id STRING,
+  t_time INT,
+  t_hour INT,
+  t_minute INT,
+  t_second INT,
+  t_am_pm STRING,
+  t_shift STRING,
+  t_sub_shift STRING,
+  t_meal_time STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='time_dim');
+
+--- call_center
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.call_center (
+  cc_call_center_sk INT,
+  cc_call_center_id STRING,
+  cc_rec_start_date STRING,
+  cc_rec_end_date STRING,
+  cc_closed_date_sk INT,
+  cc_open_date_sk INT,
+  cc_name STRING,
+  cc_class STRING,
+  cc_employees INT,
+  cc_sq_ft INT,
+  cc_hours STRING,
+  cc_manager STRING,
+  cc_mkt_id INT,
+  cc_mkt_class STRING,
+  cc_mkt_desc STRING,
+  cc_market_manager STRING,
+  cc_division INT,
+  cc_division_name STRING,
+  cc_company INT,
+  cc_company_name STRING,
+  cc_street_number STRING,
+  cc_street_name STRING,
+  cc_street_type STRING,
+  cc_suite_number STRING,
+  cc_city STRING,
+  cc_county STRING,
+  cc_state STRING,
+  cc_zip STRING,
+  cc_country STRING,
+  cc_gmt_offset DECIMAL(5,2),
+  cc_tax_percentage DECIMAL(5,2)
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='call_center');
+
+--- catalog_page
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.catalog_page (
+  cp_catalog_page_sk INT,
+  cp_catalog_page_id STRING,
+  cp_start_date_sk INT,
+  cp_end_date_sk INT,
+  cp_department STRING,
+  cp_catalog_number INT,
+  cp_catalog_page_number INT,
+  cp_description STRING,
+  cp_type STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='catalog_page');
+
+--- income_band
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.income_band (
+  ib_income_band_sk INT,
+  ib_lower_bound INT,
+  ib_upper_bound INT
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='income_band');
+
+--- reason
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.reason (
+  r_reason_sk INT,
+  r_reason_id STRING,
+  r_reason_desc STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='reason');
+
+--- ship_mode
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.ship_mode (
+  sm_ship_mode_sk INT,
+  sm_ship_mode_id STRING,
+  sm_type STRING,
+  sm_code STRING,
+  sm_carrier STRING,
+  sm_contract STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='ship_mode');
+
+--- warehouse
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.warehouse (
+  w_warehouse_sk INT,
+  w_warehouse_id STRING,
+  w_warehouse_name STRING,
+  w_warehouse_sq_ft INT,
+  w_street_number STRING,
+  w_street_name STRING,
+  w_street_type STRING,
+  w_suite_number STRING,
+  w_city STRING,
+  w_county STRING,
+  w_state STRING,
+  w_zip STRING,
+  w_country STRING,
+  w_gmt_offset DECIMAL(5,2)
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='warehouse');
+
+--- web_page
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.web_page (
+  wp_web_page_sk INT,
+  wp_web_page_id STRING,
+  wp_rec_start_date STRING,
+  wp_rec_end_date STRING,
+  wp_creation_date_sk INT,
+  wp_access_date_sk INT,
+  wp_autogen_flag STRING,
+  wp_customer_sk INT,
+  wp_url STRING,
+  wp_type STRING,
+  wp_char_count INT,
+  wp_link_count INT,
+  wp_image_count INT,
+  wp_max_ad_count INT
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='web_page');
+
+--- web_site
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.web_site (
+  web_site_sk INT,
+  web_site_id STRING,
+  web_rec_start_date STRING,
+  web_rec_end_date STRING,
+  web_name STRING,
+  web_open_date_sk INT,
+  web_close_date_sk INT,
+  web_class STRING,
+  web_manager STRING,
+  web_mkt_id INT,
+  web_mkt_class STRING,
+  web_mkt_desc STRING,
+  web_market_manager STRING,
+  web_company_id INT,
+  web_company_name STRING,
+  web_street_number STRING,
+  web_street_name STRING,
+  web_street_type STRING,
+  web_suite_number STRING,
+  web_city STRING,
+  web_county STRING,
+  web_state STRING,
+  web_zip STRING,
+  web_country STRING,
+  web_gmt_offset DECIMAL(5,2),
+  web_tax_percentage DECIMAL(5,2)
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='web_site');
diff --git a/testdata/datasets/tpch/tpch_jdbc_schema_template.sql b/testdata/datasets/tpch/tpch_jdbc_schema_template.sql
new file mode 100644
index 000000000..e01b536dd
--- /dev/null
+++ b/testdata/datasets/tpch/tpch_jdbc_schema_template.sql
@@ -0,0 +1,200 @@
+---- Licensed to the Apache Software Foundation (ASF) under one
+---- or more contributor license agreements.  See the NOTICE file
+---- distributed with this work for additional information
+---- regarding copyright ownership.  The ASF licenses this file
+---- to you under the Apache License, Version 2.0 (the
+---- "License"); you may not use this file except in compliance
+---- with the License.  You may obtain a copy of the License at
+----
+----   http://www.apache.org/licenses/LICENSE-2.0
+----
+---- Unless required by applicable law or agreed to in writing,
+---- software distributed under the License is distributed on an
+---- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+---- KIND, either express or implied.  See the License for the
+---- specific language governing permissions and limitations
+---- under the License.
+
+---- Template SQL statements to create external JDBC tables for TPCH dataset
+---- in specified 'jdbc_db_name' JDBC database.
+
+--- lineitem
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.lineitem (
+  l_orderkey BIGINT,
+  l_partkey BIGINT,
+  l_suppkey BIGINT,
+  l_linenumber INT,
+  l_quantity DECIMAL(12,2),
+  l_extendedprice DECIMAL(12,2),
+  l_discount DECIMAL(12,2),
+  l_tax DECIMAL(12,2),
+  l_returnflag STRING,
+  l_linestatus STRING,
+  l_shipdate STRING,
+  l_commitdate STRING,
+  l_receiptdate STRING,
+  l_shipinstruct STRING,
+  l_shipmode STRING,
+  l_comment STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='lineitem');
+
+--- part
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.part (
+  p_partkey BIGINT,
+  p_name STRING,
+  p_mfgr STRING,
+  p_brand STRING,
+  p_type STRING,
+  p_size INT,
+  p_container STRING,
+  p_retailprice DECIMAL(12,2),
+  p_comment STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='part');
+
+--- partsupp
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.partsupp (
+  ps_partkey BIGINT,
+  ps_suppkey BIGINT,
+  ps_availqty INT,
+  ps_supplycost DECIMAL(12,2),
+  ps_comment STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='partsupp');
+
+--- supplier
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.supplier (
+  s_suppkey BIGINT,
+  s_name STRING,
+  s_address STRING,
+  s_nationkey SMALLINT,
+  s_phone STRING,
+  s_acctbal DECIMAL(12,2),
+  s_comment STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='supplier');
+
+--- nation
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.nation (
+  n_nationkey SMALLINT,
+  n_name STRING,
+  n_regionkey SMALLINT,
+  n_comment STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='nation');
+
+--- region
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.region (
+  r_regionkey SMALLINT,
+  r_name STRING,
+  r_comment STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='region');
+
+--- orders
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.orders (
+  o_orderkey BIGINT,
+  o_custkey BIGINT,
+  o_orderstatus STRING,
+  o_totalprice DECIMAL(12,2),
+  o_orderdate STRING,
+  o_orderpriority STRING,
+  o_clerk STRING,
+  o_shippriority INT,
+  o_comment STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='orders');
+
+--- customer
+CREATE EXTERNAL TABLE IF NOT EXISTS {jdbc_db_name}.customer (
+  c_custkey BIGINT,
+  c_name STRING,
+  c_address STRING,
+  c_nationkey SMALLINT,
+  c_phone STRING,
+  c_acctbal DECIMAL(12,2),
+  c_mktsegment STRING,
+  c_comment STRING
+)
+STORED AS JDBC
+TBLPROPERTIES (
+  'database.type'='{database_type}',
+  'jdbc.url'='{jdbc_url}',
+  'jdbc.auth'='{jdbc_auth}',
+  'jdbc.properties'='{jdbc_properties}',
+  'jdbc.driver'='{jdbc_driver}',
+  'driver.url'='{driver_url}',
+  'dbcp.username'='{dbcp_username}',
+  'dbcp.password'='{dbcp_password}',
+  'table'='customer');
diff --git a/tests/common/skip.py b/tests/common/skip.py
index 0963739c6..c26eb3e16 100644
--- a/tests/common/skip.py
+++ b/tests/common/skip.py
@@ -161,6 +161,8 @@ class SkipIfNotHdfsMinicluster:
              "with no EC")
 
 class SkipIfBuildType:
+  dev_build = pytest.mark.skipif(IMPALA_TEST_CLUSTER_PROPERTIES.is_dev(),
+      reason="Test takes too much time on debug build.")
   not_dev_build = pytest.mark.skipif(not IMPALA_TEST_CLUSTER_PROPERTIES.is_dev(),
       reason="Test depends on debug build startup option.")
   remote = pytest.mark.skipif(IMPALA_TEST_CLUSTER_PROPERTIES.is_remote_cluster(),
diff --git a/tests/query_test/test_tpcds_queries.py b/tests/query_test/test_tpcds_queries.py
index 2dbb896e6..c9fd5a821 100644
--- a/tests/query_test/test_tpcds_queries.py
+++ b/tests/query_test/test_tpcds_queries.py
@@ -18,11 +18,14 @@
 # Functional tests running the TPC-DS workload
 #
 from __future__ import absolute_import, division, print_function
+import os
 import pytest
 from copy import deepcopy
 
 from tests.common.impala_test_suite import ImpalaTestSuite
-from tests.common.skip import SkipIfDockerizedCluster
+from tests.common.skip import (
+    SkipIfBuildType,
+    SkipIfDockerizedCluster)
 from tests.common.test_dimensions import (
     add_mandatory_exec_option,
     create_single_exec_option_dimension,
@@ -770,3 +773,371 @@ class TestTpcdsQueryWithProcessingCost(TestTpcdsQuery):
     new_vector = deepcopy(vector)
     new_vector.get_value('exec_option')['max_fragment_instances_per_node'] = 2
     self.run_test_case(self.get_workload() + '-q67a', new_vector)
+
+
+@SkipIfBuildType.dev_build
+@SkipIfDockerizedCluster.insufficient_mem_limit
+class TestTpcdsQueryForJdbcTables(ImpalaTestSuite):
+  """TPCDS query tests for external jdbc tables."""
+
+  @classmethod
+  def get_workload(self):
+    return 'tpcds'
+
+  @classmethod
+  def add_test_dimensions(cls):
+    super(TestTpcdsQueryForJdbcTables, cls).add_test_dimensions()
+
+    # For exhaustive test on Jenkins, cls.exploration_strategy() returns 'core'
+    # (IMPALA-4904, IMPALA-4914). The workaround is to get exploration strategy from
+    # environment variable 'EXPLORATION_STRATEGY'.
+    try:
+      env_exploration_strategy = os.environ['EXPLORATION_STRATEGY']
+    except KeyError:
+      env_exploration_strategy = ''
+    if env_exploration_strategy != 'exhaustive' \
+        and cls.exploration_strategy() != 'exhaustive':
+      pytest.skip("These tests only run in exhaustive mode.")
+    cls.ImpalaTestMatrix.add_dimension(create_single_exec_option_dimension())
+    cls.ImpalaTestMatrix.add_constraint(lambda v:
+        v.get_value('table_format').file_format == 'parquet'
+        and v.get_value('table_format').compression_codec == 'none')
+    add_mandatory_exec_option(cls, 'clean_dbcp_ds_cache', 'false')
+
+  # Marked serially to make sure it runs first.
+  def test_tpcds_count(self, vector):
+    self.run_test_case('count', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q1(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q1', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q2(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q2', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q3(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q3', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q4(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q4', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q5(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q5', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q6(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q6', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q7(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q7', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q8(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q8', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q9(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q9', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q10a(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q10a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q11(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q11', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q12(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q12', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q13(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q13', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q14_1(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q14-1', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q14_2(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q14-2', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q15(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q15', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q16(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q16', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q17(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q17', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q18(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q18', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q18a(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q18a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q19(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q19', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q20(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q20', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q21(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q21', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q22(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q22', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q22a(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q22a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q23_1(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q23-1', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q23_2(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q23-2', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q24_1(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q24-1', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q24_2(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q24-2', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q25(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q25', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q26(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q26', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q27(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q27', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q28(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q28', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q29(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q29', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q30(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q30', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q31(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q31', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q32(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q32', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q33(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q33', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q34(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q34', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q35a(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q35a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q36(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q36', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q37(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q37', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q38(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q38', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q39_1(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q39-1', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q39_2(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q39-2', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q40(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q40', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q41(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q41', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q42(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q42', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q43(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q43', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q44(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q44', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q45(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q45', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q46(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q46', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q47(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q47', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q48(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q48', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q49(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q49', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q50(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q50', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q51(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q51', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q51a(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q51a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q52(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q52', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q53(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q53', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q54(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q54', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q55(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q55', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q56(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q56', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q57(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q57', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q58(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q58', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q59(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q59', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q60(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q60', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q61(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q61', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q62(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q62', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q63(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q63', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q64(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q64', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q65(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q65', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q66(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q66', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q67(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q67', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q67a(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q67a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q68(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q68', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q69(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q69', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q70(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q70', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q70a(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q70a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q71(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q71', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q72(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q72', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q73(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q73', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q74(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q74', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q75(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q75', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q76(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q76', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q77(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q77', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q77a(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q77a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q78(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q78', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q79(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q79', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q80(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q80', vector, use_db='tpcds_jdbc')
+
+  # TODO: IMPALA-13018 fix unmatched results for external JDBC tables.
+  # def test_tpcds_q80a(self, vector):
+  #   self.run_test_case('tpcds-decimal_v2-q80a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q81(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q81', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q82(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q82', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q83(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q83', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q84(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q84', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q85(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q85', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q86(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q86', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q86a(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q86a', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q87(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q87', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q88(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q88', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q89(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q89', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q90(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q90', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q91(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q91', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q92(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q92', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q93(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q93', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q94(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q94', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q95(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q95', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q96(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q96', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q97(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q97', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q98(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q98', vector, use_db='tpcds_jdbc')
+
+  def test_tpcds_q99(self, vector):
+    self.run_test_case('tpcds-decimal_v2-q99', vector, use_db='tpcds_jdbc')
diff --git a/tests/query_test/test_tpch_queries.py b/tests/query_test/test_tpch_queries.py
index 4d2bec912..1f08e46b9 100644
--- a/tests/query_test/test_tpch_queries.py
+++ b/tests/query_test/test_tpch_queries.py
@@ -21,7 +21,12 @@ from builtins import range
 import pytest
 
 from tests.common.impala_test_suite import ImpalaTestSuite
-from tests.common.test_dimensions import create_single_exec_option_dimension
+from tests.common.skip import SkipIfDockerizedCluster
+from tests.common.test_dimensions import (
+    add_mandatory_exec_option,
+    create_single_exec_option_dimension,
+    create_uncompressed_text_dimension)
+
 
 class TestTpchQuery(ImpalaTestSuite):
   @classmethod
@@ -48,3 +53,25 @@ class TestTpchQuery(ImpalaTestSuite):
   def test_tpch(self, vector, query):
     self.run_test_case('tpch-q{0}'.format(query), vector)
 
+
+@SkipIfDockerizedCluster.insufficient_mem_limit
+class TestTpchQueryForJdbcTables(ImpalaTestSuite):
+  """TPCH query tests for external jdbc tables."""
+
+  @classmethod
+  def get_workload(self):
+    return 'tpch'
+
+  @classmethod
+  def add_test_dimensions(cls):
+    super(TestTpchQueryForJdbcTables, cls).add_test_dimensions()
+    cls.ImpalaTestMatrix.add_dimension(
+        create_uncompressed_text_dimension(cls.get_workload()))
+    add_mandatory_exec_option(cls, 'clean_dbcp_ds_cache', 'false')
+
+  def idfn(val):
+    return "TPC-H: Q{0}".format(val)
+
+  @pytest.mark.parametrize("query", range(1, 23), ids=idfn)
+  def test_tpch(self, vector, query):
+    self.run_test_case('tpch-q{0}'.format(query), vector, use_db='tpch_jdbc')


Reply via email to