This is an automated email from the ASF dual-hosted git repository.

panjuan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 605e05c9 improve metadata load performance (#11767)
605e05c9 is described below

commit 605e05c907bb4bd4e14bb682c01e6f46e562a87b
Author: tuichenchuxin <[email protected]>
AuthorDate: Tue Aug 17 15:06:22 2021 +0800

    improve metadata load performance (#11767)
    
    * improve metadata load performance
    
    * improve metadata load performance
    
    * metaDataImprove test case
    
    * metaDataImprove add test case
    
    * metaDataImprove add test case
    
    * metaDataImprove add test case
    
    * metaDataImprove add test case
    
    * metaDataImprove add test case
    
    * metaDataImprove add test case
    
    * metaDataImprove fix index
    
    * metaDataImprove optimize review
    
    * metaDataImprove optimize review suggestions
    
    * metaDataImprove optimize review suggestions
    
    * metaDataImprove optimize review suggestions
    
    * metaDataImprove optimize review suggestions
    
    * metaDataImprove optimize review suggestions
    
    * metaDataImprove optimize review suggestions
    
    * metaDataImprove optimize review suggestions
    
    * metaDataImprove optimize review suggestions
    
    * metaDataImprove optimize review suggestions
---
 .../metadata/EncryptTableMetaDataBuilder.java      |  33 ++
 .../metadata/EncryptTableMetaDataBuilderTest.java  | 210 ++++++++++++-
 .../metadata/ShardingTableMetaDataBuilder.java     |  63 +++-
 .../metadata/ShardingTableMetaDataBuilderTest.java | 331 +++++++++++++++++++++
 .../metadata/schema/builder/SchemaBuilder.java     |  16 +-
 .../schema/builder/TableMetaDataBuilder.java       |  39 ++-
 .../schema/builder/loader/TableMetaDataLoader.java |  75 +++++
 .../loader/dialect/H2TableMetaDataLoader.java      |  73 +++--
 .../loader/dialect/MySQLTableMetaDataLoader.java   |  34 ++-
 .../loader/dialect/OracleTableMetaDataLoader.java  |  55 ++--
 .../dialect/PostgreSQLTableMetaDataLoader.java     |  34 ++-
 .../dialect/SQLServerTableMetaDataLoader.java      |  35 ++-
 .../builder/spi/DialectTableMetaDataLoader.java    |  10 +
 .../builder/spi/RuleBasedTableMetaDataBuilder.java |  15 +
 .../schema/builder/TableMetaDataBuilderTest.java   |   7 +
 .../loader/CommonFixtureTableMetaDataBuilder.java  |  11 +
 ...tainedFixtureRuleBasedTableMetaDataBuilder.java |  18 ++
 17 files changed, 969 insertions(+), 90 deletions(-)

diff --git 
a/shardingsphere-features/shardingsphere-encrypt/shardingsphere-encrypt-core/src/main/java/org/apache/shardingsphere/encrypt/metadata/EncryptTableMetaDataBuilder.java
 
b/shardingsphere-features/shardingsphere-encrypt/shardingsphere-encrypt-core/src/main/java/org/apache/shardingsphere/encrypt/metadata/EncryptTableMetaDataBuilder.java
index 8c39e22..f8e6155 100644
--- 
a/shardingsphere-features/shardingsphere-encrypt/shardingsphere-encrypt-core/src/main/java/org/apache/shardingsphere/encrypt/metadata/EncryptTableMetaDataBuilder.java
+++ 
b/shardingsphere-features/shardingsphere-encrypt/shardingsphere-encrypt-core/src/main/java/org/apache/shardingsphere/encrypt/metadata/EncryptTableMetaDataBuilder.java
@@ -24,23 +24,31 @@ import 
org.apache.shardingsphere.infra.config.properties.ConfigurationProperties
 import org.apache.shardingsphere.infra.database.type.DatabaseType;
 import org.apache.shardingsphere.infra.datanode.DataNode;
 import org.apache.shardingsphere.infra.datanode.DataNodes;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.SchemaBuilderMaterials;
 import 
org.apache.shardingsphere.infra.metadata.schema.builder.loader.TableMetaDataLoader;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.DialectTableMetaDataLoader;
 import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.RuleBasedTableMetaDataBuilder;
 import org.apache.shardingsphere.infra.metadata.schema.model.ColumnMetaData;
 import org.apache.shardingsphere.infra.metadata.schema.model.TableMetaData;
 
 import javax.sql.DataSource;
 import java.sql.SQLException;
+
+import java.util.Collections;
 import java.util.Collection;
+import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.Map;
 import java.util.Optional;
+import java.util.concurrent.ExecutorService;
+import java.util.stream.Collectors;
 
 /**
  * Table meta data builder for encrypt.
  */
 public final class EncryptTableMetaDataBuilder implements 
RuleBasedTableMetaDataBuilder<EncryptRule> {
     
+    // TODO remove this method
     @Override
     public Optional<TableMetaData> load(final String tableName, final 
DatabaseType databaseType, final Map<String, DataSource> dataSourceMap, final 
DataNodes dataNodes,
                                         final EncryptRule encryptRule, final 
ConfigurationProperties props) throws SQLException {
@@ -50,6 +58,31 @@ public final class EncryptTableMetaDataBuilder implements 
RuleBasedTableMetaData
     }
     
     @Override
+    public Map<String, TableMetaData> load(final Collection<String> 
tableNames, final EncryptRule rule, final SchemaBuilderMaterials materials,
+                                           final ExecutorService 
executorService) throws SQLException {
+        Optional<DialectTableMetaDataLoader> loader = 
TableMetaDataLoader.findDialectTableMetaDataLoader(materials.getDatabaseType());
+        Collection<String> loadTableNames = tableNames.stream().filter(each -> 
rule.findEncryptTable(each).isPresent()).collect(Collectors.toList());
+        if (loadTableNames.isEmpty()) {
+            return Collections.emptyMap();
+        }
+        Map<String, Collection<String>> dataSourceTables = 
getTableGroup(loadTableNames, materials);
+        return loader.isPresent() ? TableMetaDataLoader.load(loader.get(), 
dataSourceTables, materials.getDataSourceMap(), executorService)
+                : TableMetaDataLoader.load(dataSourceTables, 
materials.getDatabaseType(), materials.getDataSourceMap());
+    }
+    
+    private Map<String, Collection<String>> getTableGroup(final 
Collection<String> tableNames, final SchemaBuilderMaterials materials) {
+        Map<String, Collection<String>> result = new LinkedHashMap<>();
+        DataNodes dataNodes = new DataNodes(materials.getRules());
+        for (String each : tableNames) {
+            String dataSourceName = 
dataNodes.getDataNodes(each).stream().map(DataNode::getDataSourceName).findFirst().orElseGet(()
 -> materials.getDataSourceMap().keySet().iterator().next());
+            Collection<String> tables = result.getOrDefault(dataSourceName, 
new LinkedList<>());
+            tables.add(each);
+            result.putIfAbsent(dataSourceName, tables);
+        }
+        return result;
+    }
+    
+    @Override
     public TableMetaData decorate(final String tableName, final TableMetaData 
tableMetaData, final EncryptRule encryptRule) {
         Optional<EncryptTable> encryptTable = 
encryptRule.findEncryptTable(tableName);
         return encryptTable.map(optional ->
diff --git 
a/shardingsphere-features/shardingsphere-encrypt/shardingsphere-encrypt-core/src/test/java/org/apache/shardingsphere/encrypt/metadata/EncryptTableMetaDataBuilderTest.java
 
b/shardingsphere-features/shardingsphere-encrypt/shardingsphere-encrypt-core/src/test/java/org/apache/shardingsphere/encrypt/metadata/EncryptTableMetaDataBuilderTest.java
index e9a724f..4e194ec 100644
--- 
a/shardingsphere-features/shardingsphere-encrypt/shardingsphere-encrypt-core/src/test/java/org/apache/shardingsphere/encrypt/metadata/EncryptTableMetaDataBuilderTest.java
+++ 
b/shardingsphere-features/shardingsphere-encrypt/shardingsphere-encrypt-core/src/test/java/org/apache/shardingsphere/encrypt/metadata/EncryptTableMetaDataBuilderTest.java
@@ -17,12 +17,15 @@
 
 package org.apache.shardingsphere.encrypt.metadata;
 
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.shardingsphere.encrypt.rule.EncryptRule;
 import org.apache.shardingsphere.encrypt.rule.EncryptTable;
 import 
org.apache.shardingsphere.infra.config.properties.ConfigurationProperties;
 import org.apache.shardingsphere.infra.database.type.DatabaseType;
 import org.apache.shardingsphere.infra.datanode.DataNode;
 import org.apache.shardingsphere.infra.datanode.DataNodes;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.SchemaBuilderMaterials;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.DialectTableMetaDataLoader;
 import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.RuleBasedTableMetaDataBuilder;
 import org.apache.shardingsphere.infra.metadata.schema.model.ColumnMetaData;
 import org.apache.shardingsphere.infra.metadata.schema.model.TableMetaData;
@@ -39,14 +42,22 @@ import org.mockito.junit.MockitoJUnitRunner;
 
 import javax.sql.DataSource;
 import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+
+import java.util.LinkedList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 
 import static org.hamcrest.CoreMatchers.is;
 import static org.junit.Assert.assertFalse;
@@ -54,6 +65,7 @@ import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.startsWith;
 import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -65,6 +77,7 @@ public final class EncryptTableMetaDataBuilderTest {
     
     static {
         
ShardingSphereServiceLoader.register(RuleBasedTableMetaDataBuilder.class);
+        ShardingSphereServiceLoader.register(DialectTableMetaDataLoader.class);
     }
     
     @Mock(answer = Answers.RETURNS_DEEP_STUBS)
@@ -78,12 +91,60 @@ public final class EncryptTableMetaDataBuilderTest {
     
     @Before
     public void setUp() throws SQLException {
-        ResultSet tableResultSet = createTableResultSet();
-        ResultSet columnResultSet = createColumnResultSet();
         Connection connection = mock(Connection.class, RETURNS_DEEP_STUBS);
-        when(connection.getMetaData().getTables(any(), any(), any(), 
eq(null))).thenReturn(tableResultSet);
-        when(connection.getMetaData().getColumns(any(), any(), any(), 
eq("%"))).thenReturn(columnResultSet);
         when(dataSource.getConnection()).thenReturn(connection);
+        mockH2ResultSet(connection);
+        mockMySQLResultSet(connection);
+        mockOracleResultSet(connection);
+        mockPGResultSet(connection);
+        mockSQLServerResultSet(connection);
+        mockDatabaseMetaData(connection);
+    }
+    
+    private void mockSQLServerResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet();
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT obj.name AS 
TABLE_NAME, col.name AS COLUMN_NAME, t.name AS 
DATA_TYPE"))).thenReturn(preparedStatement);
+    }
+    
+    private void mockPGResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet();
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT table_name, 
column_name, ordinal_position, data_type, udt_name, 
column_default"))).thenReturn(preparedStatement);
+    }
+    
+    private void mockOracleResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet();
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT OWNER AS 
TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, 
DATA_TYPE"))).thenReturn(preparedStatement);
+    }
+    
+    private void mockMySQLResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet();
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT TABLE_NAME, 
COLUMN_NAME, DATA_TYPE, COLUMN_KEY, EXTRA, COLLATION_NAME FROM 
information_schema.columns"))).thenReturn(preparedStatement);
+    }
+    
+    private void mockH2ResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet();
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT TABLE_CATALOG, 
TABLE_NAME"))).thenReturn(preparedStatement);
+    }
+    
+    private void mockDatabaseMetaData(final Connection connection) throws 
SQLException {
+        DatabaseMetaData databaseMetaData = mock(DatabaseMetaData.class, 
RETURNS_DEEP_STUBS);
+        when(connection.getMetaData()).thenReturn(databaseMetaData);
+        ResultSet dataTypeResultSet = createDataTypeResultSet();
+        when(databaseMetaData.getTypeInfo()).thenReturn(dataTypeResultSet);
+        ResultSet tableResultSet = createTableResultSet();
+        ResultSet columnResultSet = createColumnResultSet();
+        when(databaseMetaData.getTables(any(), any(), any(), 
eq(null))).thenReturn(tableResultSet);
+        when(databaseMetaData.getColumns(any(), any(), any(), 
eq("%"))).thenReturn(columnResultSet);
     }
     
     private ResultSet createTableResultSet() throws SQLException {
@@ -96,10 +157,24 @@ public final class EncryptTableMetaDataBuilderTest {
         ResultSet result = mock(ResultSet.class);
         when(result.next()).thenReturn(true, true, true, false);
         when(result.getString("TABLE_NAME")).thenReturn(TABLE_NAME);
+        when(result.getString("table_name")).thenReturn(TABLE_NAME);
         when(result.getString("COLUMN_NAME")).thenReturn("id", "pwd_cipher", 
"pwd_plain");
+        when(result.getString("column_name")).thenReturn("id", "pwd_cipher", 
"pwd_plain");
+        when(result.getString("TYPE_NAME")).thenReturn("INT");
+        when(result.getString("DATA_TYPE")).thenReturn("INT");
+        when(result.getString("udt_name")).thenReturn("INT");
+        when(result.getInt("ordinal_position")).thenReturn(1, 2, 3);
         return result;
     }
     
+    private ResultSet createDataTypeResultSet() throws SQLException {
+        ResultSet dataTypeResultSet = mock(ResultSet.class);
+        when(dataTypeResultSet.next()).thenReturn(true, false);
+        when(dataTypeResultSet.getString("TYPE_NAME")).thenReturn("INT");
+        when(dataTypeResultSet.getInt("DATA_TYPE")).thenReturn(1);
+        return dataTypeResultSet;
+    }
+    
     @Test
     public void assertLoadByExistedTable() throws SQLException {
         EncryptRule encryptRule = createEncryptRule();
@@ -114,6 +189,130 @@ public final class EncryptTableMetaDataBuilderTest {
     }
     
     @Test
+    public void assertLoadByExistedTables() throws SQLException {
+        EncryptRule encryptRule = createEncryptRule();
+        Collection<ShardingSphereRule> rules = 
Arrays.asList(createSingleTableRule(), encryptRule);
+        EncryptTableMetaDataBuilder loader = (EncryptTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(encryptRule);
+        
when(databaseType.formatTableNamePattern(TABLE_NAME)).thenReturn(TABLE_NAME);
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
encryptRule, new SchemaBuilderMaterials(databaseType, 
Collections.singletonMap("logic_db", dataSource), rules, props),
+                new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build()));
+        TableMetaData tableMetaData = actual.values().iterator().next();
+        assertThat(tableMetaData.getColumnMetaData(0).getName(), is("id"));
+        assertThat(tableMetaData.getColumnMetaData(1).getName(), 
is("pwd_cipher"));
+        assertThat(tableMetaData.getColumnMetaData(2).getName(), 
is("pwd_plain"));
+    }
+    
+    @Test
+    public void assertLoadByExistedTablesH2() throws SQLException {
+        EncryptRule encryptRule = createEncryptRule();
+        Collection<ShardingSphereRule> rules = 
Arrays.asList(createSingleTableRule(), encryptRule);
+        EncryptTableMetaDataBuilder loader = (EncryptTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(encryptRule);
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        ExecutorService executorService = new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build());
+        loadByH2(loader, tableNames, rules, encryptRule, executorService);
+    }
+    
+    private void loadByH2(final EncryptTableMetaDataBuilder loader, final 
Collection<String> tableNames, final Collection<ShardingSphereRule> rules, 
final EncryptRule encryptRule,
+                          final ExecutorService executorService) throws 
SQLException {
+        when(databaseType.getName()).thenReturn("H2");
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
encryptRule, new SchemaBuilderMaterials(databaseType,
+                Collections.singletonMap("logic_db", dataSource), rules, 
props), executorService);
+        assertResult(actual);
+    }
+    
+    @Test
+    public void assertLoadByExistedTablesMySQL() throws SQLException {
+        EncryptRule encryptRule = createEncryptRule();
+        Collection<ShardingSphereRule> rules = 
Arrays.asList(createSingleTableRule(), encryptRule);
+        EncryptTableMetaDataBuilder loader = (EncryptTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(encryptRule);
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        ExecutorService executorService = new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build());
+        loadByMySQL(loader, tableNames, rules, encryptRule, executorService);
+    }
+    
+    private void loadByMySQL(final EncryptTableMetaDataBuilder loader, final 
Collection<String> tableNames, final Collection<ShardingSphereRule> rules, 
final EncryptRule encryptRule,
+                             final ExecutorService executorService) throws 
SQLException {
+        when(databaseType.getName()).thenReturn("MySQL");
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
encryptRule, new SchemaBuilderMaterials(databaseType,
+                Collections.singletonMap("logic_db", dataSource), rules, 
props), executorService);
+        assertResult(actual);
+    }
+    
+    @Test
+    public void assertLoadByExistedTablesOracle() throws SQLException {
+        EncryptRule encryptRule = createEncryptRule();
+        Collection<ShardingSphereRule> rules = 
Arrays.asList(createSingleTableRule(), encryptRule);
+        EncryptTableMetaDataBuilder loader = (EncryptTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(encryptRule);
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        ExecutorService executorService = new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build());
+        loadByOracle(loader, tableNames, rules, encryptRule, executorService);
+    }
+    
+    private void loadByOracle(final EncryptTableMetaDataBuilder loader, final 
Collection<String> tableNames, final Collection<ShardingSphereRule> rules, 
final EncryptRule encryptRule,
+                              final ExecutorService executorService) throws 
SQLException {
+        when(databaseType.getName()).thenReturn("Oracle");
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
encryptRule, new SchemaBuilderMaterials(databaseType,
+                Collections.singletonMap("logic_db", dataSource), rules, 
props), executorService);
+        assertResult(actual);
+    }
+    
+    @Test
+    public void assertLoadByExistedTablesPGSQL() throws SQLException {
+        EncryptRule encryptRule = createEncryptRule();
+        Collection<ShardingSphereRule> rules = 
Arrays.asList(createSingleTableRule(), encryptRule);
+        EncryptTableMetaDataBuilder loader = (EncryptTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(encryptRule);
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        ExecutorService executorService = new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build());
+        loadByPostgreSQL(loader, tableNames, rules, encryptRule, 
executorService);
+    }
+    
+    private void loadByPostgreSQL(final EncryptTableMetaDataBuilder loader, 
final Collection<String> tableNames, final Collection<ShardingSphereRule> 
rules, final EncryptRule encryptRule,
+                                  final ExecutorService executorService) 
throws SQLException {
+        when(databaseType.getName()).thenReturn("PostgreSQL");
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
encryptRule, new SchemaBuilderMaterials(databaseType,
+                Collections.singletonMap("logic_db", dataSource), rules, 
props), executorService);
+        assertResult(actual);
+    }
+    
+    @Test
+    public void assertLoadByExistedTablesSQLServer() throws SQLException {
+        EncryptRule encryptRule = createEncryptRule();
+        Collection<ShardingSphereRule> rules = 
Arrays.asList(createSingleTableRule(), encryptRule);
+        EncryptTableMetaDataBuilder loader = (EncryptTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(encryptRule);
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        ExecutorService executorService = new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build());
+        loadBySQLServer(loader, tableNames, rules, encryptRule, 
executorService);
+    }
+    
+    private void loadBySQLServer(final EncryptTableMetaDataBuilder loader, 
final Collection<String> tableNames, final Collection<ShardingSphereRule> 
rules, final EncryptRule encryptRule,
+                                 final ExecutorService executorService) throws 
SQLException {
+        when(databaseType.getName()).thenReturn("SQLServer");
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
encryptRule, new SchemaBuilderMaterials(databaseType,
+                Collections.singletonMap("logic_db", dataSource), rules, 
props), executorService);
+        assertResult(actual);
+    }
+    
+    private void assertResult(final Map<String, TableMetaData> actual) {
+        TableMetaData tableMetaData = actual.values().iterator().next();
+        assertThat(tableMetaData.getColumnMetaData(0).getName(), is("id"));
+        assertThat(tableMetaData.getColumnMetaData(1).getName(), 
is("pwd_cipher"));
+        assertThat(tableMetaData.getColumnMetaData(2).getName(), 
is("pwd_plain"));
+    }
+    
+    @Test
     public void assertLoadByNotExistedTable() throws SQLException {
         EncryptRule encryptRule = createEncryptRule();
         Collection<ShardingSphereRule> rules = 
Arrays.asList(createSingleTableRule(), encryptRule);
@@ -174,7 +373,8 @@ public final class EncryptTableMetaDataBuilderTest {
     
     private TableMetaData createTableMetaData() {
         Collection<ColumnMetaData> columns = Arrays.asList(new 
ColumnMetaData("id", 1, true, true, true),
-                new ColumnMetaData("pwd_cipher", 2, false, false, true), new 
ColumnMetaData("pwd_plain", 2, false, false, true));
+                new ColumnMetaData("pwd_cipher", 2, false, false, true),
+                new ColumnMetaData("pwd_plain", 2, false, false, true));
         return new TableMetaData(TABLE_NAME, columns, Collections.emptyList());
     }
     
diff --git 
a/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-core/src/main/java/org/apache/shardingsphere/sharding/metadata/ShardingTableMetaDataBuilder.java
 
b/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-core/src/main/java/org/apache/shardingsphere/sharding/metadata/ShardingTableMetaDataBuilder.java
index f30a587..e6639b3 100644
--- 
a/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-core/src/main/java/org/apache/shardingsphere/sharding/metadata/ShardingTableMetaDataBuilder.java
+++ 
b/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-core/src/main/java/org/apache/shardingsphere/sharding/metadata/ShardingTableMetaDataBuilder.java
@@ -25,7 +25,9 @@ import 
org.apache.shardingsphere.infra.database.type.DatabaseType;
 import org.apache.shardingsphere.infra.datanode.DataNode;
 import org.apache.shardingsphere.infra.datanode.DataNodes;
 import org.apache.shardingsphere.infra.exception.ShardingSphereException;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.SchemaBuilderMaterials;
 import 
org.apache.shardingsphere.infra.metadata.schema.builder.loader.TableMetaDataLoader;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.DialectTableMetaDataLoader;
 import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.RuleBasedTableMetaDataBuilder;
 import org.apache.shardingsphere.infra.metadata.schema.model.ColumnMetaData;
 import org.apache.shardingsphere.infra.metadata.schema.model.IndexMetaData;
@@ -37,13 +39,15 @@ import org.apache.shardingsphere.sharding.rule.TableRule;
 import javax.sql.DataSource;
 import java.sql.SQLException;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Optional;
+import java.util.LinkedHashMap;
+import java.util.Map.Entry;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -61,6 +65,7 @@ public final class ShardingTableMetaDataBuilder implements 
RuleBasedTableMetaDat
     
     private static final int FUTURE_GET_TIME_OUT_SECOND = 5;
     
+    // TODO remove this method
     @Override
     public Optional<TableMetaData> load(final String tableName, final 
DatabaseType databaseType, final Map<String, DataSource> dataSourceMap, final 
DataNodes dataNodes,
                                         final ShardingRule rule, final 
ConfigurationProperties props) throws SQLException {
@@ -82,6 +87,62 @@ public final class ShardingTableMetaDataBuilder implements 
RuleBasedTableMetaDat
         return Optional.of(actualTableMetaDataMap.values().iterator().next());
     }
     
+    @Override
+    public Map<String, TableMetaData> load(final Collection<String> 
tableNames, final ShardingRule rule, final SchemaBuilderMaterials materials,
+                                           final ExecutorService 
executorService) throws SQLException {
+        Collection<String> loadTableNames = tableNames.stream().filter(each -> 
rule.findTableRule(each).isPresent()).collect(Collectors.toList());
+        if (loadTableNames.isEmpty()) {
+            return Collections.emptyMap();
+        }
+        boolean isCheckingMetaData = 
materials.getProps().getValue(ConfigurationPropertyKey.CHECK_TABLE_METADATA_ENABLED);
+        return isCheckingMetaData ? loadWithCheck(loadTableNames, rule, 
materials) : loadWithOutCheck(loadTableNames, rule, materials, executorService);
+    }
+    
+    private Map<String, TableMetaData> loadWithCheck(final Collection<String> 
tableNames, final ShardingRule rule, final SchemaBuilderMaterials materials) {
+        int maxConnectionsSizePerQuery = 
materials.getProps().getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY);
+        Map<String, TableMetaData> result = new HashMap<>();
+        for (String each : tableNames) {
+            TableRule tableRule = rule.getTableRule(each);
+            Map<String, TableMetaData> actualTableMetaDataMap = 
parallelLoadTables(materials.getDatabaseType(), materials.getDataSourceMap(),
+                    new DataNodes(materials.getRules()), each, 
maxConnectionsSizePerQuery);
+            if (actualTableMetaDataMap.isEmpty()) {
+                continue;
+            }
+            checkUniformed(tableRule.getLogicTable(), actualTableMetaDataMap, 
rule);
+            result.put(tableRule.getLogicTable(), 
actualTableMetaDataMap.values().iterator().next());
+        }
+        return result;
+    }
+    
+    private Map<String, TableMetaData> loadWithOutCheck(final 
Collection<String> tableNames, final ShardingRule rule,
+                                                        final 
SchemaBuilderMaterials materials, final ExecutorService executorService) throws 
SQLException {
+        Optional<DialectTableMetaDataLoader> loader = 
TableMetaDataLoader.findDialectTableMetaDataLoader(materials.getDatabaseType());
+        Map<String, Collection<String>> dataSourceTables = 
getTableGroup(tableNames, materials);
+        Map<String, TableMetaData> tableMetaDataMap = loader.isPresent() ? 
TableMetaDataLoader.load(loader.get(), dataSourceTables, 
materials.getDataSourceMap(), executorService)
+                : TableMetaDataLoader.load(dataSourceTables, 
materials.getDatabaseType(), materials.getDataSourceMap());
+        return decorateLogicTableName(tableMetaDataMap.values(), rule);
+    }
+    
+    private Map<String, Collection<String>> getTableGroup(final 
Collection<String> tableNames, final SchemaBuilderMaterials materials) {
+        DataNodes dataNodes = new DataNodes(materials.getRules());
+        Map<String, Collection<String>> result = new LinkedHashMap<>();
+        for (String each : tableNames) {
+            DataNode dataNode = dataNodes.getDataNodes(each).iterator().next();
+            Collection<String> tables = 
result.getOrDefault(dataNode.getDataSourceName(), new LinkedList<>());
+            tables.add(dataNode.getTableName());
+            result.putIfAbsent(dataNode.getDataSourceName(), tables);
+        }
+        return result;
+    }
+    
+    private Map<String, TableMetaData> decorateLogicTableName(final 
Collection<TableMetaData> tableMetaDatas, final ShardingRule rule) {
+        Map<String, TableMetaData> result = new LinkedHashMap<>();
+        for (TableMetaData each : tableMetaDatas) {
+            
rule.findLogicTableByActualTable(each.getName()).ifPresent(tableName -> 
result.put(tableName, each));
+        }
+        return result;
+    }
+    
     private Map<String, TableMetaData> parallelLoadTables(final DatabaseType 
databaseType, final Map<String, DataSource> dataSourceMap, final DataNodes 
dataNodes,
                                                           final String 
tableName, final int maxConnectionsSizePerQuery) {
         Map<String, List<DataNode>> dataNodeGroups = 
dataNodes.getDataNodeGroups(tableName);
diff --git 
a/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-core/src/test/java/org/apache/shardingsphere/sharding/metadata/ShardingTableMetaDataBuilderTest.java
 
b/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-core/src/test/java/org/apache/shardingsphere/sharding/metadata/ShardingTableMetaDataBuilderTest.java
new file mode 100644
index 0000000..e4ba81f
--- /dev/null
+++ 
b/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-core/src/test/java/org/apache/shardingsphere/sharding/metadata/ShardingTableMetaDataBuilderTest.java
@@ -0,0 +1,331 @@
+/*
+ *
+ *  * Licensed to the Apache Software Foundation (ASF) under one or more
+ *  * contributor license agreements.  See the NOTICE file distributed with
+ *  * this work for additional information regarding copyright ownership.
+ *  * The ASF licenses this file to You under the Apache License, Version 2.0
+ *  * (the "License"); you may not use this file except in compliance with
+ *  * the License.  You may obtain a copy of the License at
+ *  *
+ *  *     http://www.apache.org/licenses/LICENSE-2.0
+ *  *
+ *  * Unless required by applicable law or agreed to in writing, software
+ *  * distributed under the License is distributed on an "AS IS" BASIS,
+ *  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  * See the License for the specific language governing permissions and
+ *  * limitations under the License.
+ *
+ */
+
+package org.apache.shardingsphere.sharding.metadata;
+
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import 
org.apache.shardingsphere.infra.config.properties.ConfigurationProperties;
+import 
org.apache.shardingsphere.infra.config.properties.ConfigurationPropertyKey;
+import org.apache.shardingsphere.infra.database.type.DatabaseType;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.SchemaBuilderMaterials;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.DialectTableMetaDataLoader;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.RuleBasedTableMetaDataBuilder;
+import org.apache.shardingsphere.infra.metadata.schema.model.IndexMetaData;
+import org.apache.shardingsphere.infra.metadata.schema.model.TableMetaData;
+import org.apache.shardingsphere.infra.rule.ShardingSphereRule;
+import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader;
+import org.apache.shardingsphere.infra.spi.ordered.OrderedSPIRegistry;
+import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration;
+import 
org.apache.shardingsphere.sharding.api.config.rule.ShardingTableRuleConfiguration;
+import org.apache.shardingsphere.sharding.rule.ShardingRule;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Answers;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import javax.sql.DataSource;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.startsWith;
+import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class ShardingTableMetaDataBuilderTest {
+    
+    private static final String TABLE_NAME = "t_order";
+    
+    static {
+        
ShardingSphereServiceLoader.register(RuleBasedTableMetaDataBuilder.class);
+        ShardingSphereServiceLoader.register(DialectTableMetaDataLoader.class);
+    }
+    
+    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
+    private DatabaseType databaseType;
+    
+    @Mock
+    private DataSource dataSource;
+    
+    @Mock
+    private ConfigurationProperties props;
+    
+    private ShardingRule shardingRule;
+    
+    @Before
+    public void setUp() throws SQLException {
+        Connection connection = mock(Connection.class, RETURNS_DEEP_STUBS);
+        when(dataSource.getConnection()).thenReturn(connection);
+        shardingRule = buildShardingRule();
+        mockH2ResultSet(connection);
+        mockMySQLResultSet(connection);
+        mockOracleResultSet(connection);
+        mockPGResultSet(connection);
+        mockSQLServerResultSet(connection);
+        mockDatabaseMetaData(connection);
+    }
+    
+    private ShardingRule buildShardingRule() {
+        ShardingTableRuleConfiguration tableRuleConfig = new 
ShardingTableRuleConfiguration(TABLE_NAME, "ds.t_order_${0..1}");
+        ShardingRuleConfiguration shardingRuleConfig = new 
ShardingRuleConfiguration();
+        shardingRuleConfig.getTables().add(tableRuleConfig);
+        return new ShardingRule(shardingRuleConfig, 
Collections.singletonMap("ds", dataSource));
+    }
+    
+    private void mockSQLServerResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet("t_order_0");
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT obj.name AS 
TABLE_NAME, col.name AS COLUMN_NAME, t.name AS 
DATA_TYPE"))).thenReturn(preparedStatement);
+        ResultSet indexResultSet = createIndexResultSet();
+        PreparedStatement indexStatement = mock(PreparedStatement.class);
+        when(indexStatement.executeQuery()).thenReturn(indexResultSet);
+        when(connection.prepareStatement(startsWith("SELECT a.name AS 
INDEX_NAME, c.name AS TABLE_NAME FROM sys.indexes 
a"))).thenReturn(indexStatement);
+    }
+    
+    private void mockPGResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet("t_order_0");
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT table_name, 
column_name, ordinal_position, data_type, udt_name, 
column_default"))).thenReturn(preparedStatement);
+        ResultSet indexResultSet = createIndexResultSet();
+        PreparedStatement indexStatement = mock(PreparedStatement.class);
+        when(indexStatement.executeQuery()).thenReturn(indexResultSet);
+        when(connection.prepareStatement(startsWith("SELECT tablename, 
indexname FROM pg_indexes WHERE schemaname"))).thenReturn(indexStatement);
+    }
+    
+    private void mockOracleResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet("t_order_0");
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT OWNER AS 
TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, 
DATA_TYPE"))).thenReturn(preparedStatement);
+        ResultSet indexResultSet = createIndexResultSet();
+        PreparedStatement indexStatement = mock(PreparedStatement.class);
+        when(indexStatement.executeQuery()).thenReturn(indexResultSet);
+        when(connection.prepareStatement(startsWith("SELECT OWNER AS 
TABLE_SCHEMA, TABLE_NAME, INDEX_NAME FROM ALL_INDEXES WHERE 
OWNER"))).thenReturn(indexStatement);
+    }
+    
+    private void mockMySQLResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet("t_order_0");
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT TABLE_NAME, 
COLUMN_NAME, DATA_TYPE, COLUMN_KEY, EXTRA, COLLATION_NAME FROM 
information_schema.columns"))).thenReturn(preparedStatement);
+        ResultSet indexResultSet = createIndexResultSet();
+        PreparedStatement indexStatement = mock(PreparedStatement.class);
+        when(indexStatement.executeQuery()).thenReturn(indexResultSet);
+        when(connection.prepareStatement(startsWith("SELECT TABLE_NAME, 
INDEX_NAME FROM information_schema.statistics WHERE 
TABLE_SCHEMA"))).thenReturn(indexStatement);
+    }
+    
+    private void mockH2ResultSet(final Connection connection) throws 
SQLException {
+        ResultSet resultSet = createColumnResultSet("t_order_0");
+        PreparedStatement preparedStatement = mock(PreparedStatement.class);
+        when(preparedStatement.executeQuery()).thenReturn(resultSet);
+        when(connection.prepareStatement(startsWith("SELECT TABLE_CATALOG, 
TABLE_NAME"))).thenReturn(preparedStatement);
+        ResultSet indexResultSet = createIndexResultSet();
+        PreparedStatement indexStatement = mock(PreparedStatement.class);
+        when(indexStatement.executeQuery()).thenReturn(indexResultSet);
+        when(connection.prepareStatement(startsWith("SELECT TABLE_CATALOG, 
TABLE_NAME, INDEX_NAME, COLUMN_NAME FROM 
INFORMATION_SCHEMA.INDEXES"))).thenReturn(indexStatement);
+    }
+    
+    private ResultSet createIndexResultSet() throws SQLException {
+        ResultSet result = mock(ResultSet.class);
+        when(result.next()).thenReturn(true, false);
+        
when(result.getString("INDEX_NAME")).thenReturn("order_index_t_order_t_order_0");
+        when(result.getString("TABLE_NAME")).thenReturn("t_order_0");
+        
when(result.getString("indexname")).thenReturn("order_index_t_order_t_order_0");
+        when(result.getString("tablename")).thenReturn("t_order_0");
+        return result;
+    }
+    
+    private void mockDatabaseMetaData(final Connection connection) throws 
SQLException {
+        DatabaseMetaData databaseMetaData = mock(DatabaseMetaData.class, 
RETURNS_DEEP_STUBS);
+        when(connection.getMetaData()).thenReturn(databaseMetaData);
+        ResultSet dataTypeResultSet = createDataTypeResultSet();
+        when(databaseMetaData.getTypeInfo()).thenReturn(dataTypeResultSet);
+        ResultSet tableResultSet1 = createTableResultSet();
+        ResultSet tableResultSet2 = createTableResultSet();
+        ResultSet columnResultSet1 = createColumnResultSet("t_order_0");
+        ResultSet columnResultSet2 = createColumnResultSet("t_order_1");
+        when(databaseMetaData.getTables(any(), any(), eq("t_order_0"), 
eq(null))).thenReturn(tableResultSet1);
+        when(databaseMetaData.getTables(any(), any(), eq("t_order_1"), 
eq(null))).thenReturn(tableResultSet2);
+        when(databaseMetaData.getColumns(any(), any(), eq("t_order_0"), 
eq("%"))).thenReturn(columnResultSet1);
+        when(databaseMetaData.getColumns(any(), any(), eq("t_order_1"), 
eq("%"))).thenReturn(columnResultSet2);
+    }
+    
+    private ResultSet createTableResultSet() throws SQLException {
+        ResultSet result = mock(ResultSet.class);
+        when(result.next()).thenReturn(true, false);
+        return result;
+    }
+    
+    private ResultSet createColumnResultSet(final String actualTable) throws 
SQLException {
+        ResultSet result = mock(ResultSet.class);
+        when(result.next()).thenReturn(true, true, true, false);
+        when(result.getString("TABLE_NAME")).thenReturn(actualTable);
+        when(result.getString("table_name")).thenReturn(actualTable);
+        when(result.getString("COLUMN_NAME")).thenReturn("id", "pwd_cipher", 
"pwd_plain");
+        when(result.getString("column_name")).thenReturn("id", "pwd_cipher", 
"pwd_plain");
+        when(result.getString("TYPE_NAME")).thenReturn("INT");
+        when(result.getString("DATA_TYPE")).thenReturn("INT");
+        when(result.getString("udt_name")).thenReturn("INT");
+        when(result.getInt("ordinal_position")).thenReturn(1, 2, 3);
+        return result;
+    }
+    
+    private ResultSet createDataTypeResultSet() throws SQLException {
+        ResultSet dataTypeResultSet = mock(ResultSet.class);
+        when(dataTypeResultSet.next()).thenReturn(true, false);
+        when(dataTypeResultSet.getString("TYPE_NAME")).thenReturn("INT");
+        when(dataTypeResultSet.getInt("DATA_TYPE")).thenReturn(1);
+        return dataTypeResultSet;
+    }
+    
+    @Test
+    public void assertLoadTablesH2() throws SQLException {
+        Collection<ShardingSphereRule> rules = 
Collections.singletonList(shardingRule);
+        final ShardingTableMetaDataBuilder loader = 
(ShardingTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(shardingRule);
+        
when(props.getValue(ConfigurationPropertyKey.CHECK_TABLE_METADATA_ENABLED)).thenReturn(false);
+        when(databaseType.getName()).thenReturn("H2");
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
shardingRule, new SchemaBuilderMaterials(databaseType, 
Collections.singletonMap("ds", dataSource), rules, props),
+                new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build()));
+        assertResult(actual);
+    }
+    
+    @Test
+    public void assertLoadTablesMySQL() throws SQLException {
+        Collection<ShardingSphereRule> rules = 
Collections.singletonList(shardingRule);
+        final ShardingTableMetaDataBuilder loader = 
(ShardingTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(shardingRule);
+        
when(props.getValue(ConfigurationPropertyKey.CHECK_TABLE_METADATA_ENABLED)).thenReturn(false);
+        when(databaseType.getName()).thenReturn("MySQL");
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
shardingRule, new SchemaBuilderMaterials(databaseType, 
Collections.singletonMap("ds", dataSource), rules, props),
+                new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                        0L, TimeUnit.MILLISECONDS, new 
LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build()));
+        assertResult(actual);
+    }
+    
+    @Test
+    public void assertLoadTablesOracle() throws SQLException {
+        Collection<ShardingSphereRule> rules = 
Collections.singletonList(shardingRule);
+        final ShardingTableMetaDataBuilder loader = 
(ShardingTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(shardingRule);
+        
when(props.getValue(ConfigurationPropertyKey.CHECK_TABLE_METADATA_ENABLED)).thenReturn(false);
+        when(databaseType.getName()).thenReturn("Oracle");
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
shardingRule, new SchemaBuilderMaterials(databaseType, 
Collections.singletonMap("ds", dataSource), rules, props),
+                new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                        0L, TimeUnit.MILLISECONDS, new 
LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build()));
+        assertResult(actual);
+    }
+    
+    @Test
+    public void assertLoadTablesPGSQL() throws SQLException {
+        Collection<ShardingSphereRule> rules = 
Collections.singletonList(shardingRule);
+        final ShardingTableMetaDataBuilder loader = 
(ShardingTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(shardingRule);
+        
when(props.getValue(ConfigurationPropertyKey.CHECK_TABLE_METADATA_ENABLED)).thenReturn(false);
+        when(databaseType.getName()).thenReturn("PostgreSQL");
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
shardingRule, new SchemaBuilderMaterials(databaseType, 
Collections.singletonMap("ds", dataSource), rules, props),
+                new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                        0L, TimeUnit.MILLISECONDS, new 
LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build()));
+        assertResult(actual);
+    }
+    
+    @Test
+    public void assertLoadTablesSQLServer() throws SQLException {
+        Collection<ShardingSphereRule> rules = 
Collections.singletonList(shardingRule);
+        final ShardingTableMetaDataBuilder loader = 
(ShardingTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(shardingRule);
+        
when(props.getValue(ConfigurationPropertyKey.CHECK_TABLE_METADATA_ENABLED)).thenReturn(false);
+        when(databaseType.getName()).thenReturn("SQLServer");
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
shardingRule, new SchemaBuilderMaterials(databaseType, 
Collections.singletonMap("ds", dataSource), rules, props),
+                new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                        0L, TimeUnit.MILLISECONDS, new 
LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build()));
+        assertResult(actual);
+    }
+    
+    private void assertResult(final Map<String, TableMetaData> actual) {
+        TableMetaData tableMetaData = actual.values().iterator().next();
+        assertThat(tableMetaData.getColumnMetaData(0).getName(), is("id"));
+        assertThat(tableMetaData.getColumnMetaData(1).getName(), 
is("pwd_cipher"));
+        assertThat(tableMetaData.getColumnMetaData(2).getName(), 
is("pwd_plain"));
+        IndexMetaData indexMetaData = 
tableMetaData.getIndexes().values().iterator().next();
+        assertThat(indexMetaData.getName(), 
is("order_index_t_order_t_order_0"));
+    }
+    
+    @Test
+    public void assertLoadTablesDefault() throws SQLException {
+        Collection<ShardingSphereRule> rules = 
Collections.singletonList(shardingRule);
+        final ShardingTableMetaDataBuilder loader = 
(ShardingTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(shardingRule);
+        
when(props.getValue(ConfigurationPropertyKey.CHECK_TABLE_METADATA_ENABLED)).thenReturn(false);
+        when(databaseType.getName()).thenReturn("default");
+        
when(databaseType.formatTableNamePattern("t_order_0")).thenReturn("t_order_0");
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
shardingRule, new SchemaBuilderMaterials(databaseType, 
Collections.singletonMap("ds", dataSource), rules, props),
+                new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                        0L, TimeUnit.MILLISECONDS, new 
LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build()));
+        TableMetaData tableMetaData = actual.values().iterator().next();
+        assertThat(tableMetaData.getColumnMetaData(0).getName(), is("id"));
+        assertThat(tableMetaData.getColumnMetaData(1).getName(), 
is("pwd_cipher"));
+        assertThat(tableMetaData.getColumnMetaData(2).getName(), 
is("pwd_plain"));
+    }
+    
+    @Test
+    public void assertLoadTablesWithCheck() throws SQLException {
+        Collection<ShardingSphereRule> rules = 
Collections.singletonList(shardingRule);
+        final ShardingTableMetaDataBuilder loader = 
(ShardingTableMetaDataBuilder) 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
rules).get(shardingRule);
+        
when(props.getValue(ConfigurationPropertyKey.CHECK_TABLE_METADATA_ENABLED)).thenReturn(true);
+        
when(props.getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY)).thenReturn(1);
+        
when(databaseType.formatTableNamePattern("t_order_0")).thenReturn("t_order_0");
+        
when(databaseType.formatTableNamePattern("t_order_1")).thenReturn("t_order_1");
+        Collection<String> tableNames = new LinkedList<>();
+        tableNames.add(TABLE_NAME);
+        Map<String, TableMetaData> actual = loader.load(tableNames, 
shardingRule, new SchemaBuilderMaterials(databaseType, 
Collections.singletonMap("ds", dataSource), rules, props),
+                new 
ThreadPoolExecutor(Runtime.getRuntime().availableProcessors() * 2, 
Runtime.getRuntime().availableProcessors() * 2,
+                        0L, TimeUnit.MILLISECONDS, new 
LinkedBlockingQueue<>(), new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("ShardingSphere-SchemaBuilder-%d").build()));
+        TableMetaData tableMetaData = actual.values().iterator().next();
+        assertThat(tableMetaData.getColumnMetaData(0).getName(), is("id"));
+        assertThat(tableMetaData.getColumnMetaData(1).getName(), 
is("pwd_cipher"));
+        assertThat(tableMetaData.getColumnMetaData(2).getName(), 
is("pwd_plain"));
+    }
+}
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/SchemaBuilder.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/SchemaBuilder.java
index b275587..d8ca129 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/SchemaBuilder.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/SchemaBuilder.java
@@ -50,6 +50,7 @@ import java.util.concurrent.Future;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
 import java.util.stream.Collectors;
 
 /**
@@ -85,15 +86,7 @@ public final class SchemaBuilder {
    /**
     * Build the meta data map of all actual tables, keyed by table name.
     *
     * <p>Remain tables are appended first, then tables owned by table-contained rules.
     * NOTE(review): the inline loop this hunk removed only loaded a logic table when it
     * was NOT already present in {@code result} ({@code !result.containsKey(table)});
     * {@code appendLogicTables} ends with a {@code putAll}, which overwrites
     * pre-existing entries — confirm this change of precedence is intended.</p>
     *
     * @param materials schema builder materials
     * @return actual table meta data map
     * @throws SQLException SQL exception
     */
    private static Map<String, TableMetaData> buildActualTableMetaDataMap(final SchemaBuilderMaterials materials) throws SQLException {
        Map<String, TableMetaData> result = new HashMap<>(materials.getRules().size(), 1);
        appendRemainTables(materials, result);
        appendLogicTables(materials, result);
        return result;
    }
     
@@ -106,6 +99,11 @@ public final class SchemaBuilder {
         appendDefaultRemainTables(materials, tables);
     }
     
+    private static void appendLogicTables(final SchemaBuilderMaterials 
materials, final Map<String, TableMetaData> result) throws SQLException {
+        result.putAll(TableMetaDataBuilder.loadLogicTables(materials, 
EXECUTOR_SERVICE)
+                        
.stream().collect(Collectors.toMap(TableMetaData::getName, Function.identity(), 
(oldValue, currentValue) -> oldValue)));
+    }
+    
     private static Map<String, TableMetaData> buildLogicTableMetaDataMap(final 
SchemaBuilderMaterials materials, final Map<String, TableMetaData> tables) {
         Map<String, TableMetaData> result = new 
HashMap<>(materials.getRules().size(), 1);
         for (ShardingSphereRule rule : materials.getRules()) {
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/TableMetaDataBuilder.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/TableMetaDataBuilder.java
index 0b523b1..a8e794e 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/TableMetaDataBuilder.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/TableMetaDataBuilder.java
@@ -29,8 +29,12 @@ import 
org.apache.shardingsphere.infra.spi.ordered.OrderedSPIRegistry;
 
 import java.sql.SQLException;
 import java.util.Collection;
+import java.util.LinkedList;
+import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Optional;
+import java.util.concurrent.ExecutorService;
+import java.util.stream.Collectors;
 
 /**
  * Table meta data builder.
@@ -79,7 +83,40 @@ public final class TableMetaDataBuilder {
         }
         return Optional.empty();
     }
-
+    
+    /**
+     * Load logic table metadata.
+     *
+     * @param materials schema builder materials
+     * @param executorService executorService
+     * @return table meta data collection
+     * @throws SQLException SQL exception
+     */
+    @SuppressWarnings("rawtypes")
+    public static Collection<TableMetaData> loadLogicTables(final 
SchemaBuilderMaterials materials, final ExecutorService executorService) throws 
SQLException {
+        Collection<TableMetaData> result = new LinkedList<>();
+        for (Entry<ShardingSphereRule, RuleBasedTableMetaDataBuilder> entry : 
OrderedSPIRegistry.getRegisteredServices(RuleBasedTableMetaDataBuilder.class, 
materials.getRules()).entrySet()) {
+            if (entry.getKey() instanceof TableContainedRule) {
+                loadTableContainedRuleTables(materials, executorService, 
result, entry);
+            }
+        }
+        return result;
+    }
+    
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    private static void loadTableContainedRuleTables(final 
SchemaBuilderMaterials materials, final ExecutorService executorService, final 
Collection<TableMetaData> result,
+                                                     final 
Entry<ShardingSphereRule, RuleBasedTableMetaDataBuilder> ruleBuilderEntry) 
throws SQLException {
+        TableContainedRule rule = (TableContainedRule) 
ruleBuilderEntry.getKey();
+        RuleBasedTableMetaDataBuilder loader = ruleBuilderEntry.getValue();
+        Collection<String> loadedTables = 
result.stream().map(TableMetaData::getName).collect(Collectors.toSet());
+        Collection<String> needLoadTables = 
rule.getTables().stream().filter(each -> 
!loadedTables.contains(each)).collect(Collectors.toList());
+        if (!needLoadTables.isEmpty()) {
+            Map<String, TableMetaData> tableMetaDataMap = 
loader.load(needLoadTables, rule, materials, executorService);
+            result.addAll(tableMetaDataMap.entrySet().stream()
+                    .map(entry -> new TableMetaData(entry.getKey(), 
entry.getValue().getColumns().values(), 
entry.getValue().getIndexes().values())).collect(Collectors.toList()));
+        }
+    }
+    
     /**
      * Load logic table metadata.
      * @param tableName table name
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/TableMetaDataLoader.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/TableMetaDataLoader.java
index ab75166..7181294 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/TableMetaDataLoader.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/TableMetaDataLoader.java
@@ -20,19 +20,29 @@ package 
org.apache.shardingsphere.infra.metadata.schema.builder.loader;
 import lombok.AccessLevel;
 import lombok.NoArgsConstructor;
 import org.apache.shardingsphere.infra.database.type.DatabaseType;
+import org.apache.shardingsphere.infra.exception.ShardingSphereException;
 import 
org.apache.shardingsphere.infra.metadata.schema.builder.SchemaBuilderMaterials;
 import 
org.apache.shardingsphere.infra.metadata.schema.builder.loader.adapter.MetaDataLoaderConnectionAdapter;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.DialectTableMetaDataLoader;
 import org.apache.shardingsphere.infra.metadata.schema.model.TableMetaData;
 import org.apache.shardingsphere.infra.rule.ShardingSphereRule;
 import 
org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule;
+import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader;
 
 import javax.sql.DataSource;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
 
 /**
  * Table meta data loader.
@@ -62,6 +72,26 @@ public final class TableMetaDataLoader {
     /**
      * Load table meta data.
      *
+     * @param dataSourceTable data source table name map
+     * @param databaseType database type
+     * @param dataSourceMap data source map
+     * @return table meta data map
+     * @throws SQLException SQL exception
+     */
+    public static Map<String, TableMetaData> load(final Map<String, 
Collection<String>> dataSourceTable, final DatabaseType databaseType,
+                                                  final Map<String, 
DataSource> dataSourceMap) throws SQLException {
+        Map<String, TableMetaData> result = new LinkedHashMap<>();
+        for (Entry<String, Collection<String>> entry : 
dataSourceTable.entrySet()) {
+            for (String each : entry.getValue()) {
+                load(dataSourceMap.get(entry.getKey()), each, 
databaseType).ifPresent(tableMetaData -> result.put(each, tableMetaData));
+            }
+        }
+        return result;
+    }
+    
+    /**
+     * Load table meta data.
+     *
      * @param tableName table name
      * @param logicDataSourceNames logic data source names
      * @param materials materials
@@ -79,6 +109,36 @@ public final class TableMetaDataLoader {
         return Optional.empty();
     }
     
+    /**
+     * Load table meta data by executor service.
+     *
+     * @param loader dialect table meta data loader
+     * @param dataSourceTables data source table names map
+     * @param dataSourceMap data source map
+     * @param executorService executor service
+     * @return table meta data map
+     * @throws SQLException SQL exception
+     */
+    public static Map<String, TableMetaData> load(final 
DialectTableMetaDataLoader loader, final Map<String, Collection<String>> 
dataSourceTables,
+                                                  final Map<String, 
DataSource> dataSourceMap, final ExecutorService executorService) throws 
SQLException {
+        Map<String, TableMetaData> result = new LinkedHashMap<>();
+        Collection<Future<Map<String, TableMetaData>>> futures = new 
LinkedList<>();
+        for (Map.Entry<String, Collection<String>> each : 
dataSourceTables.entrySet()) {
+            futures.add(executorService.submit(() -> 
loader.loadWithTables(dataSourceMap.get(each.getKey()), each.getValue())));
+        }
+        try {
+            for (Future<Map<String, TableMetaData>> each : futures) {
+                result.putAll(each.get());
+            }
+        } catch (final InterruptedException | ExecutionException ex) {
+            if (ex.getCause() instanceof SQLException) {
+                throw (SQLException) ex.getCause();
+            }
+            throw new ShardingSphereException(ex);
+        }
+        return result;
+    }
+    
     private static boolean isTableExist(final Connection connection, final 
String tableNamePattern) throws SQLException {
         try (ResultSet resultSet = 
connection.getMetaData().getTables(connection.getCatalog(), 
connection.getSchema(), tableNamePattern, null)) {
             return resultSet.next();
@@ -93,4 +153,19 @@ public final class TableMetaDataLoader {
         }
         return logicDataSourceName;
     }
+    
+    /**
+     * Find dialect table meta data loader.
+     *
+     * @param databaseType database type
+     * @return dialect table meta data loader
+     */
+    public static Optional<DialectTableMetaDataLoader> 
findDialectTableMetaDataLoader(final DatabaseType databaseType) {
+        for (DialectTableMetaDataLoader each : 
ShardingSphereServiceLoader.getSingletonServiceInstances(DialectTableMetaDataLoader.class))
 {
+            if (each.getDatabaseType().equals(databaseType.getName())) {
+                return Optional.of(each);
+            }
+        }
+        return Optional.empty();
+    }
 }
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/H2TableMetaDataLoader.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/H2TableMetaDataLoader.java
index 10368ab..954887b 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/H2TableMetaDataLoader.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/H2TableMetaDataLoader.java
@@ -46,6 +46,8 @@ public final class H2TableMetaDataLoader implements 
DialectTableMetaDataLoader {
     private static final String TABLE_META_DATA_SQL = "SELECT TABLE_CATALOG, 
TABLE_NAME, COLUMN_NAME, DATA_TYPE, TYPE_NAME FROM INFORMATION_SCHEMA.COLUMNS 
WHERE TABLE_CATALOG=? AND TABLE_SCHEMA=?";
 
     private static final String TABLE_META_DATA_SQL_WITH_EXISTED_TABLES = 
TABLE_META_DATA_SQL + " AND TABLE_NAME NOT IN (%s)";
+    
+    private static final String TABLE_META_DATA_SQL_IN_TABLES = 
TABLE_META_DATA_SQL + " AND TABLE_NAME IN (%s)";
 
     private static final String INDEX_META_DATA_SQL = "SELECT TABLE_CATALOG, 
TABLE_NAME, INDEX_NAME, COLUMN_NAME FROM INFORMATION_SCHEMA.INDEXES"
             + " WHERE TABLE_CATALOG=? AND TABLE_SCHEMA=? AND TABLE_NAME IN 
(%s)";
@@ -53,22 +55,31 @@ public final class H2TableMetaDataLoader implements 
DialectTableMetaDataLoader {
     private static final String PRIMARY_KEY_META_DATA_SQL = "SELECT 
TABLE_NAME, COLUMN_NAME FROM INFORMATION_SCHEMA.INDEXES WHERE TABLE_CATALOG=? 
AND TABLE_SCHEMA=? AND PRIMARY_KEY = TRUE";
 
     private static final String PRIMARY_KEY_META_DATA_SQL_WITH_EXISTED_TABLES 
= PRIMARY_KEY_META_DATA_SQL + " AND TABLE_NAME NOT IN (%s)";
+    
+    private static final String PRIMARY_KEY_META_DATA_SQL_IN_TABLES = 
PRIMARY_KEY_META_DATA_SQL + " AND TABLE_NAME IN (%s)";
 
     private static final String GENERATED_INFO_SQL = "SELECT C.TABLE_NAME 
TABLE_NAME, C.COLUMN_NAME COLUMN_NAME, COALESCE(S.IS_GENERATED, FALSE) 
IS_GENERATED FROM INFORMATION_SCHEMA.COLUMNS C"
             + " RIGHT JOIN INFORMATION_SCHEMA.SEQUENCES S ON 
C.SEQUENCE_NAME=S.SEQUENCE_NAME WHERE C.TABLE_CATALOG=? AND C.TABLE_SCHEMA=?";
 
     private static final String GENERATED_INFO_SQL_WITH_EXISTED_TABLES = 
GENERATED_INFO_SQL + " AND TABLE_NAME NOT IN (%s)";
+    
+    private static final String GENERATED_INFO_SQL_IN_TABLES = 
GENERATED_INFO_SQL + " AND TABLE_NAME IN (%s)";
 
     @Override
     public Map<String, TableMetaData> load(final DataSource dataSource, final 
Collection<String> existedTables) throws SQLException {
-        return loadTableMetaDataMap(dataSource, existedTables);
+        return loadTableMetaDataMap(dataSource, existedTables, true);
     }
-
-    private Map<String, TableMetaData> loadTableMetaDataMap(final DataSource 
dataSource, final Collection<String> existedTables) throws SQLException {
+    
+    @Override
+    public Map<String, TableMetaData> loadWithTables(final DataSource 
dataSource, final Collection<String> tables) throws SQLException {
+        return loadTableMetaDataMap(dataSource, tables, false);
+    }
+    
+    private Map<String, TableMetaData> loadTableMetaDataMap(final DataSource 
dataSource, final Collection<String> tables, final boolean isExclude) throws 
SQLException {
         Map<String, TableMetaData> result = new LinkedHashMap<>();
         try (Connection connection = dataSource.getConnection()) {
-            Map<String, Collection<ColumnMetaData>> columnMetaDataMap = 
loadColumnMetaDataMap(connection, existedTables);
-            Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
columnMetaDataMap.isEmpty() ? Collections.emptyMap() : 
loadIndexMetaData(connection, columnMetaDataMap.keySet());
+            Map<String, Collection<ColumnMetaData>> columnMetaDataMap = 
loadColumnMetaDataMap(connection, tables, isExclude);
+            Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
columnMetaDataMap.isEmpty() ? Collections.emptyMap() : 
loadIndexMetaData(connection, columnMetaDataMap.keySet(), isExclude);
             for (Entry<String, Collection<ColumnMetaData>> entry : 
columnMetaDataMap.entrySet()) {
                 result.put(entry.getKey(), new TableMetaData(entry.getKey(), 
entry.getValue(), indexMetaDataMap.getOrDefault(entry.getKey(), 
Collections.emptyList())));
             }
@@ -76,12 +87,12 @@ public final class H2TableMetaDataLoader implements 
DialectTableMetaDataLoader {
         return result;
     }
 
-    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final Connection connection, final Collection<String> 
existedTables) throws SQLException {
+    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final Connection connection, final Collection<String> 
tables, final boolean isExclude) throws SQLException {
         Map<String, Collection<ColumnMetaData>> result = new HashMap<>();
-        try (PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(existedTables))) {
+        try (PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(tables, isExclude))) {
             Map<String, Integer> dataTypes = 
DataTypeLoader.load(connection.getMetaData());
-            Map<String, Collection<String>> tablePrimaryKeys = 
loadTablePrimaryKeys(connection, existedTables);
-            Map<String, Map<String, Boolean>> tableGenerated = 
loadTableGenerated(connection, existedTables);
+            Map<String, Collection<String>> tablePrimaryKeys = 
loadTablePrimaryKeys(connection, tables, isExclude);
+            Map<String, Map<String, Boolean>> tableGenerated = 
loadTableGenerated(connection, tables, isExclude);
             preparedStatement.setString(1, connection.getCatalog());
             preparedStatement.setString(2, "PUBLIC");
             try (ResultSet resultSet = preparedStatement.executeQuery()) {
@@ -110,12 +121,13 @@ public final class H2TableMetaDataLoader implements 
DialectTableMetaDataLoader {
         return new ColumnMetaData(columnName, dataTypeMap.get(typeName), 
primaryKey, generated, true);
     }
 
-    private String getTableMetaDataSQL(final Collection<String> existedTables) 
{
-        return existedTables.isEmpty() ? TABLE_META_DATA_SQL
-                : String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, 
existedTables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    private String getTableMetaDataSQL(final Collection<String> tables, final 
boolean isExclude) {
+        return tables.isEmpty() ? TABLE_META_DATA_SQL
+                : isExclude ? 
String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, tables.stream().map(each 
-> String.format("'%s'", each)).collect(Collectors.joining(",")))
+                : String.format(TABLE_META_DATA_SQL_IN_TABLES, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
     }
 
-    private Map<String, Collection<IndexMetaData>> loadIndexMetaData(final 
Connection connection, final Collection<String> tableNames) throws SQLException 
{
+    private Map<String, Collection<IndexMetaData>> loadIndexMetaData(final 
Connection connection, final Collection<String> tableNames, final boolean 
isExclude) throws SQLException {
         Map<String, Collection<IndexMetaData>> result = new HashMap<>();
         try (PreparedStatement preparedStatement = 
connection.prepareStatement(getIndexMetaDataSQL(tableNames))) {
             preparedStatement.setString(1, connection.getCatalog());
@@ -127,7 +139,8 @@ public final class H2TableMetaDataLoader implements 
DialectTableMetaDataLoader {
                     if (!result.containsKey(tableName)) {
                         result.put(tableName, new LinkedList<>());
                     }
-                    result.get(tableName).add(new 
IndexMetaData(IndexMetaDataUtil.getLogicIndexName(indexName, tableName)));
+                    // TODO Interim index-name handling; revisit once the single-table loader scheme is redesigned
+                    result.get(tableName).add(new IndexMetaData(isExclude ? 
IndexMetaDataUtil.getLogicIndexName(indexName, tableName) : indexName));
                 }
             }
         }
@@ -143,14 +156,18 @@ public final class H2TableMetaDataLoader implements 
DialectTableMetaDataLoader {
         return "H2";
     }
 
-    private String getPrimaryKeyMetaDataSQL(final Collection<String> 
existedTables) {
-        return existedTables.isEmpty() ? PRIMARY_KEY_META_DATA_SQL
-                : String.format(PRIMARY_KEY_META_DATA_SQL_WITH_EXISTED_TABLES, 
existedTables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    private String getPrimaryKeyMetaDataSQL(final Collection<String> tables, 
final boolean isExclude) {
+        return tables.isEmpty() ? PRIMARY_KEY_META_DATA_SQL : 
getPrimaryKeyMetaDataSQLWithTables(tables, isExclude);
     }
-
-    private Map<String, Collection<String>> loadTablePrimaryKeys(final 
Connection connection, final Collection<String> tableNames) throws SQLException 
{
+    
+    private String getPrimaryKeyMetaDataSQLWithTables(final Collection<String> 
tables, final boolean isExclude) {
+        return isExclude ? 
String.format(PRIMARY_KEY_META_DATA_SQL_WITH_EXISTED_TABLES, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")))
+                : String.format(PRIMARY_KEY_META_DATA_SQL_IN_TABLES, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    }
+    
+    private Map<String, Collection<String>> loadTablePrimaryKeys(final 
Connection connection, final Collection<String> tableNames, final boolean 
isExclude) throws SQLException {
         Map<String, Collection<String>> result = new HashMap<>();
-        try (PreparedStatement preparedStatement = 
connection.prepareStatement(getPrimaryKeyMetaDataSQL(tableNames))) {
+        try (PreparedStatement preparedStatement = 
connection.prepareStatement(getPrimaryKeyMetaDataSQL(tableNames, isExclude))) {
             preparedStatement.setString(1, connection.getCatalog());
             preparedStatement.setString(2, "PUBLIC");
             try (ResultSet resultSet = preparedStatement.executeQuery()) {
@@ -164,14 +181,18 @@ public final class H2TableMetaDataLoader implements 
DialectTableMetaDataLoader {
         return result;
     }
 
-    private String getGeneratedInfoSQL(final Collection<String> existedTables) 
{
-        return existedTables.isEmpty() ? GENERATED_INFO_SQL
-                : String.format(GENERATED_INFO_SQL_WITH_EXISTED_TABLES, 
existedTables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    private String getGeneratedInfoSQL(final Collection<String> tables, final 
boolean isExclude) {
+        return tables.isEmpty() ? GENERATED_INFO_SQL : 
getGeneratedInfoSQLWithTables(tables, isExclude);
     }
-
-    private Map<String, Map<String, Boolean>> loadTableGenerated(final 
Connection connection, final Collection<String> tableNames) throws SQLException 
{
+    
+    private String getGeneratedInfoSQLWithTables(final Collection<String> 
tables, final boolean isExclude) {
+        return isExclude ? 
String.format(GENERATED_INFO_SQL_WITH_EXISTED_TABLES, tables.stream().map(each 
-> String.format("'%s'", each)).collect(Collectors.joining(",")))
+             : String.format(GENERATED_INFO_SQL_IN_TABLES, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    }
+    
+    private Map<String, Map<String, Boolean>> loadTableGenerated(final 
Connection connection, final Collection<String> tableNames, final boolean 
isExclude) throws SQLException {
         Map<String, Map<String, Boolean>> result = new HashMap<>();
-        try (PreparedStatement preparedStatement = 
connection.prepareStatement(getGeneratedInfoSQL(tableNames))) {
+        try (PreparedStatement preparedStatement = 
connection.prepareStatement(getGeneratedInfoSQL(tableNames, isExclude))) {
             preparedStatement.setString(1, connection.getCatalog());
             preparedStatement.setString(2, "PUBLIC");
             try (ResultSet resultSet = preparedStatement.executeQuery()) {
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/MySQLTableMetaDataLoader.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/MySQLTableMetaDataLoader.java
index e3b684c..036ff84 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/MySQLTableMetaDataLoader.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/MySQLTableMetaDataLoader.java
@@ -47,28 +47,35 @@ public final class MySQLTableMetaDataLoader implements 
DialectTableMetaDataLoade
     
     private static final String TABLE_META_DATA_SQL_WITH_EXISTED_TABLES = 
TABLE_META_DATA_SQL + " AND TABLE_NAME NOT IN (%s)";
     
+    private static final String TABLE_META_DATA_SQL_IN_TABLES = 
TABLE_META_DATA_SQL + " AND TABLE_NAME IN (%s)";
+    
     private static final String INDEX_META_DATA_SQL = "SELECT TABLE_NAME, 
INDEX_NAME FROM information_schema.statistics WHERE TABLE_SCHEMA=? and 
TABLE_NAME IN (%s)";
     
     @Override
     public Map<String, TableMetaData> load(final DataSource dataSource, final 
Collection<String> existedTables) throws SQLException {
-        return loadTableMetaDataMap(dataSource, existedTables);
+        return loadTableMetaDataMap(dataSource, existedTables, true);
+    }
+    
+    @Override
+    public Map<String, TableMetaData> loadWithTables(final DataSource 
dataSource, final Collection<String> tables) throws SQLException {
+        return loadTableMetaDataMap(dataSource, tables, false);
     }
     
-    private Map<String, TableMetaData> loadTableMetaDataMap(final DataSource 
dataSource, final Collection<String> existedTables) throws SQLException {
+    private Map<String, TableMetaData> loadTableMetaDataMap(final DataSource 
dataSource, final Collection<String> tables, final boolean isExclude) throws 
SQLException {
         Map<String, TableMetaData> result = new LinkedHashMap<>();
-        Map<String, Collection<ColumnMetaData>> columnMetaDataMap = 
loadColumnMetaDataMap(dataSource, existedTables);
-        Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
columnMetaDataMap.isEmpty() ? Collections.emptyMap() : 
loadIndexMetaData(dataSource, columnMetaDataMap.keySet());
+        Map<String, Collection<ColumnMetaData>> columnMetaDataMap = 
loadColumnMetaDataMap(dataSource, tables, isExclude);
+        Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
columnMetaDataMap.isEmpty() ? Collections.emptyMap() : 
loadIndexMetaData(dataSource, columnMetaDataMap.keySet(), isExclude);
         for (Entry<String, Collection<ColumnMetaData>> entry : 
columnMetaDataMap.entrySet()) {
             result.put(entry.getKey(), new TableMetaData(entry.getKey(), 
entry.getValue(), indexMetaDataMap.getOrDefault(entry.getKey(), 
Collections.emptyList())));
         }
         return result;
     }
     
-    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final DataSource dataSource, final Collection<String> 
existedTables) throws SQLException {
+    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final DataSource dataSource, final Collection<String> 
tables, final boolean isExclude) throws SQLException {
         Map<String, Collection<ColumnMetaData>> result = new HashMap<>();
         try (
                 Connection connection = dataSource.getConnection();
-                PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(existedTables))) {
+                PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(tables, isExclude))) {
             Map<String, Integer> dataTypes = 
DataTypeLoader.load(connection.getMetaData());
             dataTypes.putIfAbsent("JSON", -1);
             preparedStatement.setString(1, connection.getCatalog());
@@ -96,12 +103,16 @@ public final class MySQLTableMetaDataLoader implements 
DialectTableMetaDataLoade
         return new ColumnMetaData(columnName, dataTypeMap.get(dataType), 
primaryKey, generated, caseSensitive);
     }
 
-    private String getTableMetaDataSQL(final Collection<String> existedTables) 
{
-        return existedTables.isEmpty() ? TABLE_META_DATA_SQL
-                : String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, 
existedTables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    private String getTableMetaDataSQL(final Collection<String> tables, final 
boolean isExclude) {
+        return tables.isEmpty() ? TABLE_META_DATA_SQL : 
getTableMetaDataSQLWithTables(tables, isExclude);
+    }
+    
+    private String getTableMetaDataSQLWithTables(final Collection<String> 
tables, final boolean isExclude) {
+        return isExclude ? 
String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, tables.stream().map(each 
-> String.format("'%s'", each)).collect(Collectors.joining(",")))
+                : String.format(TABLE_META_DATA_SQL_IN_TABLES, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
     }
     
-    private Map<String, Collection<IndexMetaData>> loadIndexMetaData(final 
DataSource dataSource, final Collection<String> tableNames) throws SQLException 
{
+    private Map<String, Collection<IndexMetaData>> loadIndexMetaData(final 
DataSource dataSource, final Collection<String> tableNames, final boolean 
isExclude) throws SQLException {
         Map<String, Collection<IndexMetaData>> result = new HashMap<>();
         try (
                 Connection connection = dataSource.getConnection();
@@ -114,7 +125,8 @@ public final class MySQLTableMetaDataLoader implements 
DialectTableMetaDataLoade
                     if (!result.containsKey(tableName)) {
                         result.put(tableName, new LinkedList<>());
                     }
-                    result.get(tableName).add(new 
IndexMetaData(IndexMetaDataUtil.getLogicIndexName(indexName, tableName)));
+                    // TODO Interim index-name handling; revisit once the single-table loader scheme is redesigned
+                    result.get(tableName).add(new IndexMetaData(isExclude ? 
IndexMetaDataUtil.getLogicIndexName(indexName, tableName) : indexName));
                 }
             }
         }
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/OracleTableMetaDataLoader.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/OracleTableMetaDataLoader.java
index 85595f3..56062bd 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/OracleTableMetaDataLoader.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/OracleTableMetaDataLoader.java
@@ -48,6 +48,8 @@ public final class OracleTableMetaDataLoader implements 
DialectTableMetaDataLoad
     
     private static final String TABLE_META_DATA_SQL_WITH_EXISTED_TABLES = 
TABLE_META_DATA_SQL + " AND TABLE_NAME NOT IN (%s)";
     
+    private static final String TABLE_META_DATA_SQL_IN_TABLES = 
TABLE_META_DATA_SQL + " AND TABLE_NAME IN (%s)";
+    
     private static final String INDEX_META_DATA_SQL = "SELECT OWNER AS 
TABLE_SCHEMA, TABLE_NAME, INDEX_NAME FROM ALL_INDEXES WHERE OWNER = ? AND 
TABLE_NAME IN (%s)";
     
     private static final String PRIMARY_KEY_META_DATA_SQL = "SELECT A.OWNER AS 
TABLE_SCHEMA, A.TABLE_NAME AS TABLE_NAME, B.COLUMN_NAME AS COLUMN_NAME FROM 
ALL_CONSTRAINTS A INNER JOIN"
@@ -55,6 +57,8 @@ public final class OracleTableMetaDataLoader implements 
DialectTableMetaDataLoad
     
     private static final String PRIMARY_KEY_META_DATA_SQL_WITH_EXISTED_TABLES 
= PRIMARY_KEY_META_DATA_SQL + " AND A.TABLE_NAME NOT IN (%s)";
     
+    private static final String PRIMARY_KEY_META_DATA_SQL_IN_TABLES = 
PRIMARY_KEY_META_DATA_SQL + " AND A.TABLE_NAME IN (%s)";
+    
     private static final int COLLATION_START_MAJOR_VERSION = 12;
     
     private static final int COLLATION_START_MINOR_VERSION = 2;
@@ -63,24 +67,29 @@ public final class OracleTableMetaDataLoader implements 
DialectTableMetaDataLoad
     
     @Override
     public Map<String, TableMetaData> load(final DataSource dataSource, final 
Collection<String> existedTables) throws SQLException {
-        return loadTableMetaDataMap(dataSource, existedTables);
+        return loadTableMetaDataMap(dataSource, existedTables, true);
+    }
+    
+    @Override
+    public Map<String, TableMetaData> loadWithTables(final DataSource 
dataSource, final Collection<String> tables) throws SQLException {
+        return loadTableMetaDataMap(dataSource, tables, false);
     }
     
-    private Map<String, TableMetaData> loadTableMetaDataMap(final DataSource 
dataSource, final Collection<String> existedTables) throws SQLException {
+    private Map<String, TableMetaData> loadTableMetaDataMap(final DataSource 
dataSource, final Collection<String> tables, final boolean isExclude) throws 
SQLException {
         Map<String, TableMetaData> result = new LinkedHashMap<>();
-        Map<String, Collection<ColumnMetaData>> columnMetaDataMap = 
loadColumnMetaDataMap(dataSource, existedTables);
-        Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
columnMetaDataMap.isEmpty() ? Collections.emptyMap() : 
loadIndexMetaData(dataSource, columnMetaDataMap.keySet());
+        Map<String, Collection<ColumnMetaData>> columnMetaDataMap = 
loadColumnMetaDataMap(dataSource, tables, isExclude);
+        Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
columnMetaDataMap.isEmpty() ? Collections.emptyMap() : 
loadIndexMetaData(dataSource, columnMetaDataMap.keySet(), isExclude);
         for (Entry<String, Collection<ColumnMetaData>> entry : 
columnMetaDataMap.entrySet()) {
             result.put(entry.getKey(), new TableMetaData(entry.getKey(), 
entry.getValue(), indexMetaDataMap.getOrDefault(entry.getKey(), 
Collections.emptyList())));
         }
         return result;
     }
     
-    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final DataSource dataSource, final Collection<String> 
existedTables) throws SQLException {
+    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final DataSource dataSource, final Collection<String> 
tables, final boolean isExclude) throws SQLException {
         Map<String, Collection<ColumnMetaData>> result = new HashMap<>();
-        try (Connection connection = dataSource.getConnection(); 
PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(existedTables, 
connection.getMetaData()))) {
+        try (Connection connection = dataSource.getConnection(); 
PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(tables, 
connection.getMetaData(), isExclude))) {
             Map<String, Integer> dataTypes = 
DataTypeLoader.load(connection.getMetaData());
-            Map<String, Collection<String>> tablePrimaryKeys = 
loadTablePrimaryKeys(connection, existedTables);
+            Map<String, Collection<String>> tablePrimaryKeys = 
loadTablePrimaryKeys(connection, tables, isExclude);
             preparedStatement.setString(1, connection.getCatalog());
             try (ResultSet resultSet = preparedStatement.executeQuery()) {
                 while (resultSet.next()) {
@@ -106,7 +115,7 @@ public final class OracleTableMetaDataLoader implements 
DialectTableMetaDataLoad
         return new ColumnMetaData(columnName, dataTypeMap.get(dataType), 
primaryKey, generated, caseSensitive);
     }
     
-    private String getTableMetaDataSQL(final Collection<String> existedTables, 
final DatabaseMetaData metaData) throws SQLException {
+    private String getTableMetaDataSQL(final Collection<String> tables, final 
DatabaseMetaData metaData, final boolean isExclude) throws SQLException {
         StringBuilder stringBuilder = new StringBuilder(28);
         int majorVersion = metaData.getDatabaseMajorVersion();
         int minorVersion = metaData.getDatabaseMinorVersion();
@@ -117,11 +126,17 @@ public final class OracleTableMetaDataLoader implements 
DialectTableMetaDataLoad
             stringBuilder.append(", COLLATION");
         }
         String collation = stringBuilder.toString();
-        return existedTables.isEmpty() ? String.format(TABLE_META_DATA_SQL, 
collation)
-                : String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, 
collation, existedTables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+        return tables.isEmpty() ? String.format(TABLE_META_DATA_SQL, collation)
+                : getTableMetaDataSQLWithTables(tables, collation, isExclude);
     }
     
-    private Map<String, Collection<IndexMetaData>> loadIndexMetaData(final 
DataSource dataSource, final Collection<String> tableNames) throws SQLException 
{
+    private String getTableMetaDataSQLWithTables(final Collection<String> 
tables, final String collation, final boolean isExclude) {
+        return isExclude ? 
String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, collation, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")))
+                : String.format(TABLE_META_DATA_SQL_IN_TABLES, collation, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    
+    }
+    
+    private Map<String, Collection<IndexMetaData>> loadIndexMetaData(final 
DataSource dataSource, final Collection<String> tableNames, final boolean 
isExclude) throws SQLException {
         Map<String, Collection<IndexMetaData>> result = new HashMap<>();
         try (Connection connection = dataSource.getConnection(); 
PreparedStatement preparedStatement = 
connection.prepareStatement(getIndexMetaDataSQL(tableNames))) {
             preparedStatement.setString(1, connection.getCatalog());
@@ -132,7 +147,8 @@ public final class OracleTableMetaDataLoader implements 
DialectTableMetaDataLoad
                     if (!result.containsKey(tableName)) {
                         result.put(tableName, new LinkedList<>());
                     }
-                    result.get(tableName).add(new 
IndexMetaData(IndexMetaDataUtil.getLogicIndexName(indexName, tableName)));
+                    // TODO Interim index-name handling; revisit once the single-table loader scheme is redesigned
+                    result.get(tableName).add(new IndexMetaData(isExclude ? 
IndexMetaDataUtil.getLogicIndexName(indexName, tableName) : indexName));
                 }
             }
         }
@@ -143,9 +159,9 @@ public final class OracleTableMetaDataLoader implements 
DialectTableMetaDataLoad
         return String.format(INDEX_META_DATA_SQL, tableNames.stream().map(each 
-> String.format("'%s'", each)).collect(Collectors.joining(",")));
     }
     
-    private Map<String, Collection<String>> loadTablePrimaryKeys(final 
Connection connection, final Collection<String> tableNames) throws SQLException 
{
+    private Map<String, Collection<String>> loadTablePrimaryKeys(final 
Connection connection, final Collection<String> tableNames, final boolean 
isExclude) throws SQLException {
         Map<String, Collection<String>> result = new HashMap<>();
-        try (PreparedStatement preparedStatement = 
connection.prepareStatement(getPrimaryKeyMetaDataSQL(tableNames))) {
+        try (PreparedStatement preparedStatement = 
connection.prepareStatement(getPrimaryKeyMetaDataSQL(tableNames, isExclude))) {
             preparedStatement.setString(1, connection.getCatalog());
             try (ResultSet resultSet = preparedStatement.executeQuery()) {
                 while (resultSet.next()) {
@@ -158,9 +174,14 @@ public final class OracleTableMetaDataLoader implements 
DialectTableMetaDataLoad
         return result;
     }
     
-    private String getPrimaryKeyMetaDataSQL(final Collection<String> 
existedTables) {
-        return existedTables.isEmpty() ? PRIMARY_KEY_META_DATA_SQL
-                : String.format(PRIMARY_KEY_META_DATA_SQL_WITH_EXISTED_TABLES, 
existedTables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    private String getPrimaryKeyMetaDataSQL(final Collection<String> tables, 
final boolean isExclude) {
+        return tables.isEmpty() ? PRIMARY_KEY_META_DATA_SQL : 
getPrimaryKeyMetaDataSQLWithTables(tables, isExclude);
+    }
+    
+    private String getPrimaryKeyMetaDataSQLWithTables(final Collection<String> 
tables, final boolean isExclude) {
+        return isExclude ? 
String.format(PRIMARY_KEY_META_DATA_SQL_WITH_EXISTED_TABLES, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")))
+                : String.format(PRIMARY_KEY_META_DATA_SQL_IN_TABLES, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    
     }
     
     @Override
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/PostgreSQLTableMetaDataLoader.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/PostgreSQLTableMetaDataLoader.java
index a90a6a6..1a02473 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/PostgreSQLTableMetaDataLoader.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/PostgreSQLTableMetaDataLoader.java
@@ -52,6 +52,8 @@ public final class PostgreSQLTableMetaDataLoader implements 
DialectTableMetaData
     
     private static final String TABLE_META_DATA_SQL_WITH_EXISTED_TABLES = 
BASIC_TABLE_META_DATA_SQL + " AND table_name NOT IN (%s)";
     
+    private static final String TABLE_META_DATA_SQL_IN_TABLES = 
BASIC_TABLE_META_DATA_SQL + " AND table_name IN (%s)";
+    
     private static final String PRIMARY_KEY_META_DATA_SQL = "SELECT 
tc.table_name, kc.column_name FROM information_schema.table_constraints tc"
             + " JOIN information_schema.key_column_usage kc"
             + " ON kc.table_schema = tc.table_schema AND kc.table_name = 
tc.table_name AND kc.constraint_name = tc.constraint_name"
@@ -61,9 +63,18 @@ public final class PostgreSQLTableMetaDataLoader implements 
DialectTableMetaData
     
     @Override
     public Map<String, TableMetaData> load(final DataSource dataSource, final 
Collection<String> existedTables) throws SQLException {
+        return loadTableMetaDataMap(dataSource, existedTables, true);
+    }
+    
+    @Override
+    public Map<String, TableMetaData> loadWithTables(final DataSource 
dataSource, final Collection<String> tables) throws SQLException {
+        return loadTableMetaDataMap(dataSource, tables, false);
+    }
+    
+    private Map<String, TableMetaData> loadTableMetaDataMap(final DataSource 
dataSource, final Collection<String> tables, final boolean isExclude) throws 
SQLException {
         Map<String, TableMetaData> result = new LinkedHashMap<>();
-        Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
loadIndexMetaDataMap(dataSource);
-        for (Entry<String, Collection<ColumnMetaData>> entry : 
loadColumnMetaDataMap(dataSource, existedTables).entrySet()) {
+        Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
loadIndexMetaDataMap(dataSource, isExclude);
+        for (Entry<String, Collection<ColumnMetaData>> entry : 
loadColumnMetaDataMap(dataSource, tables, isExclude).entrySet()) {
             Collection<IndexMetaData> indexMetaDataList = 
indexMetaDataMap.get(entry.getKey());
             if (null == indexMetaDataList) {
                 indexMetaDataList = Collections.emptyList();
@@ -73,10 +84,10 @@ public final class PostgreSQLTableMetaDataLoader implements 
DialectTableMetaData
         return result;
     }
     
-    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final DataSource dataSource, final Collection<String> 
existedTables) throws SQLException {
+    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final DataSource dataSource, final Collection<String> 
tables, final boolean isExclude) throws SQLException {
         Map<String, SortedMap<Integer, ColumnMetaData>> result = new 
HashMap<>();
         try (Connection connection = dataSource.getConnection();
-             PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(existedTables))) {
+             PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(tables, isExclude))) {
             Map<String, Integer> dataTypes = 
DataTypeLoader.load(connection.getMetaData());
             Set<String> primaryKeys = loadPrimaryKeys(connection);
             preparedStatement.setString(1, connection.getSchema());
@@ -119,12 +130,16 @@ public final class PostgreSQLTableMetaDataLoader 
implements DialectTableMetaData
         return new ColumnMetaData(columnName, dataTypeMap.get(dataType), 
isPrimaryKey, generated, caseSensitive);
     }
     
-    private String getTableMetaDataSQL(final Collection<String> existedTables) 
{
-        return existedTables.isEmpty() ? BASIC_TABLE_META_DATA_SQL
-                : String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, 
existedTables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    private String getTableMetaDataSQL(final Collection<String> tables, final 
boolean isExclude) {
+        return tables.isEmpty() ? BASIC_TABLE_META_DATA_SQL : 
getTableMetaDataSQLWithTables(tables, isExclude);
+    }
+    
+    private String getTableMetaDataSQLWithTables(final Collection<String> 
tables, final boolean isExclude) {
+        return isExclude ? 
String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, tables.stream().map(each 
-> String.format("'%s'", each)).collect(Collectors.joining(",")))
+                : String.format(TABLE_META_DATA_SQL_IN_TABLES, 
tables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
     }
     
-    private Map<String, Collection<IndexMetaData>> loadIndexMetaDataMap(final 
DataSource dataSource) throws SQLException {
+    private Map<String, Collection<IndexMetaData>> loadIndexMetaDataMap(final 
DataSource dataSource, final boolean isExclude) throws SQLException {
         Map<String, Collection<IndexMetaData>> result = new HashMap<>();
         try (Connection connection = dataSource.getConnection();
              PreparedStatement preparedStatement = 
connection.prepareStatement(BASIC_INDEX_META_DATA_SQL)) {
@@ -134,7 +149,8 @@ public final class PostgreSQLTableMetaDataLoader implements 
DialectTableMetaData
                     String tableName = resultSet.getString("tablename");
                     Collection<IndexMetaData> indexes = 
result.computeIfAbsent(tableName, k -> new LinkedList<>());
                     String indexName = resultSet.getString("indexname");
-                    indexes.add(new 
IndexMetaData(IndexMetaDataUtil.getLogicIndexName(indexName, tableName)));
+                    // TODO Temporarily process the index scheme, and wait for 
the single table loader scheme to be modified and reconstructed
+                    indexes.add(new IndexMetaData(isExclude ? 
IndexMetaDataUtil.getLogicIndexName(indexName, tableName) : indexName));
                 }
             }
         }
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/SQLServerTableMetaDataLoader.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/SQLServerTableMetaDataLoader.java
index ccf178e..fcb4259 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/SQLServerTableMetaDataLoader.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/loader/dialect/SQLServerTableMetaDataLoader.java
@@ -51,19 +51,26 @@ public final class SQLServerTableMetaDataLoader implements 
DialectTableMetaDataL
     
     private static final String TABLE_META_DATA_SQL_WITH_EXISTED_TABLES = " 
WHERE obj.name NOT IN (%s)";
     
+    private static final String TABLE_META_DATA_SQL_IN_TABLES = " WHERE 
obj.name IN (%s)";
+    
     private static final String INDEX_META_DATA_SQL = "SELECT a.name AS 
INDEX_NAME, c.name AS TABLE_NAME FROM sys.indexes a"
             + " JOIN sys.objects c ON a.object_id = c.object_id WHERE 
a.index_id NOT IN (0, 255) AND c.name IN (%s)";
     
     @Override
     public Map<String, TableMetaData> load(final DataSource dataSource, final 
Collection<String> existedTables) throws SQLException {
-        return loadTableMetaDataMap(dataSource, existedTables);
+        return loadTableMetaDataMap(dataSource, existedTables, true);
+    }
+    
+    @Override
+    public Map<String, TableMetaData> loadWithTables(final DataSource 
dataSource, final Collection<String> tables) throws SQLException {
+        return loadTableMetaDataMap(dataSource, tables, false);
     }
     
-    private Map<String, TableMetaData> loadTableMetaDataMap(final DataSource 
dataSource, final Collection<String> existedTables) throws SQLException {
+    private Map<String, TableMetaData> loadTableMetaDataMap(final DataSource 
dataSource, final Collection<String> tables, final boolean isExclude) throws 
SQLException {
         Map<String, TableMetaData> result = new LinkedHashMap<>();
-        Map<String, Collection<ColumnMetaData>> columnMetaDataMap = 
loadColumnMetaDataMap(dataSource, existedTables);
+        Map<String, Collection<ColumnMetaData>> columnMetaDataMap = 
loadColumnMetaDataMap(dataSource, tables, isExclude);
         if (!columnMetaDataMap.isEmpty()) {
-            Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
loadIndexMetaData(dataSource, columnMetaDataMap.keySet());
+            Map<String, Collection<IndexMetaData>> indexMetaDataMap = 
loadIndexMetaData(dataSource, columnMetaDataMap.keySet(), isExclude);
             for (Entry<String, Collection<ColumnMetaData>> entry : 
columnMetaDataMap.entrySet()) {
                 result.put(entry.getKey(), new TableMetaData(entry.getKey(), 
entry.getValue(), indexMetaDataMap.getOrDefault(entry.getKey(), 
Collections.emptyList())));
             }
@@ -71,11 +78,11 @@ public final class SQLServerTableMetaDataLoader implements 
DialectTableMetaDataL
         return result;
     }
     
-    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final DataSource dataSource, final Collection<String> 
existedTables) throws SQLException {
+    private Map<String, Collection<ColumnMetaData>> 
loadColumnMetaDataMap(final DataSource dataSource, final Collection<String> 
tables, final boolean isExclude) throws SQLException {
         Map<String, Collection<ColumnMetaData>> result = new HashMap<>();
         try (
                 Connection connection = dataSource.getConnection();
-                PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(existedTables))) {
+                PreparedStatement preparedStatement = 
connection.prepareStatement(getTableMetaDataSQL(tables, isExclude))) {
             Map<String, Integer> dataTypes = 
DataTypeLoader.load(connection.getMetaData());
             try (ResultSet resultSet = preparedStatement.executeQuery()) {
                 while (resultSet.next()) {
@@ -101,12 +108,17 @@ public final class SQLServerTableMetaDataLoader 
implements DialectTableMetaDataL
         return new ColumnMetaData(columnName, dataTypeMap.get(dataType), 
primaryKey, generated, caseSensitive);
     }
     
-    private String getTableMetaDataSQL(final Collection<String> existedTables) 
{
-        return existedTables.isEmpty() ? TABLE_META_DATA_SQL
-                : TABLE_META_DATA_SQL + 
String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, 
existedTables.stream().map(each -> String.format("'%s'", 
each)).collect(Collectors.joining(",")));
+    private String getTableMetaDataSQL(final Collection<String> tables, final 
boolean isExclude) {
+        return tables.isEmpty() ? TABLE_META_DATA_SQL
+                : getTableMetaDataSQLWithTables(tables, isExclude);
+    }
+    
+    private String getTableMetaDataSQLWithTables(final Collection<String> 
tables, final boolean isExclude) {
+        return isExclude ? TABLE_META_DATA_SQL + 
String.format(TABLE_META_DATA_SQL_WITH_EXISTED_TABLES, tables.stream().map(each 
-> String.format("'%s'", each)).collect(Collectors.joining(",")))
+                : TABLE_META_DATA_SQL + 
String.format(TABLE_META_DATA_SQL_IN_TABLES, tables.stream().map(each -> 
String.format("'%s'", each)).collect(Collectors.joining(",")));
     }
     
-    private Map<String, Collection<IndexMetaData>> loadIndexMetaData(final 
DataSource dataSource, final Collection<String> tableNames) throws SQLException 
{
+    private Map<String, Collection<IndexMetaData>> loadIndexMetaData(final 
DataSource dataSource, final Collection<String> tableNames, final boolean 
isExclude) throws SQLException {
         Map<String, Collection<IndexMetaData>> result = new HashMap<>();
         try (
                 Connection connection = dataSource.getConnection();
@@ -118,7 +130,8 @@ public final class SQLServerTableMetaDataLoader implements 
DialectTableMetaDataL
                     if (!result.containsKey(tableName)) {
                         result.put(tableName, new LinkedList<>());
                     }
-                    result.get(tableName).add(new 
IndexMetaData(IndexMetaDataUtil.getLogicIndexName(indexName, tableName)));
+                    // TODO Temporarily process the index scheme, and wait for 
the single table loader scheme to be modified and reconstructed
+                    result.get(tableName).add(new IndexMetaData(isExclude ? 
IndexMetaDataUtil.getLogicIndexName(indexName, tableName) : indexName));
                 }
             }
         }
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/spi/DialectTableMetaDataLoader.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/spi/DialectTableMetaDataLoader.java
index 7b47909..abf8414 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/spi/DialectTableMetaDataLoader.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/spi/DialectTableMetaDataLoader.java
@@ -39,4 +39,14 @@ public interface DialectTableMetaDataLoader extends 
DatabaseTypeAwareSPI {
      * @throws SQLException SQL exception
      */
     Map<String, TableMetaData> load(DataSource dataSource, Collection<String> 
existedTables) throws SQLException;
+    
+    /**
+     * Load table meta data with tables.
+     *
+     * @param dataSource data source
+     * @param tables tables
+     * @return table meta data map
+     * @throws SQLException SQL exception
+     */
+    Map<String, TableMetaData> loadWithTables(DataSource dataSource, 
Collection<String> tables) throws SQLException;
 }
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/spi/RuleBasedTableMetaDataBuilder.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/spi/RuleBasedTableMetaDataBuilder.java
index 3b48396..9f0741d 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/spi/RuleBasedTableMetaDataBuilder.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/builder/spi/RuleBasedTableMetaDataBuilder.java
@@ -20,14 +20,17 @@ package 
org.apache.shardingsphere.infra.metadata.schema.builder.spi;
 import 
org.apache.shardingsphere.infra.config.properties.ConfigurationProperties;
 import org.apache.shardingsphere.infra.database.type.DatabaseType;
 import org.apache.shardingsphere.infra.datanode.DataNodes;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.SchemaBuilderMaterials;
 import org.apache.shardingsphere.infra.metadata.schema.model.TableMetaData;
 import org.apache.shardingsphere.infra.rule.identifier.type.TableContainedRule;
 import org.apache.shardingsphere.infra.spi.ordered.OrderedSPI;
 
 import javax.sql.DataSource;
 import java.sql.SQLException;
+import java.util.Collection;
 import java.util.Map;
 import java.util.Optional;
+import java.util.concurrent.ExecutorService;
 
 /**
  * Table meta data builder with related rule.
@@ -51,6 +54,18 @@ public interface RuleBasedTableMetaDataBuilder<T extends 
TableContainedRule> ext
     Optional<TableMetaData> load(String tableName, DatabaseType databaseType, 
Map<String, DataSource> dataSourceMap, DataNodes dataNodes, T rule, 
ConfigurationProperties props) throws SQLException;
     
     /**
+     * Load table meta data.
+     *
+     * @param tableNames table names
+     * @param rule ShardingSphere rule
+     * @param materials schema builder materials
+     * @param executorService executor service
+     * @return table meta data map, whose key is the logic table name and whose 
value is the actual table meta data
+     * @throws SQLException SQL exception
+     */
+    Map<String, TableMetaData> load(Collection<String> tableNames, T rule, 
SchemaBuilderMaterials materials, ExecutorService executorService) throws 
SQLException;
+    
+    /**
      * Decorate table meta data.
      *
      * @param tableName table name
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/builder/TableMetaDataBuilderTest.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/builder/TableMetaDataBuilderTest.java
index 2624002..ed40bc4 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/builder/TableMetaDataBuilderTest.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/builder/TableMetaDataBuilderTest.java
@@ -74,4 +74,11 @@ public final class TableMetaDataBuilderTest {
                 databaseType, Collections.singletonMap("logic_db", 
dataSource), Arrays.asList(new CommonFixtureRule(), new 
DataNodeContainedFixtureRule()),
                 props)).isPresent());
     }
+    
+    @Test
+    public void assertLoadLogicTables() throws SQLException {
+        assertFalse(TableMetaDataBuilder.loadLogicTables(new 
SchemaBuilderMaterials(
+                databaseType, Collections.singletonMap("logic_db", 
dataSource), Arrays.asList(new CommonFixtureRule(),
+                new DataNodeContainedFixtureRule()), props), null).isEmpty());
+    }
 }
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/fixture/loader/CommonFixtureTableMetaDataBuilder.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/fixture/loader/CommonFixtureTableMetaDataBuilder.java
index 1ded7f5..b81ec67 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/fixture/loader/CommonFixtureTableMetaDataBuilder.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/fixture/loader/CommonFixtureTableMetaDataBuilder.java
@@ -20,13 +20,18 @@ package 
org.apache.shardingsphere.infra.metadata.schema.fixture.loader;
 import 
org.apache.shardingsphere.infra.config.properties.ConfigurationProperties;
 import org.apache.shardingsphere.infra.database.type.DatabaseType;
 import org.apache.shardingsphere.infra.datanode.DataNodes;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.SchemaBuilderMaterials;
 import 
org.apache.shardingsphere.infra.metadata.schema.fixture.rule.CommonFixtureRule;
 import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.RuleBasedTableMetaDataBuilder;
 import org.apache.shardingsphere.infra.metadata.schema.model.TableMetaData;
 
 import javax.sql.DataSource;
+import java.sql.SQLException;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.Map;
 import java.util.Optional;
+import java.util.concurrent.ExecutorService;
 
 public final class CommonFixtureTableMetaDataBuilder implements 
RuleBasedTableMetaDataBuilder<CommonFixtureRule> {
     
@@ -37,6 +42,12 @@ public final class CommonFixtureTableMetaDataBuilder 
implements RuleBasedTableMe
     }
     
     @Override
+    public Map<String, TableMetaData> load(final Collection<String> 
tableNames, final CommonFixtureRule rule, final SchemaBuilderMaterials 
materials,
+                                           final ExecutorService 
executorService) throws SQLException {
+        return Collections.emptyMap();
+    }
+    
+    @Override
     public TableMetaData decorate(final String tableName, final TableMetaData 
tableMetaData, final CommonFixtureRule rule) {
         return tableMetaData;
     }
diff --git 
a/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/fixture/loader/DataNodeContainedFixtureRuleBasedTableMetaDataBuilder.java
 
b/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/fixture/loader/DataNodeContainedFixtureRuleBasedTableMetaDataBuilder.java
index 854973c..d3f118f 100644
--- 
a/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/fixture/loader/DataNodeContainedFixtureRuleBasedTableMetaDataBuilder.java
+++ 
b/shardingsphere-infra/shardingsphere-infra-common/src/test/java/org/apache/shardingsphere/infra/metadata/schema/fixture/loader/DataNodeContainedFixtureRuleBasedTableMetaDataBuilder.java
@@ -20,15 +20,20 @@ package 
org.apache.shardingsphere.infra.metadata.schema.fixture.loader;
 import 
org.apache.shardingsphere.infra.config.properties.ConfigurationProperties;
 import org.apache.shardingsphere.infra.database.type.DatabaseType;
 import org.apache.shardingsphere.infra.datanode.DataNodes;
+import 
org.apache.shardingsphere.infra.metadata.schema.builder.SchemaBuilderMaterials;
 import 
org.apache.shardingsphere.infra.metadata.schema.fixture.rule.DataNodeContainedFixtureRule;
 import 
org.apache.shardingsphere.infra.metadata.schema.builder.spi.RuleBasedTableMetaDataBuilder;
 import org.apache.shardingsphere.infra.metadata.schema.model.ColumnMetaData;
 import org.apache.shardingsphere.infra.metadata.schema.model.TableMetaData;
 
 import javax.sql.DataSource;
+import java.sql.SQLException;
 import java.util.Collections;
+import java.util.Collection;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Optional;
+import java.util.concurrent.ExecutorService;
 
 public final class DataNodeContainedFixtureRuleBasedTableMetaDataBuilder 
implements RuleBasedTableMetaDataBuilder<DataNodeContainedFixtureRule> {
     
@@ -40,6 +45,19 @@ public final class 
DataNodeContainedFixtureRuleBasedTableMetaDataBuilder impleme
     }
     
     @Override
+    public Map<String, TableMetaData> load(final Collection<String> 
tableNames, final DataNodeContainedFixtureRule rule, final 
SchemaBuilderMaterials materials,
+                                           final ExecutorService 
executorService) throws SQLException {
+        if (!tableNames.isEmpty() && 
(tableNames.contains("data_node_routed_table1") || 
tableNames.contains("data_node_routed_table2"))) {
+            Map<String, TableMetaData> result = new LinkedHashMap<>();
+            for (String tableName : tableNames) {
+                result.put(tableName, new TableMetaData(tableName, 
Collections.emptyList(), Collections.emptyList()));
+            }
+            return result;
+        }
+        return Collections.emptyMap();
+    }
+    
+    @Override
     public TableMetaData decorate(final String tableName, final TableMetaData 
tableMetaData, final DataNodeContainedFixtureRule rule) {
         ColumnMetaData columnMetaData = new ColumnMetaData("id", 1, true, 
true, false);
         return new TableMetaData(tableName, 
Collections.singletonList(columnMetaData), Collections.emptyList());

Reply via email to