This is an automated email from the ASF dual-hosted git repository.

zhaojinchao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 3278b0870a6 Refactor ResourceMetaData (#28640)
3278b0870a6 is described below

commit 3278b0870a642ff449fc3a00334542132aec59a9
Author: Liang Zhang <[email protected]>
AuthorDate: Thu Oct 5 15:43:36 2023 +0800

    Refactor ResourceMetaData (#28640)
    
    * For code format
    
    * Remove ResourceMetaData.getConnectionProperties()
    
    * Remove ResourceMetaData.getStorageType()
    
    * For code format
---
 .../advice/TracingJDBCExecutorCallbackAdvice.java  |  4 ++--
 ...penTelemetryJDBCExecutorCallbackAdviceTest.java |  7 +++---
 .../database/resource/ResourceMetaData.java        | 27 ++--------------------
 .../engine/driver/jdbc/JDBCExecutorCallback.java   |  4 ++--
 .../engine/jdbc/JDBCExecutorCallbackTest.java      |  4 ++--
 .../metadata/ShardingSphereDatabaseMetaData.java   |  4 ++--
 .../manager/switcher/NewResourceSwitchManager.java | 16 ++++++-------
 .../manager/switcher/ResourceSwitchManager.java    | 16 ++++++-------
 .../ral/queryable/ExportStorageNodesExecutor.java  |  2 +-
 .../rql/storage/unit/ShowStorageUnitExecutor.java  |  4 ++--
 .../SelectInformationSchemataExecutor.java         |  2 +-
 .../test/it/rewrite/engine/SQLRewriterIT.java      |  2 +-
 12 files changed, 35 insertions(+), 57 deletions(-)
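
As the commit message summarizes, callers no longer use the removed ResourceMetaData.getConnectionProperties() and
ResourceMetaData.getStorageType() shortcuts; they resolve the StorageUnit from getStorageUnits() and read both values from it.
A minimal sketch of the migrated access pattern follows (the StorageUnitAccessExample class and printStorageInfo method are
illustrative only and not part of this commit; the accessor chain and ConnectionProperties getters match the diff below):

    import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties;
    import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
    import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData;
    import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
    
    public final class StorageUnitAccessExample {
        
        public static void printStorageInfo(final ResourceMetaData resourceMetaData, final String dataSourceName) {
            // Before this commit (methods removed here):
            //   ConnectionProperties connectionProps = resourceMetaData.getConnectionProperties(dataSourceName);
            //   DatabaseType storageType = resourceMetaData.getStorageType(dataSourceName);
            // After: resolve the StorageUnit once, then read connection properties and storage type from it.
            StorageUnit storageUnit = resourceMetaData.getStorageUnits().get(dataSourceName);
            ConnectionProperties connectionProps = storageUnit.getConnectionProperties();
            DatabaseType storageType = storageUnit.getStorageType();
            System.out.println(dataSourceName + " -> " + storageType.getType() + " @ " + connectionProps.getHostname() + ":" + connectionProps.getPort());
        }
    }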

diff --git a/agent/plugins/tracing/core/src/main/java/org/apache/shardingsphere/agent/plugin/tracing/core/advice/TracingJDBCExecutorCallbackAdvice.java b/agent/plugins/tracing/core/src/main/java/org/apache/shardingsphere/agent/plugin/tracing/core/advice/TracingJDBCExecutorCallbackAdvice.java
index 002e2c45cfc..c2c9c8005e4 100644
--- a/agent/plugins/tracing/core/src/main/java/org/apache/shardingsphere/agent/plugin/tracing/core/advice/TracingJDBCExecutorCallbackAdvice.java
+++ b/agent/plugins/tracing/core/src/main/java/org/apache/shardingsphere/agent/plugin/tracing/core/advice/TracingJDBCExecutorCallbackAdvice.java
@@ -41,8 +41,8 @@ public abstract class TracingJDBCExecutorCallbackAdvice<T> implements InstanceMe
     public final void beforeMethod(final TargetAdviceObject target, final Method method, final Object[] args, final String pluginType) {
         JDBCExecutionUnit executionUnit = (JDBCExecutionUnit) args[0];
         ResourceMetaData resourceMetaData = AgentReflectionUtils.getFieldValue(target, "resourceMetaData");
-        ConnectionProperties connectionProps = resourceMetaData.getConnectionProperties(executionUnit.getExecutionUnit().getDataSourceName());
-        DatabaseType storageType = resourceMetaData.getStorageType(executionUnit.getExecutionUnit().getDataSourceName());
+        ConnectionProperties connectionProps = resourceMetaData.getStorageUnits().get(executionUnit.getExecutionUnit().getDataSourceName()).getConnectionProperties();
+        DatabaseType storageType = resourceMetaData.getStorageUnits().get(executionUnit.getExecutionUnit().getDataSourceName()).getStorageType();
         recordExecuteInfo(RootSpanContext.get(), target, executionUnit, (boolean) args[1], connectionProps, storageType);
     }
     
diff --git a/agent/plugins/tracing/type/opentelemetry/src/test/java/org/apache/shardingsphere/agent/plugin/tracing/opentelemetry/advice/OpenTelemetryJDBCExecutorCallbackAdviceTest.java b/agent/plugins/tracing/type/opentelemetry/src/test/java/org/apache/shardingsphere/agent/plugin/tracing/opentelemetry/advice/OpenTelemetryJDBCExecutorCallbackAdviceTest.java
index abfc4760df5..e5712ab6317 100644
--- a/agent/plugins/tracing/type/opentelemetry/src/test/java/org/apache/shardingsphere/agent/plugin/tracing/opentelemetry/advice/OpenTelemetryJDBCExecutorCallbackAdviceTest.java
+++ b/agent/plugins/tracing/type/opentelemetry/src/test/java/org/apache/shardingsphere/agent/plugin/tracing/opentelemetry/advice/OpenTelemetryJDBCExecutorCallbackAdviceTest.java
@@ -57,6 +57,7 @@ import java.util.List;
 
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.MatcherAssert.assertThat;
+import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -95,9 +96,9 @@ class OpenTelemetryJDBCExecutorCallbackAdviceTest {
         when(connection.getMetaData()).thenReturn(databaseMetaData);
         when(statement.getConnection()).thenReturn(connection);
         executionUnit = new JDBCExecutionUnit(new ExecutionUnit(DATA_SOURCE_NAME, new SQLUnit(SQL, Collections.emptyList())), null, statement);
-        ResourceMetaData resourceMetaData = mock(ResourceMetaData.class);
-        when(resourceMetaData.getStorageType(DATA_SOURCE_NAME)).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL"));
-        when(resourceMetaData.getConnectionProperties(DATA_SOURCE_NAME)).thenReturn(mock(ConnectionProperties.class));
+        ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS);
+        when(resourceMetaData.getStorageUnits().get(DATA_SOURCE_NAME).getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL"));
+        when(resourceMetaData.getStorageUnits().get(DATA_SOURCE_NAME).getConnectionProperties()).thenReturn(mock(ConnectionProperties.class));
         JDBCExecutorCallback jdbcExecutorCallback = new JDBCExecutorCallbackFixture(TypedSPILoader.getService(DatabaseType.class, "MySQL"), resourceMetaData, new MySQLSelectStatement(), true);
         Plugins.getMemberAccessor().set(JDBCExecutorCallback.class.getDeclaredField("resourceMetaData"), jdbcExecutorCallback, resourceMetaData);
         targetObject = (TargetAdviceObject) jdbcExecutorCallback;
diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java
index 4e39f94aa61..3c51850be04 100644
--- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java
+++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java
@@ -18,14 +18,12 @@
 package org.apache.shardingsphere.infra.metadata.database.resource;
 
 import lombok.Getter;
-import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties;
-import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
 import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
 import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
 import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
 import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
 import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeAggregator;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
 import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
 import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapUtils;
 
@@ -91,28 +89,7 @@ public final class ResourceMetaData {
     }
     
     private boolean isExisted(final String dataSourceName, final Collection<String> existedDataSourceNames) {
-        return existedDataSourceNames.stream().anyMatch(each -> storageUnits.get(dataSourceName).getConnectionProperties()
-                .isInSameDatabaseInstance(storageUnits.get(each).getConnectionProperties()));
-    }
-    
-    /**
-     * Get connection properties.
-     *
-     * @param dataSourceName data source name
-     * @return connection properties
-     */
-    public ConnectionProperties getConnectionProperties(final String dataSourceName) {
-        return storageUnits.get(dataSourceName).getConnectionProperties();
-    }
-    
-    /**
-     * Get storage type.
-     *
-     * @param dataSourceName data source name
-     * @return storage type
-     */
-    public DatabaseType getStorageType(final String dataSourceName) {
-        return storageUnits.get(dataSourceName).getStorageType();
+        return existedDataSourceNames.stream().anyMatch(each -> storageUnits.get(dataSourceName).getConnectionProperties().isInSameDatabaseInstance(storageUnits.get(each).getConnectionProperties()));
     }
     
     /**
diff --git a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/driver/jdbc/JDBCExecutorCallback.java b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/driver/jdbc/JDBCExecutorCallback.java
index 47fdf0aa220..e675e944a7f 100644
--- a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/driver/jdbc/JDBCExecutorCallback.java
+++ b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/driver/jdbc/JDBCExecutorCallback.java
@@ -74,8 +74,8 @@ public abstract class JDBCExecutorCallback<T> implements ExecutorCallback<JDBCEx
      */
     private T execute(final JDBCExecutionUnit jdbcExecutionUnit, final boolean isTrunkThread) throws SQLException {
         SQLExecutorExceptionHandler.setExceptionThrown(isExceptionThrown);
-        DatabaseType storageType = resourceMetaData.getStorageType(jdbcExecutionUnit.getExecutionUnit().getDataSourceName());
-        ConnectionProperties connectionProps = resourceMetaData.getConnectionProperties(jdbcExecutionUnit.getExecutionUnit().getDataSourceName());
+        DatabaseType storageType = resourceMetaData.getStorageUnits().get(jdbcExecutionUnit.getExecutionUnit().getDataSourceName()).getStorageType();
+        ConnectionProperties connectionProps = resourceMetaData.getStorageUnits().get(jdbcExecutionUnit.getExecutionUnit().getDataSourceName()).getConnectionProperties();
         SQLExecutionHook sqlExecutionHook = new SPISQLExecutionHook();
         try {
             SQLUnit sqlUnit = jdbcExecutionUnit.getExecutionUnit().getSqlUnit();
diff --git a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/jdbc/JDBCExecutorCallbackTest.java b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/jdbc/JDBCExecutorCallbackTest.java
index 2874a515c52..8fef2152363 100644
--- a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/jdbc/JDBCExecutorCallbackTest.java
+++ b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/jdbc/JDBCExecutorCallbackTest.java
@@ -66,7 +66,7 @@ class JDBCExecutorCallbackTest {
     void assertExecuteFailedAndProtocolTypeDifferentWithDatabaseType() throws SQLException {
         Object saneResult = new Object();
         ResourceMetaData resourceMetaData = mock(ResourceMetaData.class);
-        when(resourceMetaData.getStorageType("ds")).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"));
+        when(resourceMetaData.getStorageUnits().get("ds").getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"));
         JDBCExecutorCallback<Object> callback =
                 new JDBCExecutorCallback<Object>(TypedSPILoader.getService(DatabaseType.class, "MySQL"), resourceMetaData, mock(SelectStatement.class), true) {
                     
@@ -87,7 +87,7 @@ class JDBCExecutorCallbackTest {
     @Test
     void assertExecuteSQLExceptionOccurredAndProtocolTypeSameAsDatabaseType() {
         ResourceMetaData resourceMetaData = mock(ResourceMetaData.class);
-        when(resourceMetaData.getStorageType("ds")).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"));
+        when(resourceMetaData.getStorageUnits().get("ds").getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"));
         JDBCExecutorCallback<Object> callback =
                 new JDBCExecutorCallback<Object>(TypedSPILoader.getService(DatabaseType.class, "MySQL"), resourceMetaData, mock(SelectStatement.class), true) {
                     
diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaData.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaData.java
index 4cf8b2dbec1..aa13f33ccd6 100644
--- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaData.java
+++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaData.java
@@ -223,13 +223,13 @@ public final class ShardingSphereDatabaseMetaData extends AdaptedDatabaseMetaDat
     
     private String getActualCatalog(final String catalog) {
         ConnectionProperties connectionProps = connection.getContextManager()
-                .getMetaDataContexts().getMetaData().getDatabase(connection.getDatabaseName()).getResourceMetaData().getConnectionProperties(getDataSourceName());
+                .getMetaDataContexts().getMetaData().getDatabase(connection.getDatabaseName()).getResourceMetaData().getStorageUnits().get(getDataSourceName()).getConnectionProperties();
         return null == catalog || !catalog.contains(DefaultDatabase.LOGIC_NAME) ? catalog : connectionProps.getCatalog();
     }
     
     private String getActualSchema(final String schema) {
         ConnectionProperties connectionProps = connection.getContextManager()
-                .getMetaDataContexts().getMetaData().getDatabase(connection.getDatabaseName()).getResourceMetaData().getConnectionProperties(getDataSourceName());
+                .getMetaDataContexts().getMetaData().getDatabase(connection.getDatabaseName()).getResourceMetaData().getStorageUnits().get(getDataSourceName()).getConnectionProperties();
         return null == schema || !schema.contains(DefaultDatabase.LOGIC_NAME) ? schema : connectionProps.getSchema();
     }
     
diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java
index 645a280b263..ce4263a5e7b 100644
--- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java
+++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java
@@ -52,8 +52,8 @@ public final class NewResourceSwitchManager {
                 .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getDataSourcePoolProperties(), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)));
         mergedPropsMap.putAll(storageUnitDataSourcePoolPropsMap);
         Map<String, StorageNode> toBeCreatedStorageUintNodeMap = StorageUnitNodeMapUtils.fromDataSourcePoolProperties(storageUnitDataSourcePoolPropsMap);
-        return new SwitchingResource(resourceMetaData,
-                getRegisterNewStorageResource(resourceMetaData, toBeCreatedStorageUintNodeMap, StorageNodeAggregator.aggregateDataSourcePoolProperties(storageUnitDataSourcePoolPropsMap)),
+        Map<StorageNodeName, DataSourcePoolProperties> dataSourcePoolPropsMap = StorageNodeAggregator.aggregateDataSourcePoolProperties(storageUnitDataSourcePoolPropsMap);
+        return new SwitchingResource(resourceMetaData, getRegisterNewStorageResource(resourceMetaData, toBeCreatedStorageUintNodeMap, dataSourcePoolPropsMap),
                 new StorageResource(Collections.emptyMap(), Collections.emptyMap()), mergedPropsMap);
     }
     
@@ -77,13 +77,13 @@ public final class NewResourceSwitchManager {
      * @return created switching resource
      */
     public SwitchingResource alterStorageUnit(final ResourceMetaData resourceMetaData, final Map<String, DataSourcePoolProperties> propsMap) {
-        Map<String, DataSourcePoolProperties> mergedDataSourcePoolPropertiesMap = new LinkedHashMap<>(resourceMetaData.getStorageUnits().entrySet().stream()
+        Map<String, DataSourcePoolProperties> mergedDataSourcePoolPropsMap = new LinkedHashMap<>(resourceMetaData.getStorageUnits().entrySet().stream()
                 .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getDataSourcePoolProperties(), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)));
-        mergedDataSourcePoolPropertiesMap.putAll(propsMap);
-        Map<String, StorageNode> toBeAlteredStorageUintNodeMap = StorageUnitNodeMapUtils.fromDataSourcePoolProperties(mergedDataSourcePoolPropertiesMap);
-        return new SwitchingResource(resourceMetaData,
-                getAlterNewStorageResource(toBeAlteredStorageUintNodeMap, StorageNodeAggregator.aggregateDataSourcePoolProperties(mergedDataSourcePoolPropertiesMap)),
-                getStaleStorageResource(resourceMetaData, toBeAlteredStorageUintNodeMap), mergedDataSourcePoolPropertiesMap);
+        mergedDataSourcePoolPropsMap.putAll(propsMap);
+        Map<String, StorageNode> toBeAlteredStorageUintNodeMap = StorageUnitNodeMapUtils.fromDataSourcePoolProperties(mergedDataSourcePoolPropsMap);
+        Map<StorageNodeName, DataSourcePoolProperties> dataSourcePoolPropsMap = StorageNodeAggregator.aggregateDataSourcePoolProperties(mergedDataSourcePoolPropsMap);
+        return new SwitchingResource(resourceMetaData, getAlterNewStorageResource(toBeAlteredStorageUintNodeMap, dataSourcePoolPropsMap),
+                getStaleStorageResource(resourceMetaData, toBeAlteredStorageUintNodeMap), mergedDataSourcePoolPropsMap);
     }
     
     private StorageResource getAlterNewStorageResource(final Map<String, StorageNode> storageUintNodeMap, final Map<StorageNodeName, DataSourcePoolProperties> dataSourcePoolPropsMap) {
diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java
index a585871a5e8..adef4ddc0fe 100644
--- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java
+++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java
@@ -53,9 +53,10 @@ public final class ResourceSwitchManager {
                 .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getDataSourcePoolProperties(), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)));
         mergedPropsMap.putAll(toBeChangedPropsMap);
         Map<String, StorageNode> toBeChangedStorageUnitNodeMap = StorageUnitNodeMapUtils.fromDataSourcePoolProperties(toBeChangedPropsMap);
-        return new SwitchingResource(resourceMetaData,
-                createNewStorageResource(resourceMetaData, toBeChangedStorageUnitNodeMap, StorageNodeAggregator.aggregateDataSourcePoolProperties(toBeChangedPropsMap)),
-                getStaleDataSources(resourceMetaData, toBeChangedStorageUnitNodeMap, mergedPropsMap), mergedPropsMap);
+        Map<StorageNodeName, DataSourcePoolProperties> dataSourcePoolPropsMap = StorageNodeAggregator.aggregateDataSourcePoolProperties(toBeChangedPropsMap);
+        StorageResource newStorageResource = createNewStorageResource(resourceMetaData, toBeChangedStorageUnitNodeMap, dataSourcePoolPropsMap);
+        StorageResource staleDataSources = getStaleDataSources(resourceMetaData, toBeChangedStorageUnitNodeMap, mergedPropsMap);
+        return new SwitchingResource(resourceMetaData, newStorageResource, staleDataSources, mergedPropsMap);
     }
     
     /**
@@ -92,16 +93,15 @@ public final class ResourceSwitchManager {
         staleStorageResource.getDataSources().putAll(getToBeDeletedDataSources(resourceMetaData.getDataSources(), toBeChangedStorageNodeNames));
         staleStorageResource.getStorageUnitNodeMap().putAll(
                 getToBeDeletedStorageUnitNodeMap(resourceMetaData.getStorageUnits(), toBeChangedStorageUnitNodeMap.keySet()));
-        return new SwitchingResource(resourceMetaData,
-                createNewStorageResource(resourceMetaData, toBeChangedStorageUnitNodeMap, StorageNodeAggregator.aggregateDataSourcePoolProperties(toBeChangedPropsMap)),
-                staleStorageResource, mergedDataSourcePoolPropertiesMap);
+        Map<StorageNodeName, DataSourcePoolProperties> dataSourcePoolPropsMap = StorageNodeAggregator.aggregateDataSourcePoolProperties(toBeChangedPropsMap);
+        return new SwitchingResource(
+                resourceMetaData, createNewStorageResource(resourceMetaData, toBeChangedStorageUnitNodeMap, dataSourcePoolPropsMap), staleStorageResource, mergedDataSourcePoolPropertiesMap);
     }
     
     private StorageResource createNewStorageResource(final ResourceMetaData resourceMetaData,
                                                      final Map<String, StorageNode> toBeChangedStorageUnitNodeMap, final Map<StorageNodeName, DataSourcePoolProperties> dataSourcePoolPropsMap) {
         Collection<StorageNodeName> toBeChangedStorageNodeName = toBeChangedStorageUnitNodeMap.values().stream().map(StorageNode::getName).collect(Collectors.toSet());
-        Map<StorageNodeName, DataSource> storageNodes =
-                getNewStorageNodes(resourceMetaData, toBeChangedStorageNodeName, dataSourcePoolPropsMap);
+        Map<StorageNodeName, DataSource> storageNodes = getNewStorageNodes(resourceMetaData, toBeChangedStorageNodeName, dataSourcePoolPropsMap);
         Map<String, StorageNode> storageUnitNodeMap = getNewStorageUnitNodeMap(resourceMetaData, toBeChangedStorageUnitNodeMap);
         return new StorageResource(storageNodes, storageUnitNodeMap);
     }
diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java
index e370cd4a442..dd85561b8ba 100644
--- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java
+++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java
@@ -88,7 +88,7 @@ public final class ExportStorageNodesExecutor implements MetaDataRequiredQueryab
     private Map<String, Collection<ExportedStorageNode>> generateDatabaseExportStorageNodesData(final ShardingSphereDatabase database) {
         Map<String, ExportedStorageNode> storageNodes = new LinkedHashMap<>();
         for (Entry<String, StorageUnit> entry : database.getResourceMetaData().getStorageUnits().entrySet()) {
-            ConnectionProperties connectionProps = database.getResourceMetaData().getConnectionProperties(entry.getKey());
+            ConnectionProperties connectionProps = database.getResourceMetaData().getStorageUnits().get(entry.getKey()).getConnectionProperties();
             String databaseInstanceIp = getDatabaseInstanceIp(connectionProps);
             if (storageNodes.containsKey(databaseInstanceIp)) {
                 continue;
diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java
index 4f3198f0917..5c7dea63c4b 100644
--- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java
+++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java
@@ -61,11 +61,11 @@ public final class ShowStorageUnitExecutor implements RQLExecutor<ShowStorageUni
         Collection<LocalDataQueryResultRow> result = new LinkedList<>();
         for (Entry<String, DataSourcePoolProperties> entry : getDataSourcePoolPropertiesMap(database, sqlStatement).entrySet()) {
             String key = entry.getKey();
-            ConnectionProperties connectionProps = resourceMetaData.getConnectionProperties(key);
+            ConnectionProperties connectionProps = resourceMetaData.getStorageUnits().get(key).getConnectionProperties();
             Map<String, Object> poolProps = entry.getValue().getPoolPropertySynonyms().getStandardProperties();
             Map<String, Object> customProps = getCustomProps(entry.getValue().getCustomProperties().getProperties(), connectionProps.getQueryProperties());
             result.add(new LocalDataQueryResultRow(key,
-                    resourceMetaData.getStorageType(key).getType(),
+                    resourceMetaData.getStorageUnits().get(key).getStorageType().getType(),
                     connectionProps.getHostname(),
                     connectionProps.getPort(),
                     connectionProps.getCatalog(),
diff --git a/proxy/backend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutor.java b/proxy/backend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutor.java
index d43e871390b..f75b822132f 100644
--- a/proxy/backend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutor.java
+++ b/proxy/backend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutor.java
@@ -96,7 +96,7 @@ public final class SelectInformationSchemataExecutor extends DefaultDatabaseMeta
     protected void preProcess(final String databaseName, final Map<String, Object> rows, final Map<String, String> alias) {
         ResourceMetaData resourceMetaData = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData();
         Collection<String> catalogs = resourceMetaData.getStorageUnits().keySet()
-                .stream().map(each -> resourceMetaData.getConnectionProperties(each).getCatalog()).collect(Collectors.toSet());
+                .stream().map(each -> resourceMetaData.getStorageUnits().get(each).getConnectionProperties().getCatalog()).collect(Collectors.toSet());
         schemaNameAlias = alias.getOrDefault(SCHEMA_NAME, "");
         String rowValue = rows.getOrDefault(schemaNameAlias, "").toString();
         queryDatabase = !rowValue.isEmpty();
diff --git a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/SQLRewriterIT.java b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/SQLRewriterIT.java
index 1af42cc7bce..bd624913c7b 100644
--- a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/SQLRewriterIT.java
+++ b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/SQLRewriterIT.java
@@ -173,7 +173,7 @@ public abstract class SQLRewriterIT {
     private Map<String, StorageUnit> createStorageUnits(final DatabaseConfiguration databaseConfig, final DatabaseType databaseType) {
         Map<String, StorageUnit> result = new LinkedHashMap<>(databaseConfig.getDataSources().size(), 1F);
         for (Entry<String, DataSource> entry : databaseConfig.getDataSources().entrySet()) {
-            StorageUnit storageUnit = mock(StorageUnit.class);
+            StorageUnit storageUnit = mock(StorageUnit.class, RETURNS_DEEP_STUBS);
             when(storageUnit.getStorageType()).thenReturn(databaseType);
             result.put(entry.getKey(), storageUnit);
         }
