This is an automated email from the ASF dual-hosted git repository.

sunnianjun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new e4ec1dd97b0 Refactor ShowStorageUnitExecutor (#28687)
e4ec1dd97b0 is described below

commit e4ec1dd97b0c7885baa9a0e98868a2fd5bc20e08
Author: Liang Zhang <[email protected]>
AuthorDate: Sun Oct 8 18:37:51 2023 +0800

    Refactor ShowStorageUnitExecutor (#28687)
---
 .../rql/storage/unit/ShowStorageUnitExecutor.java  | 42 +++++++++-------------
 1 file changed, 16 insertions(+), 26 deletions(-)

diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java
index 25604d8d3db..13ea8dcc689 100644
--- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java
+++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java
@@ -20,7 +20,6 @@ package org.apache.shardingsphere.proxy.backend.handler.distsql.rql.storage.unit
 import org.apache.shardingsphere.distsql.handler.query.RQLExecutor;
 import org.apache.shardingsphere.distsql.parser.statement.rql.show.ShowStorageUnitsStatement;
 import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties;
-import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData;
 import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
 import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry;
 import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
@@ -28,7 +27,6 @@ import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourceP
 import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
 import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow;
 import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase;
-import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData;
 import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
 import org.apache.shardingsphere.infra.rule.ShardingSphereRule;
 import org.apache.shardingsphere.infra.util.json.JsonUtils;
@@ -57,15 +55,15 @@ public final class ShowStorageUnitExecutor implements RQLExecutor<ShowStorageUni
     
     @Override
     public Collection<LocalDataQueryResultRow> getRows(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) {
-        ResourceMetaData resourceMetaData = database.getResourceMetaData();
         Collection<LocalDataQueryResultRow> result = new LinkedList<>();
         for (Entry<String, DataSourcePoolProperties> entry : getDataSourcePoolPropertiesMap(database, sqlStatement).entrySet()) {
-            String key = entry.getKey();
-            ConnectionProperties connectionProps = resourceMetaData.getStorageUnits().get(key).getConnectionProperties();
+            String storageUnitName = entry.getKey();
+            StorageUnit storageUnit = database.getResourceMetaData().getStorageUnits().get(storageUnitName);
+            ConnectionProperties connectionProps = storageUnit.getConnectionProperties();
             Map<String, Object> poolProps = entry.getValue().getPoolPropertySynonyms().getStandardProperties();
-            Map<String, Object> customProps = getCustomProps(entry.getValue().getCustomProperties().getProperties(), connectionProps.getQueryProperties());
-            result.add(new LocalDataQueryResultRow(key,
-                    resourceMetaData.getStorageUnits().get(key).getStorageType().getType(),
+            Map<String, Object> customProps = getCustomProperties(entry.getValue().getCustomProperties().getProperties(), connectionProps.getQueryProperties());
+            result.add(new LocalDataQueryResultRow(storageUnitName,
+                    storageUnit.getStorageType().getType(),
                     connectionProps.getHostname(),
                     connectionProps.getPort(),
                     connectionProps.getCatalog(),
@@ -80,15 +78,6 @@ public final class ShowStorageUnitExecutor implements RQLExecutor<ShowStorageUni
         return result;
     }
     
-    private Map<String, Object> getCustomProps(final Map<String, Object> customProps, final Properties queryProps) {
-        Map<String, Object> result = new LinkedHashMap<>(customProps.size() + 1, 1F);
-        result.putAll(customProps);
-        if (!queryProps.isEmpty()) {
-            result.put("queryProperties", queryProps);
-        }
-        return result;
-    }
-    
     private Map<String, DataSourcePoolProperties> getDataSourcePoolPropertiesMap(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) {
         Map<String, DataSourcePoolProperties> result = new LinkedHashMap<>(database.getResourceMetaData().getStorageUnits().size(), 1F);
         Map<String, DataSourcePoolProperties> propsMap = database.getResourceMetaData().getStorageUnits().entrySet().stream()
@@ -113,11 +102,10 @@ public final class ShowStorageUnitExecutor implements RQLExecutor<ShowStorageUni
     
     private DataSourcePoolProperties getDataSourcePoolProperties(final Map<String, DataSourcePoolProperties> propsMap, final String storageUnitName,
                                                                  final DatabaseType databaseType, final DataSource dataSource) {
-        DataSourcePoolProperties result = getDataSourcePoolProperties(dataSource);
-        DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData();
-        if (dialectDatabaseMetaData.isInstanceConnectionAvailable() && propsMap.containsKey(storageUnitName)) {
-            DataSourcePoolProperties unitDataSourcePoolProps = propsMap.get(storageUnitName);
-            for (Entry<String, Object> entry : unitDataSourcePoolProps.getPoolPropertySynonyms().getStandardProperties().entrySet()) {
+        DataSourcePoolProperties result = DataSourcePoolPropertiesCreator.create(
+                dataSource instanceof CatalogSwitchableDataSource ? ((CatalogSwitchableDataSource) dataSource).getDataSource() : dataSource);
+        if (new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData().isInstanceConnectionAvailable() && propsMap.containsKey(storageUnitName)) {
+            for (Entry<String, Object> entry : propsMap.get(storageUnitName).getPoolPropertySynonyms().getStandardProperties().entrySet()) {
                 if (null != entry.getValue()) {
                     result.getPoolPropertySynonyms().getStandardProperties().put(entry.getKey(), entry.getValue());
                 }
@@ -126,10 +114,12 @@ public final class ShowStorageUnitExecutor implements RQLExecutor<ShowStorageUni
         return result;
     }
     
-    private DataSourcePoolProperties getDataSourcePoolProperties(final DataSource dataSource) {
-        return dataSource instanceof CatalogSwitchableDataSource
-                ? DataSourcePoolPropertiesCreator.create(((CatalogSwitchableDataSource) dataSource).getDataSource())
-                : DataSourcePoolPropertiesCreator.create(dataSource);
+    private Map<String, Object> getCustomProperties(final Map<String, Object> customProps, final Properties queryProps) {
+        Map<String, Object> result = new LinkedHashMap<>(customProps);
+        if (!queryProps.isEmpty()) {
+            result.put("queryProperties", queryProps);
+        }
+        return result;
     }
     
     private String getStandardProperty(final Map<String, Object> standardProps, final String key) {
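
In short, this refactor renames the loop variable key to storageUnitName and reuses a single StorageUnit lookup in getRows(), inlines the former one-argument getDataSourcePoolProperties(DataSource) overload (which only unwrapped CatalogSwitchableDataSource before calling DataSourcePoolPropertiesCreator.create) at its single call site, and renames getCustomProps() to getCustomProperties(), now built with the LinkedHashMap copy constructor. Below is a minimal, self-contained sketch of the unwrap-and-merge behavior; every type and class name in it is a hypothetical stand-in, not a real ShardingSphere class:

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.Properties;
    
    // Stand-in types for illustration only; the production code uses ShardingSphere's
    // CatalogSwitchableDataSource, DataSourcePoolProperties and related classes.
    public final class ShowStorageUnitRefactorSketch {
        
        interface SketchDataSource {
        }
        
        // Mirrors a wrapper data source that delegates to an inner data source.
        record CatalogSwitchable(SketchDataSource delegate) implements SketchDataSource {
        }
        
        record Plain(String name) implements SketchDataSource {
        }
        
        // The unwrap that used to live in a private one-argument helper is now an
        // inline ternary at its only call site; this method shows the same logic.
        static SketchDataSource unwrap(final SketchDataSource dataSource) {
            return dataSource instanceof CatalogSwitchable ? ((CatalogSwitchable) dataSource).delegate() : dataSource;
        }
        
        // Mirrors getCustomProperties(): copy the custom properties, then append the
        // query properties only when they are not empty.
        static Map<String, Object> getCustomProperties(final Map<String, Object> customProps, final Properties queryProps) {
            Map<String, Object> result = new LinkedHashMap<>(customProps);
            if (!queryProps.isEmpty()) {
                result.put("queryProperties", queryProps);
            }
            return result;
        }
        
        public static void main(final String[] args) {
            SketchDataSource wrapped = new CatalogSwitchable(new Plain("ds_0"));
            System.out.println(unwrap(wrapped));
            
            Properties queryProps = new Properties();
            queryProps.setProperty("useSSL", "false");
            System.out.println(getCustomProperties(Map.of("maxPoolSize", 50), queryProps));
        }
    }

The observable behavior appears unchanged: the edit removes a single-use helper, a single-use local variable, and two now-unused imports, and aligns the helper name with the other get*Properties methods in the class.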
