This is an automated email from the ASF dual-hosted git repository.

duanzhengqiang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new eaaabab7f75 Fix test cases (#28642)
eaaabab7f75 is described below

commit eaaabab7f750b821f57fbf15d1ea495bad6845e5
Author: Liang Zhang <[email protected]>
AuthorDate: Thu Oct 5 16:26:07 2023 +0800

    Fix test cases (#28642)
    
    * Fix test cases
    
    * For code format
    
    * Fix test cases
---
 .../infra/metadata/database/resource/ResourceMetaData.java        | 8 ++++----
 .../sql/execute/engine/jdbc/JDBCExecutorCallbackTest.java         | 5 +++--
 .../distsql/ral/queryable/ExportStorageNodesExecutorTest.java     | 4 ++--
 3 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java
index 3c51850be04..f63d95511ad 100644
--- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java
+++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java
@@ -48,7 +48,7 @@ public final class ResourceMetaData {
     public ResourceMetaData(final Map<String, DataSource> dataSources) {
         this.dataSources = StorageNodeAggregator.aggregateDataSources(dataSources);
         Map<String, StorageNode> storageNodes = StorageUnitNodeMapUtils.fromDataSources(dataSources);
-        Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap = dataSources.entrySet().stream().collect(
+        Map<String, DataSourcePoolProperties> dataSourcePoolPropsMap = dataSources.entrySet().stream().collect(
                 Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new));
         storageUnits = new LinkedHashMap<>();
         for (Entry<String, StorageNode> entry : storageNodes.entrySet()) {
@@ -56,12 +56,12 @@ public final class ResourceMetaData {
             if (!(dataSource instanceof CatalogSwitchableDataSource)) {
                 dataSource = new CatalogSwitchableDataSource(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl());
             }
-            storageUnits.put(entry.getKey(), new StorageUnit(null, entry.getValue(), dataSourcePoolPropertiesMap.get(entry.getKey()), dataSource));
+            storageUnits.put(entry.getKey(), new StorageUnit(null, entry.getValue(), dataSourcePoolPropsMap.get(entry.getKey()), dataSource));
         }
     }
     
     public ResourceMetaData(final String databaseName, final Map<StorageNodeName, DataSource> dataSources,
-                            final Map<String, StorageNode> storageNodes, final Map<String, DataSourcePoolProperties> propsMap) {
+                            final Map<String, StorageNode> storageNodes, final Map<String, DataSourcePoolProperties> dataSourcePoolPropsMap) {
         this.dataSources = dataSources;
         storageUnits = new LinkedHashMap<>();
         for (Entry<String, StorageNode> entry : storageNodes.entrySet()) {
@@ -69,7 +69,7 @@ public final class ResourceMetaData {
             if (!(dataSource instanceof CatalogSwitchableDataSource)) {
                 dataSource = new CatalogSwitchableDataSource(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl());
             }
-            storageUnits.put(entry.getKey(), new StorageUnit(databaseName, entry.getValue(), propsMap.get(entry.getKey()), dataSource));
+            storageUnits.put(entry.getKey(), new StorageUnit(databaseName, entry.getValue(), dataSourcePoolPropsMap.get(entry.getKey()), dataSource));
         }
     }
     
diff --git a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/jdbc/JDBCExecutorCallbackTest.java b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/jdbc/JDBCExecutorCallbackTest.java
index 8fef2152363..1369779743e 100644
--- a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/jdbc/JDBCExecutorCallbackTest.java
+++ b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/execute/engine/jdbc/JDBCExecutorCallbackTest.java
@@ -44,6 +44,7 @@ import java.util.Optional;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -65,7 +66,7 @@ class JDBCExecutorCallbackTest {
     @Test
     void assertExecuteFailedAndProtocolTypeDifferentWithDatabaseType() throws SQLException {
         Object saneResult = new Object();
-        ResourceMetaData resourceMetaData = mock(ResourceMetaData.class);
+        ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS);
         when(resourceMetaData.getStorageUnits().get("ds").getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"));
         JDBCExecutorCallback<Object> callback =
                 new JDBCExecutorCallback<Object>(TypedSPILoader.getService(DatabaseType.class, "MySQL"), resourceMetaData, mock(SelectStatement.class), true) {
@@ -86,7 +87,7 @@ class JDBCExecutorCallbackTest {
     
     @Test
     void assertExecuteSQLExceptionOccurredAndProtocolTypeSameAsDatabaseType() {
-        ResourceMetaData resourceMetaData = mock(ResourceMetaData.class);
+        ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS);
         when(resourceMetaData.getStorageUnits().get("ds").getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"));
         JDBCExecutorCallback<Object> callback =
                 new JDBCExecutorCallback<Object>(TypedSPILoader.getService(DatabaseType.class, "MySQL"), resourceMetaData, mock(SelectStatement.class), true) {
diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutorTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutorTest.java
index 36bc633d8d6..4deba8a34a1 100644
--- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutorTest.java
+++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutorTest.java
@@ -165,9 +165,9 @@ class ExportStorageNodesExecutorTest {
     }
     
     private Map<String, StorageUnit> createStorageUnits() {
-        StorageUnit storageUnit1 = mock(StorageUnit.class);
+        StorageUnit storageUnit1 = mock(StorageUnit.class, RETURNS_DEEP_STUBS);
         
when(storageUnit1.getDataSource()).thenReturn(createDataSource("ds_0"));
-        StorageUnit storageUnit2 = mock(StorageUnit.class);
+        StorageUnit storageUnit2 = mock(StorageUnit.class, RETURNS_DEEP_STUBS);
         
when(storageUnit2.getDataSource()).thenReturn(createDataSource("ds_2"));
         Map<String, StorageUnit> result = new LinkedHashMap<>(2, 1F);
         result.put("ds_0", storageUnit1);

Reply via email to