This is an automated email from the ASF dual-hosted git repository.

zhangliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new a95f19f41c7 Add more test cases on ImportMetaDataExecutorTest (#38027)
a95f19f41c7 is described below

commit a95f19f41c7954281f887bc3874de3ef4cbc9246
Author: Liang Zhang <[email protected]>
AuthorDate: Fri Feb 13 00:24:20 2026 +0800

    Add more test cases on ImportMetaDataExecutorTest (#38027)
---
 .../imports/ImportMetaDataExecutorTest.java        | 84 +++++++++++++---------
 1 file changed, 52 insertions(+), 32 deletions(-)

diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/imports/ImportMetaDataExecutorTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/imports/ImportMetaDataExecutorTest.java
index 43c10d4019c..54ce4b74312 100644
--- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/imports/ImportMetaDataExecutorTest.java
+++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/imports/ImportMetaDataExecutorTest.java
@@ -17,11 +17,13 @@
 
 package org.apache.shardingsphere.proxy.backend.handler.distsql.ral.updatable.imports;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.shardingsphere.authority.rule.AuthorityRule;
 import org.apache.shardingsphere.authority.rule.builder.DefaultAuthorityRuleConfigurationBuilder;
 import org.apache.shardingsphere.database.connector.core.type.DatabaseType;
 import org.apache.shardingsphere.database.exception.core.exception.syntax.database.DatabaseCreateExistsException;
+import org.apache.shardingsphere.distsql.handler.engine.update.DistSQLUpdateExecutor;
 import org.apache.shardingsphere.distsql.statement.type.ral.updatable.ImportMetaDataStatement;
 import org.apache.shardingsphere.globalclock.rule.GlobalClockRule;
 import org.apache.shardingsphere.globalclock.rule.builder.DefaultGlobalClockRuleConfigurationBuilder;
@@ -31,6 +33,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey;
 import org.apache.shardingsphere.infra.config.props.temporary.TemporaryConfigurationProperties;
 import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
 import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
+import org.apache.shardingsphere.infra.exception.generic.FileIOException;
 import org.apache.shardingsphere.infra.exception.kernel.metadata.resource.storageunit.EmptyStorageUnitException;
 import org.apache.shardingsphere.infra.instance.ComputeNodeInstance;
 import org.apache.shardingsphere.infra.instance.ComputeNodeInstanceContext;
@@ -50,18 +53,22 @@ import org.apache.shardingsphere.mode.manager.ContextManager;
 import org.apache.shardingsphere.mode.manager.standalone.workerid.StandaloneWorkerIdGenerator;
 import org.apache.shardingsphere.mode.metadata.MetaDataContexts;
 import org.apache.shardingsphere.test.infra.fixture.jdbc.MockedDataSource;
-import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
 import javax.sql.DataSource;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Objects;
+import java.util.Optional;
 import java.util.Properties;
+import java.util.stream.Stream;
 import java.util.stream.Collectors;
 
 import static org.junit.jupiter.api.Assertions.assertNotNull;
@@ -83,43 +90,56 @@ class ImportMetaDataExecutorTest {
     
     private static final String EMPTY_DATABASE_NAME = "empty_metadata";
     
-    private final Map<String, String> featureMap = new HashMap<>(1, 1F);
+    private static final String EMPTY_METADATA_FILE_PATH = "/conf/import/empty-metadata.json";
     
-    @BeforeEach
-    void setup() {
-        featureMap.put(EMPTY_DATABASE_NAME, "/conf/import/empty-metadata.json");
-    }
+    private static final String NOT_EXIST_METADATA_FILE_PATH = "/__not_exist__/empty-metadata.json";
     
-    @Test
-    void assertImportEmptyMetaData() {
-        ImportMetaDataExecutor executor = new ImportMetaDataExecutor();
-        ContextManager contextManager = mock(ContextManager.class, RETURNS_DEEP_STUBS);
-        when(contextManager.getMetaDataContexts().getMetaData().getTemporaryProps()).thenReturn(new TemporaryConfigurationProperties(new Properties()));
-        assertThrows(EmptyStorageUnitException.class, () -> executor.executeUpdate(
-                new ImportMetaDataStatement(null, Objects.requireNonNull(ImportMetaDataExecutorTest.class.getResource(featureMap.get(EMPTY_DATABASE_NAME))).getPath()), contextManager));
+    private final ImportMetaDataExecutor executor = (ImportMetaDataExecutor) TypedSPILoader.getService(DistSQLUpdateExecutor.class, ImportMetaDataStatement.class);
+    
+    @ParameterizedTest(name = "{0}")
+    @MethodSource("importFailureCases")
+    void assertExecuteUpdateFailure(final String caseName, final ImportMetaDataStatement sqlStatement, final ContextManager contextManager, final Class<? extends Throwable> expectedException) {
+        assertThrows(expectedException, () -> executor.executeUpdate(sqlStatement, contextManager));
     }
     
     @Test
-    void assertImportMetaDataFromJsonValue() {
-        ContextManager contextManager = mock(ContextManager.class, RETURNS_DEEP_STUBS);
-        when(contextManager.getMetaDataContexts().getMetaData().getTemporaryProps()).thenReturn(new TemporaryConfigurationProperties(new Properties()));
-        ImportMetaDataExecutor executor = new ImportMetaDataExecutor();
-        executor.executeUpdate(new ImportMetaDataStatement(Base64.encodeBase64String(METADATA_VALUE.getBytes()), null), contextManager);
+    void assertExecuteUpdateFromJsonValue() {
+        ContextManager contextManager = createContextManagerWithTemporaryProps();
+        executor.executeUpdate(new ImportMetaDataStatement(Base64.encodeBase64String(METADATA_VALUE.getBytes(StandardCharsets.UTF_8)), null), contextManager);
         assertNotNull(contextManager.getDatabase("normal_db"));
     }
     
-    @Test
-    void assertImportExistedMetaDataFromFile() {
-        ImportMetaDataExecutor executor = new ImportMetaDataExecutor();
-        ContextManager contextManager = mockContextManager();
-        assertThrows(DatabaseCreateExistsException.class, () -> executor.executeUpdate(
-                new ImportMetaDataStatement(null, Objects.requireNonNull(ImportMetaDataExecutorTest.class.getResource(featureMap.get(EMPTY_DATABASE_NAME))).getPath()), contextManager));
+    private static Stream<Arguments> importFailureCases() {
+        return Stream.of(
+                Arguments.of("import empty metadata", 
createImportMetaDataStatement(EMPTY_METADATA_FILE_PATH), 
createContextManagerWithTemporaryProps(), EmptyStorageUnitException.class),
+                Arguments.of("import existed metadata", 
createImportMetaDataStatement(EMPTY_METADATA_FILE_PATH), 
createContextManagerWithExistedDatabase(),
+                        DatabaseCreateExistsException.class),
+                Arguments.of("import metadata from missing file", new 
ImportMetaDataStatement(null, NOT_EXIST_METADATA_FILE_PATH), 
mock(ContextManager.class), FileIOException.class),
+                Arguments.of("import metadata with empty file path in file 
branch", createStatementWithEmptyFilePathInFileBranch(), 
mock(ContextManager.class), JsonProcessingException.class));
+    }
+    
+    private static ImportMetaDataStatement createImportMetaDataStatement(final String filePath) {
+        URL url = ImportMetaDataExecutorTest.class.getResource(filePath);
+        assertNotNull(url);
+        return new ImportMetaDataStatement(null, url.getPath());
+    }
+    
+    private static ImportMetaDataStatement createStatementWithEmptyFilePathInFileBranch() {
+        ImportMetaDataStatement result = mock(ImportMetaDataStatement.class);
+        when(result.getFilePath()).thenReturn(Optional.of("/tmp/mock-metadata.json"), Optional.empty());
+        when(result.getMetaDataValue()).thenReturn("");
+        return result;
+    }
+    
+    private static ContextManager createContextManagerWithTemporaryProps() {
+        ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS);
+        when(result.getMetaDataContexts().getMetaData().getTemporaryProps()).thenReturn(new TemporaryConfigurationProperties(new Properties()));
+        return result;
     }
     
-    private ContextManager mockContextManager() {
+    private static ContextManager createContextManagerWithExistedDatabase() {
         ShardingSphereDatabase database = mockDatabase();
-        ShardingSphereMetaData metaData = new ShardingSphereMetaData(Collections.singleton(database),
-                new ResourceMetaData(Collections.emptyMap()),
+        ShardingSphereMetaData metaData = new ShardingSphereMetaData(Collections.singleton(database), new ResourceMetaData(Collections.emptyMap()),
                 new RuleMetaData(Arrays.asList(new AuthorityRule(new DefaultAuthorityRuleConfigurationBuilder().build()),
                         new GlobalClockRule(new DefaultGlobalClockRuleConfigurationBuilder().build()))),
                 new ConfigurationProperties(PropertiesBuilder.build(new Property(ConfigurationPropertyKey.SQL_SHOW.getKey(), "true"))));
@@ -133,7 +153,7 @@ class ImportMetaDataExecutorTest {
         return result;
     }
     
-    private ShardingSphereDatabase mockDatabase() {
+    private static ShardingSphereDatabase mockDatabase() {
         Map<String, StorageUnit> storageUnits = createStorageUnits();
         ShardingSphereDatabase result = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS);
         when(result.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE"));
@@ -144,7 +164,7 @@ class ImportMetaDataExecutorTest {
         return result;
     }
     
-    private Map<String, StorageUnit> createStorageUnits() {
+    private static Map<String, StorageUnit> createStorageUnits() {
         Map<String, DataSourcePoolProperties> propsMap = 
createDataSourceMap().entrySet().stream()
                 .collect(Collectors.toMap(Entry::getKey, entry -> 
DataSourcePoolPropertiesCreator.create(entry.getValue())));
         Map<String, StorageUnit> result = new LinkedHashMap<>(propsMap.size(), 
1F);
@@ -156,7 +176,7 @@ class ImportMetaDataExecutorTest {
         return result;
     }
     
-    private Map<String, DataSource> createDataSourceMap() {
+    private static Map<String, DataSource> createDataSourceMap() {
         Map<String, DataSource> result = new LinkedHashMap<>(2, 1F);
         result.put("ds_0", new MockedDataSource());
         result.put("ds_1", new MockedDataSource());
