This is an automated email from the ASF dual-hosted git repository.

chengzhang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 565d5233497 Fix sonar. (#33909)
565d5233497 is described below

commit 565d52334975ce6f42d2d49ea0a8491eef756ce7
Author: Cong Hu <iamhuc...@gmail.com>
AuthorDate: Wed Dec 4 11:31:42 2024 +0800

    Fix sonar. (#33909)
---
 .../pipeline/scenario/migration/api/MigrationJobAPI.java | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)

diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java
index b0a820defa2..04f69fd3eb5 100644
--- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java
+++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java
@@ -156,14 +156,13 @@ public final class MigrationJobAPI implements TransmissionJobAPI {
         }
         result.setSources(configSources);
         ShardingSphereDatabase targetDatabase = PipelineContextManager.getProxyContext().getContextManager().getMetaDataContexts().getMetaData().getDatabase(param.getTargetDatabaseName());
-        Map<String, String> targetTableSchemaMap = buildTargetTableSchemaMap(sourceDataNodes);
-        PipelineDataSourceConfiguration targetPipelineDataSourceConfig = buildTargetPipelineDataSourceConfiguration(targetDatabase, targetTableSchemaMap);
+        PipelineDataSourceConfiguration targetPipelineDataSourceConfig = buildTargetPipelineDataSourceConfiguration(targetDatabase);
         result.setTarget(buildYamlPipelineDataSourceConfiguration(targetPipelineDataSourceConfig.getType(), targetPipelineDataSourceConfig.getParameter()));
         result.setTargetDatabaseType(targetPipelineDataSourceConfig.getDatabaseType().getType());
         List<JobDataNodeEntry> tablesFirstDataNodes = sourceDataNodes.entrySet().stream()
                 .map(entry -> new JobDataNodeEntry(entry.getKey(), entry.getValue().subList(0, 1))).collect(Collectors.toList());
         result.setTargetTableNames(new ArrayList<>(sourceDataNodes.keySet()).stream().sorted().collect(Collectors.toList()));
-        result.setTargetTableSchemaMap(targetTableSchemaMap);
+        result.setTargetTableSchemaMap(buildTargetTableSchemaMap(sourceDataNodes));
         result.setTablesFirstDataNodes(new JobDataNodeLine(tablesFirstDataNodes).marshal());
         result.setJobShardingDataNodes(JobDataNodeLineConvertUtils.convertDataNodesToLines(sourceDataNodes).stream().map(JobDataNodeLine::marshal).collect(Collectors.toList()));
         result.setJobId(PipelineJobIdUtils.marshal(new MigrationJobId(contextKey, result.getJobShardingDataNodes())));
@@ -177,27 +176,26 @@ public final class MigrationJobAPI implements TransmissionJobAPI {
         return result;
     }
     
-    private PipelineDataSourceConfiguration buildTargetPipelineDataSourceConfiguration(final ShardingSphereDatabase targetDatabase, final Map<String, String> targetTableSchemaMap) {
+    private PipelineDataSourceConfiguration buildTargetPipelineDataSourceConfiguration(final ShardingSphereDatabase targetDatabase) {
         Map<String, Map<String, Object>> targetPoolProps = new HashMap<>(targetDatabase.getResourceMetaData().getStorageUnits().size(), 1F);
         YamlDataSourceConfigurationSwapper dataSourceConfigSwapper = new YamlDataSourceConfigurationSwapper();
         for (Entry<String, StorageUnit> entry : targetDatabase.getResourceMetaData().getStorageUnits().entrySet()) {
             targetPoolProps.put(entry.getKey(), dataSourceConfigSwapper.swapToMap(entry.getValue().getDataSourcePoolProperties()));
         }
-        YamlRootConfiguration targetRootConfig = buildYamlRootConfiguration(targetDatabase.getName(), targetPoolProps, targetDatabase.getRuleMetaData().getConfigurations(), targetTableSchemaMap);
+        YamlRootConfiguration targetRootConfig = buildYamlRootConfiguration(targetDatabase.getName(), targetPoolProps, targetDatabase.getRuleMetaData().getConfigurations());
         return new ShardingSpherePipelineDataSourceConfiguration(targetRootConfig);
     }
     
-    private YamlRootConfiguration buildYamlRootConfiguration(final String databaseName, final Map<String, Map<String, Object>> yamlDataSources, final Collection<RuleConfiguration> rules,
-                                                             final Map<String, String> targetTableSchemaMap) {
+    private YamlRootConfiguration buildYamlRootConfiguration(final String databaseName, final Map<String, Map<String, Object>> yamlDataSources, final Collection<RuleConfiguration> rules) {
         ShardingSpherePreconditions.checkNotEmpty(rules, () -> new EmptyRuleException(databaseName));
         YamlRootConfiguration result = new YamlRootConfiguration();
         result.setDatabaseName(databaseName);
         result.setDataSources(yamlDataSources);
-        result.setRules(getYamlRuleConfigurations(rules, targetTableSchemaMap));
+        result.setRules(getYamlRuleConfigurations(rules));
         return result;
     }
     
-    private Collection<YamlRuleConfiguration> getYamlRuleConfigurations(final Collection<RuleConfiguration> rules, final Map<String, String> targetTableSchemaMap) {
+    private Collection<YamlRuleConfiguration> getYamlRuleConfigurations(final Collection<RuleConfiguration> rules) {
         Collection<YamlRuleConfiguration> ruleConfigurations = new YamlRuleConfigurationSwapperEngine().swapToYamlRuleConfigurations(rules);
         Optional<YamlSingleRuleConfiguration> originalSingleRuleConfig =
                 ruleConfigurations.stream().filter(YamlSingleRuleConfiguration.class::isInstance).map(YamlSingleRuleConfiguration.class::cast).findFirst();

Reply via email to