This is an automated email from the ASF dual-hosted git repository.
sunnianjun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new 139f6dbcad4 Fix sonar issue (#29204)
139f6dbcad4 is described below
commit 139f6dbcad48f265b6f656fd2c1aa2b2e0a165f5
Author: Liang Zhang <[email protected]>
AuthorDate: Sat Nov 25 19:24:42 2023 +0800
Fix sonar issue (#29204)
* Remove useless codes
* Remove useless codes
* Fix sonar issue
---
.../util/datetime/StandardDateTimeFormatter.java | 4 ++
.../migration/api/impl/MigrationJobOption.java | 55 ----------------------
2 files changed, 4 insertions(+), 55 deletions(-)
diff --git
a/infra/util/src/main/java/org/apache/shardingsphere/infra/util/datetime/StandardDateTimeFormatter.java
b/infra/util/src/main/java/org/apache/shardingsphere/infra/util/datetime/StandardDateTimeFormatter.java
index 96e1a5d7121..a1119fca678 100644
---
a/infra/util/src/main/java/org/apache/shardingsphere/infra/util/datetime/StandardDateTimeFormatter.java
+++
b/infra/util/src/main/java/org/apache/shardingsphere/infra/util/datetime/StandardDateTimeFormatter.java
@@ -17,11 +17,15 @@
package org.apache.shardingsphere.infra.util.datetime;
+import lombok.AccessLevel;
+import lombok.NoArgsConstructor;
+
import java.time.format.DateTimeFormatter;
/**
* Standard date time formatter.
*/
+@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class StandardDateTimeFormatter {
private static final DateTimeFormatter DATE_TIME_FORMATTER =
DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
diff --git
a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobOption.java
b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobOption.java
index 6e820b4af1f..c3b8886940d 100644
---
a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobOption.java
+++
b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobOption.java
@@ -33,8 +33,6 @@ import
org.apache.shardingsphere.data.pipeline.common.datanode.DataNodeUtils;
import
org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeEntry;
import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLine;
import
org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLineConvertUtils;
-import
org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceFactory;
-import
org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper;
import
org.apache.shardingsphere.data.pipeline.common.datasource.yaml.YamlPipelineDataSourceConfiguration;
import
org.apache.shardingsphere.data.pipeline.common.metadata.CaseInsensitiveIdentifier;
import
org.apache.shardingsphere.data.pipeline.common.metadata.CaseInsensitiveQualifiedTable;
@@ -42,7 +40,6 @@ import
org.apache.shardingsphere.data.pipeline.common.metadata.loader.PipelineSc
import org.apache.shardingsphere.data.pipeline.common.pojo.PipelineJobInfo;
import org.apache.shardingsphere.data.pipeline.common.pojo.PipelineJobMetaData;
import
org.apache.shardingsphere.data.pipeline.common.spi.algorithm.JobRateLimitAlgorithm;
-import
org.apache.shardingsphere.data.pipeline.common.sqlbuilder.PipelineCommonSQLBuilder;
import
org.apache.shardingsphere.data.pipeline.common.util.ShardingColumnsExtractor;
import
org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext;
import
org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineDataConsistencyChecker;
@@ -54,7 +51,6 @@ import
org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.Increm
import
org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.mapper.TableAndSchemaNameMapper;
import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils;
import
org.apache.shardingsphere.data.pipeline.core.job.option.TransmissionJobOption;
-import
org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory;
import
org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobConfigurationManager;
import
org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager;
import
org.apache.shardingsphere.data.pipeline.core.job.service.TransmissionJobManager;
@@ -87,11 +83,7 @@ import
org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSour
import
org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlRuleConfigurationSwapperEngine;
import
org.apache.shardingsphere.migration.distsql.statement.MigrateTableStatement;
import
org.apache.shardingsphere.migration.distsql.statement.pojo.SourceTargetEntry;
-import org.apache.shardingsphere.mode.manager.ContextManager;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
@@ -298,40 +290,6 @@ public final class MigrationJobOption implements
TransmissionJobOption {
return Optional.of("CONSISTENCY_CHECK");
}
- private void dropCheckJobs(final String jobId) {
- Collection<String> checkJobIds =
PipelineAPIFactory.getPipelineGovernanceFacade(PipelineJobIdUtils.parseContextKey(jobId)).getJobFacade().getCheck().listCheckJobIds(jobId);
- if (checkJobIds.isEmpty()) {
- return;
- }
- for (String each : checkJobIds) {
- try {
- new PipelineJobManager(this).drop(each);
- // CHECKSTYLE:OFF
- } catch (final RuntimeException ex) {
- // CHECKSTYLE:ON
- log.info("drop check job failed, check job id: {}, error: {}",
each, ex.getMessage());
- }
- }
- }
-
- private void cleanTempTableOnRollback(final String jobId) throws
SQLException {
- MigrationJobConfiguration jobConfig = new
PipelineJobConfigurationManager(this).getJobConfiguration(jobId);
- PipelineCommonSQLBuilder pipelineSQLBuilder = new
PipelineCommonSQLBuilder(jobConfig.getTargetDatabaseType());
- TableAndSchemaNameMapper mapping = new
TableAndSchemaNameMapper(jobConfig.getTargetTableSchemaMap());
- try (
- PipelineDataSourceWrapper dataSource =
PipelineDataSourceFactory.newInstance(jobConfig.getTarget());
- Connection connection = dataSource.getConnection()) {
- for (String each : jobConfig.getTargetTableNames()) {
- String targetSchemaName = mapping.getSchemaName(each);
- String sql = pipelineSQLBuilder.buildDropSQL(targetSchemaName,
each);
- log.info("cleanTempTableOnRollback, targetSchemaName={},
targetTableName={}, sql={}", targetSchemaName, each, sql);
- try (Statement statement = connection.createStatement()) {
- statement.execute(sql);
- }
- }
- }
- }
-
/**
* Add migration source resources.
*
@@ -410,19 +368,6 @@ public final class MigrationJobOption implements
TransmissionJobOption {
return "";
}
- /**
- * Refresh table metadata.
- *
- * @param jobId job id
- * @param databaseName database name
- */
- public void refreshTableMetadata(final String jobId, final String
databaseName) {
- // TODO use origin database name now, wait reloadDatabaseMetaData fix
case-sensitive problem
- ContextManager contextManager =
PipelineContextManager.getContext(PipelineJobIdUtils.parseContextKey(jobId)).getContextManager();
- ShardingSphereDatabase database =
contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName);
- contextManager.reloadDatabaseMetaData(database.getName());
- }
-
@Override
public Class<MigrationJob> getJobClass() {
return MigrationJob.class;