This is an automated email from the ASF dual-hosted git repository.
sunnianjun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
     new 0f0ccfc7129 Remove AbstractPipelineJobAPIImpl.buildPipelineJobMetaData() (#29016)
0f0ccfc7129 is described below
commit 0f0ccfc7129a4bdb14beb1de1de1dfffaf8530d1
Author: Liang Zhang <[email protected]>
AuthorDate: Sun Nov 12 21:15:51 2023 +0800
Remove AbstractPipelineJobAPIImpl.buildPipelineJobMetaData() (#29016)
---
.../data/pipeline/common/pojo/PipelineJobMetaData.java | 6 ++++++
.../pipeline/core/job/service/impl/AbstractPipelineJobAPIImpl.java | 6 ------
.../apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java | 2 +-
.../data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java | 2 +-
4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/PipelineJobMetaData.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/PipelineJobMetaData.java
index c02827aff36..48a678e9666 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/PipelineJobMetaData.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/PipelineJobMetaData.java
@@ -19,6 +19,7 @@ package org.apache.shardingsphere.data.pipeline.common.pojo;
 
 import lombok.Getter;
 import lombok.RequiredArgsConstructor;
+import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO;
 
 /**
  * Pipeline job meta data.
@@ -38,4 +39,9 @@ public final class PipelineJobMetaData {
     private final String stopTime;
 
     private final String jobParameter;
+
+    public PipelineJobMetaData(final JobConfigurationPOJO jobConfigPOJO) {
+        this(jobConfigPOJO.getJobName(), !jobConfigPOJO.isDisabled(), jobConfigPOJO.getShardingTotalCount(),
+                jobConfigPOJO.getProps().getProperty("create_time"), jobConfigPOJO.getProps().getProperty("stop_time"), jobConfigPOJO.getJobParameter());
+    }
 }
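
For reference, a minimal, hypothetical sketch (not part of this commit) of how a caller can now build the meta data straight from the ElasticJob JobConfigurationPOJO instead of going through the removed protected helper; the class name and sample values below are made up for illustration:

import java.util.Properties;

import org.apache.shardingsphere.data.pipeline.common.pojo.PipelineJobMetaData;
import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO;

// Hypothetical caller: uses the new convenience constructor shown in the diff above
// instead of the removed AbstractPipelineJobAPIImpl.buildPipelineJobMetaData().
public final class PipelineJobMetaDataUsageSketch {
    
    public static void main(final String[] args) {
        JobConfigurationPOJO jobConfigPOJO = new JobConfigurationPOJO();
        jobConfigPOJO.setJobName("j0101_example");
        jobConfigPOJO.setShardingTotalCount(1);
        jobConfigPOJO.setJobParameter("yaml-of-job-configuration");
        Properties props = new Properties();
        props.setProperty("create_time", "2023-11-12 21:15:51");
        jobConfigPOJO.setProps(props);
        // The constructor copies job name, enabled flag, sharding total count,
        // the create_time/stop_time properties and the job parameter from the POJO.
        PipelineJobMetaData jobMetaData = new PipelineJobMetaData(jobConfigPOJO);
        System.out.println(jobMetaData.getJobParameter());
    }
}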
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractPipelineJobAPIImpl.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractPipelineJobAPIImpl.java
index 5d52a1cfb66..5dbd46b1cb9 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractPipelineJobAPIImpl.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractPipelineJobAPIImpl.java
@@ -24,7 +24,6 @@ import org.apache.shardingsphere.data.pipeline.common.config.job.yaml.YamlPipeli
 import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey;
 import org.apache.shardingsphere.data.pipeline.common.listener.PipelineElasticJobListener;
 import org.apache.shardingsphere.data.pipeline.common.metadata.node.PipelineMetaDataNode;
-import org.apache.shardingsphere.data.pipeline.common.pojo.PipelineJobMetaData;
 import org.apache.shardingsphere.data.pipeline.common.registrycenter.repository.GovernanceRepositoryAPI;
 import org.apache.shardingsphere.data.pipeline.common.util.PipelineDistributedBarrier;
 import org.apache.shardingsphere.data.pipeline.core.exception.job.PipelineJobCreationWithInvalidShardingCountException;
@@ -51,11 +50,6 @@ public abstract class AbstractPipelineJobAPIImpl implements PipelineJobAPI {
 
     protected static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
 
-    protected PipelineJobMetaData buildPipelineJobMetaData(final JobConfigurationPOJO jobConfigPOJO) {
-        return new PipelineJobMetaData(jobConfigPOJO.getJobName(), !jobConfigPOJO.isDisabled(),
-                jobConfigPOJO.getShardingTotalCount(), jobConfigPOJO.getProps().getProperty("create_time"), jobConfigPOJO.getProps().getProperty("stop_time"), jobConfigPOJO.getJobParameter());
-    }
-
     @Override
     public Optional<String> start(final PipelineJobConfiguration jobConfig) {
         String jobId = jobConfig.getJobId();
diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java
index 88dabfe855e..482b05d21d5 100644
--- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java
+++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java
@@ -302,7 +302,7 @@ public final class CDCJobAPI extends AbstractInventoryIncrementalJobAPIImpl {
     @Override
     public TableBasedPipelineJobInfo getJobInfo(final String jobId) {
         JobConfigurationPOJO jobConfigPOJO = getElasticJobConfigPOJO(jobId);
-        PipelineJobMetaData jobMetaData = buildPipelineJobMetaData(jobConfigPOJO);
+        PipelineJobMetaData jobMetaData = new PipelineJobMetaData(jobConfigPOJO);
         CDCJobConfiguration jobConfig = getJobConfiguration(jobConfigPOJO);
         return new TableBasedPipelineJobInfo(jobMetaData, jobConfig.getDatabaseName(), String.join(", ", jobConfig.getSchemaTableNames()));
     }
diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java
index 7477ebcf612..7c3cbcd2141 100644
--- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java
+++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java
@@ -211,7 +211,7 @@ public final class MigrationJobAPI extends AbstractInventoryIncrementalJobAPIImp
     @Override
     public TableBasedPipelineJobInfo getJobInfo(final String jobId) {
         JobConfigurationPOJO jobConfigPOJO = getElasticJobConfigPOJO(jobId);
-        PipelineJobMetaData jobMetaData = buildPipelineJobMetaData(jobConfigPOJO);
+        PipelineJobMetaData jobMetaData = new PipelineJobMetaData(jobConfigPOJO);
         List<String> sourceTables = new LinkedList<>();
         getJobConfiguration(jobConfigPOJO).getJobShardingDataNodes().forEach(each -> each.getEntries().forEach(entry -> entry.getDataNodes()
                 .forEach(dataNode -> sourceTables.add(DataNodeUtils.formatWithSchema(dataNode)))));
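
Both call sites change in the same way: instead of the protected helper inherited from AbstractPipelineJobAPIImpl, they construct PipelineJobMetaData directly, so the mapping from JobConfigurationPOJO now lives next to the fields it populates. Below is a rough, hypothetical unit test sketch of that mapping (the test class and method names are illustrative and not part of this commit; it relies only on the Lombok-generated getters for the fields shown in the diff above):

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

import java.util.Properties;

import org.apache.shardingsphere.data.pipeline.common.pojo.PipelineJobMetaData;
import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO;
import org.junit.jupiter.api.Test;

// Hypothetical test: verifies the new constructor copies the stop_time
// property and the job parameter from the ElasticJob POJO.
class PipelineJobMetaDataConstructorSketchTest {
    
    @Test
    void assertBuildFromJobConfigurationPOJO() {
        JobConfigurationPOJO jobConfigPOJO = new JobConfigurationPOJO();
        jobConfigPOJO.setJobName("j0102_example");
        jobConfigPOJO.setShardingTotalCount(1);
        jobConfigPOJO.setJobParameter("param");
        Properties props = new Properties();
        props.setProperty("stop_time", "2023-11-12 22:00:00");
        jobConfigPOJO.setProps(props);
        PipelineJobMetaData actual = new PipelineJobMetaData(jobConfigPOJO);
        assertThat(actual.getJobParameter(), is("param"));
        assertThat(actual.getStopTime(), is("2023-11-12 22:00:00"));
    }
}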