This is an automated email from the ASF dual-hosted git repository.
zhonghongsheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new f23f6d9e934 Move "new EqualsBuilder()" into method for pipeline (#36675)
f23f6d9e934 is described below
commit f23f6d9e9344b7cb142a884c9b1bc4ccf52a57bb
Author: Hongsheng Zhong <[email protected]>
AuthorDate: Tue Sep 23 21:01:40 2025 +0800
Move "new EqualsBuilder()" into method for pipeline (#36675)
---
.../table/calculator/RecordSingleTableInventoryCalculator.java | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculator.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculator.java
index d1a29f02502..5e0d96f2d02 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculator.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculator.java
@@ -63,8 +63,6 @@ public final class RecordSingleTableInventoryCalculator extends AbstractStreamin
private final StreamingRangeType streamingRangeType;
- private final EqualsBuilder equalsBuilder = new EqualsBuilder();
-
public RecordSingleTableInventoryCalculator(final int chunkSize, final StreamingRangeType streamingRangeType) {
this(chunkSize, DEFAULT_STREAMING_CHUNK_COUNT, streamingRangeType);
}
@@ -179,6 +177,7 @@ public final class RecordSingleTableInventoryCalculator extends AbstractStreamin
if (null != previousRecord) {
duplicateRecords.add(previousRecord);
}
+ EqualsBuilder equalsBuilder = new EqualsBuilder();
while (resultSet.next()) {
ShardingSpherePreconditions.checkState(!isCanceling(), () -> new PipelineJobCancelingException("Calculate chunk canceled, qualified table: %s", param.getTable()));
Map<String, Object> record = readRecord(columnValueReaderEngine, resultSet, resultSetMetaData);
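
Context for the change (an illustration, not part of the commit): assuming the EqualsBuilder here is Apache Commons Lang's org.apache.commons.lang3.builder.EqualsBuilder, it is a mutable helper whose internal result flag persists across append(...) calls, so a long-lived shared field can carry stale state between comparisons unless it is reset. The minimal sketch below (class name EqualsBuilderScopeDemo is hypothetical) shows why a method-local instance, as introduced in this diff, starts each calculation from a clean builder.

import org.apache.commons.lang3.builder.EqualsBuilder;

public final class EqualsBuilderScopeDemo {

    public static void main(final String[] args) {
        // Shared instance: once one comparison fails, the internal flag stays false
        // for every later comparison unless reset() is called explicitly.
        EqualsBuilder shared = new EqualsBuilder();
        shared.append(1, 2);
        System.out.println(shared.isEquals());   // false
        shared.append("a", "a");
        System.out.println(shared.isEquals());   // still false: stale state from the previous pair

        // Method-local instance (the shape of the change above): each comparison
        // starts from a fresh builder, so results cannot leak between records.
        EqualsBuilder local = new EqualsBuilder();
        local.append("a", "a");
        System.out.println(local.isEquals());    // true
    }
}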