This is an automated email from the ASF dual-hosted git repository.

chengzhang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 9cbe22c5fb3 Fix sonar issues (#25715)
9cbe22c5fb3 is described below

commit 9cbe22c5fb34df4dbb7a8d3d47bc821553ec86ae
Author: Liang Zhang <[email protected]>
AuthorDate: Wed May 17 05:38:34 2023 +0800

    Fix sonar issues (#25715)
    
    * Fix sonar issues of CharDigestLikeEncryptAlgorithm
    
    * Fix sonar issues of PipelineContextKeyTest
    
    * Fix sonar issues of Rename this variable to not match a restricted identifier.
    
    * Fix sonar issues of PostgreSQLColumnValueReader
    
    * Fix sonar issues of Extract this nested ternary operation into an independent statement
    
    * Fix sonar issues of Extract this nested ternary operation into an independent statement
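    
    For context, the two recurring Sonar fixes in this commit are renaming variables that clash with the restricted identifier "record" (used for record classes in recent Java versions) and extracting a nested ternary into an independent statement. The following is a minimal illustrative sketch only, not code from this repository; the class and method names (NestedTernaryExample, pickValue, orDefault) are hypothetical:
    
        // Hypothetical sketch of the refactorings applied in this commit; names are illustrative only.
        public final class NestedTernaryExample {
            
            // Before: a nested ternary crammed into one expression, which Sonar flags.
            static String pickValueBefore(final String primary, final String fallback, final boolean usePrimary) {
                return usePrimary ? null == primary ? fallback : primary : fallback;
            }
            
            // After: the inner ternary is extracted into its own method,
            // in the same spirit as the getDatabaseShardingStrategyConfiguration helpers added in the diff below.
            static String pickValue(final String primary, final String fallback, final boolean usePrimary) {
                return usePrimary ? orDefault(primary, fallback) : fallback;
            }
            
            private static String orDefault(final String value, final String defaultValue) {
                return null == value ? defaultValue : value;
            }
            
            public static void main(final String[] args) {
                // "record" is a restricted identifier, so Sonar flags it as a variable name;
                // "dataRecord" (as used in the pipeline diffs below) avoids the clash.
                String dataRecord = pickValue(null, "default", true);
                System.out.println(dataRecord);
            }
        }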
---
 .../like/CharDigestLikeEncryptAlgorithm.java       |  9 +++----
 .../shardingsphere/sharding/rule/ShardingRule.java | 12 +++++++--
 .../checker/ShardingTableRuleStatementChecker.java | 12 +++++++--
 .../database/metadata/url/JdbcUrlAppender.java     |  6 ++++-
 .../data/pipeline/api/ingest/record/Record.java    |  2 +-
 .../spi/sqlbuilder/PipelineSQLBuilder.java         |  4 +--
 .../pipeline/core/importer/DataSourceImporter.java | 10 ++++----
 .../sqlbuilder/AbstractPipelineSQLBuilder.java     |  4 +--
 .../core/context/PipelineContextKeyTest.java       |  7 +++--
 .../core/sqlbuilder/FixturePipelineSQLBuilder.java |  2 +-
 .../mysql/ingest/MySQLIncrementalDumper.java       | 30 +++++++++++-----------
 .../sqlbuilder/OpenGaussPipelineSQLBuilder.java    |  4 +--
 .../ingest/PostgreSQLColumnValueReader.java        | 21 ++++++++-------
 .../advanced/resultset/SQLFederationResultSet.java |  6 ++++-
 14 files changed, 74 insertions(+), 55 deletions(-)

diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithm.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithm.java
index b5b4b5b2122..8e0d45b62e4 100644
--- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithm.java
+++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithm.java
@@ -71,9 +71,8 @@ public final class CharDigestLikeEncryptAlgorithm implements LikeEncryptAlgorith
     
     private int createDelta(final Properties props) {
         if (props.containsKey(DELTA_KEY)) {
-            String delta = props.getProperty(DELTA_KEY);
             try {
-                return Integer.parseInt(delta);
+                return Integer.parseInt(props.getProperty(DELTA_KEY));
             } catch (final NumberFormatException ignored) {
                 throw new EncryptAlgorithmInitializationException(getType(), "delta can only be a decimal number");
             }
@@ -83,9 +82,8 @@ public final class CharDigestLikeEncryptAlgorithm implements LikeEncryptAlgorith
     
     private int createMask(final Properties props) {
         if (props.containsKey(MASK_KEY)) {
-            String mask = props.getProperty(MASK_KEY);
             try {
-                return Integer.parseInt(mask);
+                return Integer.parseInt(props.getProperty(MASK_KEY));
             } catch (final NumberFormatException ignored) {
                 throw new EncryptAlgorithmInitializationException(getType(), "mask can only be a decimal number");
             }
@@ -95,9 +93,8 @@ public final class CharDigestLikeEncryptAlgorithm implements LikeEncryptAlgorith
     
     private int createStart(final Properties props) {
         if (props.containsKey(START_KEY)) {
-            String start = props.getProperty(START_KEY);
             try {
-                return Integer.parseInt(start);
+                return Integer.parseInt(props.getProperty(START_KEY));
             } catch (final NumberFormatException ignored) {
                 throw new EncryptAlgorithmInitializationException(getType(), "start can only be a decimal number");
             }
diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java
index f6a5d8c1da7..ab72fb951cd 100644
--- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java
+++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java
@@ -268,8 +268,8 @@ public final class ShardingRule implements DatabaseRule, DataNodeContainedRule,
     
     private Optional<String> getAlgorithmExpression(final TableRule tableRule, final boolean databaseAlgorithm, final BindingTableCheckedConfiguration checkedConfig) {
         ShardingStrategyConfiguration shardingStrategyConfig = databaseAlgorithm
-                ? null == tableRule.getDatabaseShardingStrategyConfig() ? checkedConfig.getDefaultDatabaseShardingStrategyConfig() : tableRule.getDatabaseShardingStrategyConfig()
-                : null == tableRule.getTableShardingStrategyConfig() ? checkedConfig.getDefaultTableShardingStrategyConfig() : tableRule.getTableShardingStrategyConfig();
+                ? getDatabaseShardingStrategyConfiguration(tableRule, checkedConfig.getDefaultDatabaseShardingStrategyConfig())
+                : getTableShardingStrategyConfiguration(tableRule, checkedConfig.getDefaultTableShardingStrategyConfig());
         ShardingAlgorithm shardingAlgorithm = checkedConfig.getShardingAlgorithms().get(shardingStrategyConfig.getShardingAlgorithmName());
         String dataNodePrefix = databaseAlgorithm ? tableRule.getDataSourceDataNode().getPrefix() : tableRule.getTableDataNode().getPrefix();
         String shardingColumn = getShardingColumn(shardingStrategyConfig, checkedConfig.getDefaultShardingColumn());
@@ -302,6 +302,10 @@ public final class ShardingRule implements DatabaseRule, DataNodeContainedRule,
      * @return database sharding strategy configuration
      */
     public ShardingStrategyConfiguration getDatabaseShardingStrategyConfiguration(final TableRule tableRule) {
+        return getDatabaseShardingStrategyConfiguration(tableRule, defaultDatabaseShardingStrategyConfig);
+    }
+    
+    private ShardingStrategyConfiguration getDatabaseShardingStrategyConfiguration(final TableRule tableRule, final ShardingStrategyConfiguration defaultDatabaseShardingStrategyConfig) {
         return null == tableRule.getDatabaseShardingStrategyConfig() ? defaultDatabaseShardingStrategyConfig : tableRule.getDatabaseShardingStrategyConfig();
     }
     
@@ -312,6 +316,10 @@ public final class ShardingRule implements DatabaseRule, DataNodeContainedRule,
      * @return table sharding strategy configuration
      */
     public ShardingStrategyConfiguration getTableShardingStrategyConfiguration(final TableRule tableRule) {
+        return getTableShardingStrategyConfiguration(tableRule, defaultTableShardingStrategyConfig);
+    }
+    
+    private ShardingStrategyConfiguration getTableShardingStrategyConfiguration(final TableRule tableRule, final ShardingStrategyConfiguration defaultTableShardingStrategyConfig) {
         return null == tableRule.getTableShardingStrategyConfig() ? defaultTableShardingStrategyConfig : tableRule.getTableShardingStrategyConfig();
     }
     
diff --git a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/checker/ShardingTableRuleStatementChecker.java b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/checker/ShardingTableRuleStatementChecker.java
index 0d8a46c2de4..c717e031ea6 100644
--- a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/checker/ShardingTableRuleStatementChecker.java
+++ b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/checker/ShardingTableRuleStatementChecker.java
@@ -296,14 +296,22 @@ public final class ShardingTableRuleStatementChecker {
     
     private static Optional<String> getAlgorithmExpression(final TableRule tableRule, final boolean databaseAlgorithm, final BindingTableCheckedConfiguration checkedConfig) {
         ShardingStrategyConfiguration shardingStrategyConfig = databaseAlgorithm
-                ? null == tableRule.getDatabaseShardingStrategyConfig() ? checkedConfig.getDefaultDatabaseShardingStrategyConfig() : tableRule.getDatabaseShardingStrategyConfig()
-                : null == tableRule.getTableShardingStrategyConfig() ? checkedConfig.getDefaultTableShardingStrategyConfig() : tableRule.getTableShardingStrategyConfig();
+                ? getDatabaseShardingStrategyConfiguration(tableRule, checkedConfig)
+                : getTableShardingStrategyConfiguration(tableRule, checkedConfig);
         ShardingAlgorithm shardingAlgorithm = checkedConfig.getShardingAlgorithms().get(shardingStrategyConfig.getShardingAlgorithmName());
         String dataNodePrefix = databaseAlgorithm ? tableRule.getDataSourceDataNode().getPrefix() : tableRule.getTableDataNode().getPrefix();
         String shardingColumn = getShardingColumn(shardingStrategyConfig, checkedConfig.getDefaultShardingColumn());
         return null == shardingAlgorithm ? Optional.empty() : shardingAlgorithm.getAlgorithmStructure(dataNodePrefix, shardingColumn);
     }
     
+    private static ShardingStrategyConfiguration getDatabaseShardingStrategyConfiguration(final TableRule tableRule, final BindingTableCheckedConfiguration checkedConfig) {
+        return null == tableRule.getDatabaseShardingStrategyConfig() ? checkedConfig.getDefaultDatabaseShardingStrategyConfig() : tableRule.getDatabaseShardingStrategyConfig();
+    }
+    
+    private static ShardingStrategyConfiguration getTableShardingStrategyConfiguration(final TableRule tableRule, final BindingTableCheckedConfiguration checkedConfig) {
+        return null == tableRule.getTableShardingStrategyConfig() ? checkedConfig.getDefaultTableShardingStrategyConfig() : tableRule.getTableShardingStrategyConfig();
+    }
+    
     private static String getShardingColumn(final ShardingStrategyConfiguration shardingStrategyConfig, final String defaultShardingColumn) {
         String shardingColumn = defaultShardingColumn;
         if (shardingStrategyConfig instanceof ComplexShardingStrategyConfiguration) {
diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/database/metadata/url/JdbcUrlAppender.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/database/metadata/url/JdbcUrlAppender.java
index 3c5e32f522c..3019730db25 100644
--- a/infra/common/src/main/java/org/apache/shardingsphere/infra/database/metadata/url/JdbcUrlAppender.java
+++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/database/metadata/url/JdbcUrlAppender.java
@@ -38,7 +38,7 @@ public final class JdbcUrlAppender {
         Properties currentQueryProps = DatabaseTypeEngine.getDatabaseType(jdbcUrl).getDataSourceMetaData(jdbcUrl, null).getQueryProperties();
         return hasConflictedQueryProperties(currentQueryProps, queryProps)
                 ? concat(jdbcUrl.substring(0, jdbcUrl.indexOf('?') + 1), getMergedProperties(currentQueryProps, queryProps))
-                : concat(jdbcUrl + (currentQueryProps.isEmpty() ? "?" : "&"), queryProps);
+                : concat(jdbcUrl + getURLDelimiter(currentQueryProps), queryProps);
     }
     
     private boolean hasConflictedQueryProperties(final Properties currentQueryProps, final Properties toBeAppendedQueryProps) {
@@ -64,4 +64,8 @@ public final class JdbcUrlAppender {
         result.deleteCharAt(result.length() - 1);
         return result.toString();
     }
+    
+    private String getURLDelimiter(final Properties currentQueryProps) {
+        return currentQueryProps.isEmpty() ? "?" : "&";
+    }
 }
diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/ingest/record/Record.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/ingest/record/Record.java
index 18cafe5b540..a455b83bcfd 100644
--- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/ingest/record/Record.java
+++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/ingest/record/Record.java
@@ -25,7 +25,7 @@ import lombok.ToString;
 import org.apache.shardingsphere.data.pipeline.api.ingest.position.IngestPosition;
 
 /**
- * Record interface.
+ * Record.
  */
 @RequiredArgsConstructor(access = AccessLevel.PROTECTED)
 @Getter
diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/spi/sqlbuilder/PipelineSQLBuilder.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/spi/sqlbuilder/PipelineSQLBuilder.java
index 1c4714efe77..d0610dcb3be 100644
--- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/spi/sqlbuilder/PipelineSQLBuilder.java
+++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/spi/sqlbuilder/PipelineSQLBuilder.java
@@ -104,11 +104,11 @@ public interface PipelineSQLBuilder extends TypedSPI {
     /**
      * Extract updated columns.
      *
-     * @param record data record
+     * @param dataRecord data record
      * @return filtered columns
      */
     // TODO Consider remove extractUpdatedColumns. openGauss has special impl currently
-    List<Column> extractUpdatedColumns(DataRecord record);
+    List<Column> extractUpdatedColumns(DataRecord dataRecord);
     
     /**
      * Build delete SQL.
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/importer/DataSourceImporter.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/importer/DataSourceImporter.java
index 5fc458ea99b..d318dfefd7a 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/importer/DataSourceImporter.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/importer/DataSourceImporter.java
@@ -235,11 +235,11 @@ public final class DataSourceImporter extends AbstractLifecycleExecutor implemen
         }
     }
     
-    private void executeUpdate(final Connection connection, final DataRecord record) throws SQLException {
-        Set<String> shardingColumns = importerConfig.getShardingColumns(record.getTableName());
-        List<Column> conditionColumns = RecordUtils.extractConditionColumns(record, shardingColumns);
-        List<Column> updatedColumns = pipelineSqlBuilder.extractUpdatedColumns(record);
-        String updateSql = pipelineSqlBuilder.buildUpdateSQL(getSchemaName(record.getTableName()), record, conditionColumns);
+    private void executeUpdate(final Connection connection, final DataRecord dataRecord) throws SQLException {
+        Set<String> shardingColumns = importerConfig.getShardingColumns(dataRecord.getTableName());
+        List<Column> conditionColumns = RecordUtils.extractConditionColumns(dataRecord, shardingColumns);
+        List<Column> updatedColumns = pipelineSqlBuilder.extractUpdatedColumns(dataRecord);
+        String updateSql = pipelineSqlBuilder.buildUpdateSQL(getSchemaName(dataRecord.getTableName()), dataRecord, conditionColumns);
         try (PreparedStatement preparedStatement = connection.prepareStatement(updateSql)) {
             updateStatement.set(preparedStatement);
             for (int i = 0; i < updatedColumns.size(); i++) {
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/sqlbuilder/AbstractPipelineSQLBuilder.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/sqlbuilder/AbstractPipelineSQLBuilder.java
index 56bb8588b6a..6494932f763 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/sqlbuilder/AbstractPipelineSQLBuilder.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/sqlbuilder/AbstractPipelineSQLBuilder.java
@@ -154,8 +154,8 @@ public abstract class AbstractPipelineSQLBuilder implements PipelineSQLBuilder {
     }
     
     @Override
-    public List<Column> extractUpdatedColumns(final DataRecord record) {
-        return new ArrayList<>(RecordUtils.extractUpdatedColumns(record));
+    public List<Column> extractUpdatedColumns(final DataRecord dataRecord) {
+        return new ArrayList<>(RecordUtils.extractUpdatedColumns(dataRecord));
     }
     
     @Override
diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/context/PipelineContextKeyTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/context/PipelineContextKeyTest.java
index 1cfccee3bb6..e4a0646e0cf 100644
--- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/context/PipelineContextKeyTest.java
+++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/context/PipelineContextKeyTest.java
@@ -21,9 +21,8 @@ import org.apache.shardingsphere.infra.instance.metadata.InstanceType;
 import org.junit.jupiter.api.Test;
 
 import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.jupiter.api.Assertions.assertNotEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
 
 class PipelineContextKeyTest {
     
@@ -39,7 +38,7 @@ class PipelineContextKeyTest {
     void assertHashCodeEqualsForJdbcMode() {
         PipelineContextKey contextKey1 = PipelineContextKey.build("logic_db", InstanceType.JDBC);
         PipelineContextKey contextKey2 = PipelineContextKey.build("sharding_db", InstanceType.JDBC);
-        assertTrue(contextKey1.hashCode() != contextKey2.hashCode());
-        assertNotEquals(contextKey1, contextKey2);
+        assertThat(contextKey1.hashCode(), not(contextKey2.hashCode()));
+        assertThat(contextKey1, not(contextKey2));
     }
 }
diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/sqlbuilder/FixturePipelineSQLBuilder.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/sqlbuilder/FixturePipelineSQLBuilder.java
index 70d8492a26d..47eaa986d4d 100644
--- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/sqlbuilder/FixturePipelineSQLBuilder.java
+++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/sqlbuilder/FixturePipelineSQLBuilder.java
@@ -54,7 +54,7 @@ public final class FixturePipelineSQLBuilder implements PipelineSQLBuilder {
     }
     
     @Override
-    public List<Column> extractUpdatedColumns(final DataRecord record) {
+    public List<Column> extractUpdatedColumns(final DataRecord dataRecord) {
         return Collections.emptyList();
     }
     
diff --git a/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumper.java b/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumper.java
index 0dcd5656302..772060603ca 100644
--- a/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumper.java
+++ b/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumper.java
@@ -126,9 +126,9 @@ public final class MySQLIncrementalDumper extends AbstractLifecycleExecutor impl
     }
     
     private void createPlaceholderRecord(final AbstractBinlogEvent event) {
-        PlaceholderRecord record = new PlaceholderRecord(new BinlogPosition(event.getFileName(), event.getPosition(), event.getServerId()));
-        record.setCommitTime(event.getTimestamp() * 1000L);
-        channel.pushRecord(record);
+        PlaceholderRecord placeholderRecord = new PlaceholderRecord(new BinlogPosition(event.getFileName(), event.getPosition(), event.getServerId()));
+        placeholderRecord.setCommitTime(event.getTimestamp() * 1000L);
+        channel.pushRecord(placeholderRecord);
     }
     
     private PipelineTableMetaData getPipelineTableMetaData(final String actualTableName) {
@@ -138,16 +138,16 @@ public final class MySQLIncrementalDumper extends AbstractLifecycleExecutor impl
     private void handleWriteRowsEvent(final WriteRowsEvent event, final PipelineTableMetaData tableMetaData) {
         Set<ColumnName> columnNameSet = dumperConfig.getColumnNameSet(event.getTableName()).orElse(null);
         for (Serializable[] each : event.getAfterRows()) {
-            DataRecord record = createDataRecord(event, each.length);
-            record.setType(IngestDataChangeType.INSERT);
+            DataRecord dataRecord = createDataRecord(event, each.length);
+            dataRecord.setType(IngestDataChangeType.INSERT);
             for (int i = 0; i < each.length; i++) {
                 PipelineColumnMetaData columnMetaData = tableMetaData.getColumnMetaData(i + 1);
                 if (isColumnUnneeded(columnNameSet, columnMetaData.getName())) {
                     continue;
                 }
-                record.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, each[i]), true, columnMetaData.isUniqueKey()));
+                dataRecord.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, each[i]), true, columnMetaData.isUniqueKey()));
             }
-            channel.pushRecord(record);
+            channel.pushRecord(dataRecord);
         }
     }
     
@@ -160,8 +160,8 @@ public final class MySQLIncrementalDumper extends AbstractLifecycleExecutor impl
         for (int i = 0; i < event.getBeforeRows().size(); i++) {
             Serializable[] beforeValues = event.getBeforeRows().get(i);
             Serializable[] afterValues = event.getAfterRows().get(i);
-            DataRecord record = createDataRecord(event, beforeValues.length);
-            record.setType(IngestDataChangeType.UPDATE);
+            DataRecord dataRecord = createDataRecord(event, beforeValues.length);
+            dataRecord.setType(IngestDataChangeType.UPDATE);
             for (int j = 0; j < beforeValues.length; j++) {
                 Serializable oldValue = beforeValues[j];
                 Serializable newValue = afterValues[j];
@@ -170,27 +170,27 @@ public final class MySQLIncrementalDumper extends AbstractLifecycleExecutor impl
                 if (isColumnUnneeded(columnNameSet, columnMetaData.getName())) {
                     continue;
                 }
-                record.addColumn(new Column(columnMetaData.getName(),
+                dataRecord.addColumn(new Column(columnMetaData.getName(),
                         handleValue(columnMetaData, oldValue),
                         handleValue(columnMetaData, newValue), updated, columnMetaData.isUniqueKey()));
             }
-            channel.pushRecord(record);
+            channel.pushRecord(dataRecord);
         }
     }
     
     private void handleDeleteRowsEvent(final DeleteRowsEvent event, final PipelineTableMetaData tableMetaData) {
         Set<ColumnName> columnNameSet = dumperConfig.getColumnNameSet(event.getTableName()).orElse(null);
         for (Serializable[] each : event.getBeforeRows()) {
-            DataRecord record = createDataRecord(event, each.length);
-            record.setType(IngestDataChangeType.DELETE);
+            DataRecord dataRecord = createDataRecord(event, each.length);
+            dataRecord.setType(IngestDataChangeType.DELETE);
             for (int i = 0, length = each.length; i < length; i++) {
                 PipelineColumnMetaData columnMetaData = tableMetaData.getColumnMetaData(i + 1);
                 if (isColumnUnneeded(columnNameSet, columnMetaData.getName())) {
                     continue;
                 }
-                record.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, each[i]), null, true, columnMetaData.isUniqueKey()));
+                dataRecord.addColumn(new Column(columnMetaData.getName(), handleValue(columnMetaData, each[i]), null, true, columnMetaData.isUniqueKey()));
             }
-            channel.pushRecord(record);
+            channel.pushRecord(dataRecord);
         }
     }
     
diff --git a/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/sqlbuilder/OpenGaussPipelineSQLBuilder.java b/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/sqlbuilder/OpenGaussPipelineSQLBuilder.java
index b60aae911a9..7fbf9521a83 100644
--- a/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/sqlbuilder/OpenGaussPipelineSQLBuilder.java
+++ b/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/sqlbuilder/OpenGaussPipelineSQLBuilder.java
@@ -73,8 +73,8 @@ public final class OpenGaussPipelineSQLBuilder extends AbstractPipelineSQLBuilde
     }
     
     @Override
-    public List<Column> extractUpdatedColumns(final DataRecord record) {
-        return record.getColumns().stream().filter(each -> !(each.isUniqueKey())).collect(Collectors.toList());
+    public List<Column> extractUpdatedColumns(final DataRecord dataRecord) {
+        return dataRecord.getColumns().stream().filter(each -> !(each.isUniqueKey())).collect(Collectors.toList());
     }
     
     private String buildConflictSQL(final DataRecord dataRecord) {
diff --git a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/PostgreSQLColumnValueReader.java b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/PostgreSQLColumnValueReader.java
index 0cfdbdb6cdb..8c9b07414c9 100644
--- a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/PostgreSQLColumnValueReader.java
+++ b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/PostgreSQLColumnValueReader.java
@@ -39,14 +39,16 @@ public final class PostgreSQLColumnValueReader extends AbstractColumnValueReader
         if (isPgMoneyType(metaData, columnIndex)) {
             return resultSet.getBigDecimal(columnIndex);
         }
-        if (isPgBitType(metaData, columnIndex)) {
-            PGobject result = new PGobject();
-            result.setType("bit");
-            Object bitValue = resultSet.getObject(columnIndex);
-            result.setValue(null == bitValue ? null : (Boolean) bitValue ? "1" : "0");
-            return result;
+        if (!isPgBitType(metaData, columnIndex)) {
+            return defaultDoReadValue(resultSet, metaData, columnIndex);
         }
-        return super.defaultDoReadValue(resultSet, metaData, columnIndex);
+        PGobject result = new PGobject();
+        result.setType("bit");
+        Object bitValue = resultSet.getObject(columnIndex);
+        if (null != bitValue) {
+            result.setValue((Boolean) bitValue ? "1" : "0");
+        }
+        return result;
     }
     
     private boolean isPgMoneyType(final ResultSetMetaData resultSetMetaData, final int index) throws SQLException {
@@ -54,10 +56,7 @@ public final class PostgreSQLColumnValueReader extends AbstractColumnValueReader
     }
     
     private boolean isPgBitType(final ResultSetMetaData resultSetMetaData, final int index) throws SQLException {
-        if (Types.BIT == resultSetMetaData.getColumnType(index)) {
-            return PG_BIT_TYPE.equalsIgnoreCase(resultSetMetaData.getColumnTypeName(index));
-        }
-        return false;
+        return Types.BIT == resultSetMetaData.getColumnType(index) && PG_BIT_TYPE.equalsIgnoreCase(resultSetMetaData.getColumnTypeName(index));
     }
     
     @Override
diff --git a/kernel/sql-federation/executor/advanced/src/main/java/org/apache/shardingsphere/sqlfederation/advanced/resultset/SQLFederationResultSet.java b/kernel/sql-federation/executor/advanced/src/main/java/org/apache/shardingsphere/sqlfederation/advanced/resultset/SQLFederationResultSet.java
index 689f83d207f..18bf1ca94e0 100644
--- a/kernel/sql-federation/executor/advanced/src/main/java/org/apache/shardingsphere/sqlfederation/advanced/resultset/SQLFederationResultSet.java
+++ b/kernel/sql-federation/executor/advanced/src/main/java/org/apache/shardingsphere/sqlfederation/advanced/resultset/SQLFederationResultSet.java
@@ -103,7 +103,11 @@ public final class SQLFederationResultSet extends AbstractUnsupportedOperationRe
     @Override
     public boolean next() {
         boolean result = enumerator.moveNext();
-        currentRows = result ? enumerator.current().getClass().isArray() ? (Object[]) enumerator.current() : new Object[]{enumerator.current()} : new Object[]{};
+        if (result) {
+            currentRows = enumerator.current().getClass().isArray() ? (Object[]) enumerator.current() : new Object[]{enumerator.current()};
+        } else {
+            currentRows = new Object[]{};
+        }
         return result;
     }
     
