This is an automated email from the ASF dual-hosted git repository.
zhaojinchao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new 4cf296f71e9 Refactor method name from config to configuration (#30878)
4cf296f71e9 is described below
commit 4cf296f71e97d6944ee2b60dd557550d0de19147
Author: Liang Zhang <[email protected]>
AuthorDate: Sun Apr 14 00:19:58 2024 +0800
Refactor method name from config to configuration (#30878)
---
.../EncryptRuleConfigurationCheckerTest.java | 12 +++----
.../update/CreateEncryptRuleExecutorTest.java | 8 ++---
.../handler/update/AlterMaskRuleExecutorTest.java | 8 ++---
.../handler/update/CreateMaskRuleExecutorTest.java | 8 ++---
...writeSplittingRuleConfigurationCheckerTest.java | 16 ++++-----
.../shardingsphere/sharding/rule/ShardingRule.java | 8 ++---
.../ShardingRoutingEngineFixtureBuilder.java | 40 +++++++++++-----------
.../sharding/rule/ShardingRuleTest.java | 2 +-
...eateShardingTableReferenceRuleExecutorTest.java | 6 ++--
.../state/datasource/DataSourceStateManager.java | 2 +-
.../core/context/PipelineProcessContext.java | 2 +-
.../core/context/TransmissionProcessContext.java | 8 ++---
.../preparer/inventory/InventoryTaskSplitter.java | 6 ++--
.../shardingsphere/data/pipeline/cdc/CDCJob.java | 2 +-
.../pipeline/cdc/core/prepare/CDCJobPreparer.java | 4 +--
.../context/ConsistencyCheckProcessContext.java | 2 +-
.../pipeline/scenario/migration/MigrationJob.java | 2 +-
.../migration/preparer/MigrationJobPreparer.java | 2 +-
.../cluster/zookeeper/ZookeeperRepositoryTest.java | 2 +-
.../ral/updatable/ImportMetaDataExecutor.java | 4 +--
20 files changed, 72 insertions(+), 72 deletions(-)
diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/checker/EncryptRuleConfigurationCheckerTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/checker/EncryptRuleConfigurationCheckerTest.java
index a8d2e09a9bb..555b3b608bb 100644
--- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/checker/EncryptRuleConfigurationCheckerTest.java
+++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/checker/EncryptRuleConfigurationCheckerTest.java
@@ -57,12 +57,12 @@ class EncryptRuleConfigurationCheckerTest {
     @SuppressWarnings({"rawtypes", "unchecked"})
     @Test
     void assertCheckWhenConfigInvalidCipherColumn() {
-        EncryptRuleConfiguration config = createInvalidCipherColumnConfig();
+        EncryptRuleConfiguration config = createInvalidCipherColumnConfiguration();
         RuleConfigurationChecker checker = OrderedSPILoader.getServicesByClass(RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass());
         assertThrows(UnregisteredAlgorithmException.class, () -> checker.check("test", config, Collections.emptyMap(), Collections.emptyList()));
     }
-    private EncryptRuleConfiguration createInvalidCipherColumnConfig() {
+    private EncryptRuleConfiguration createInvalidCipherColumnConfiguration() {
         EncryptRuleConfiguration result = mock(EncryptRuleConfiguration.class);
         when(result.getEncryptors()).thenReturn(Collections.emptyMap());
         EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_cipher", "aes_encryptor"));
@@ -74,12 +74,12 @@ class EncryptRuleConfigurationCheckerTest {
     @SuppressWarnings({"rawtypes", "unchecked"})
     @Test
     void assertCheckWhenConfigInvalidAssistColumn() {
-        EncryptRuleConfiguration config = createInvalidAssistColumnConfig();
+        EncryptRuleConfiguration config = createInvalidAssistColumnConfiguration();
         RuleConfigurationChecker checker = OrderedSPILoader.getServicesByClass(RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass());
         assertThrows(UnregisteredAlgorithmException.class, () -> checker.check("test", config, Collections.emptyMap(), Collections.emptyList()));
     }
-    private EncryptRuleConfiguration createInvalidAssistColumnConfig() {
+    private EncryptRuleConfiguration createInvalidAssistColumnConfiguration() {
         EncryptRuleConfiguration result = mock(EncryptRuleConfiguration.class);
         when(result.getEncryptors()).thenReturn(Collections.emptyMap());
         EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_cipher", "aes_encryptor"));
@@ -92,12 +92,12 @@ class EncryptRuleConfigurationCheckerTest {
     @SuppressWarnings({"rawtypes", "unchecked"})
     @Test
     void assertCheckWhenConfigInvalidLikeColumn() {
-        EncryptRuleConfiguration config = createInvalidLikeColumnConfig();
+        EncryptRuleConfiguration config = createInvalidLikeColumnConfiguration();
         RuleConfigurationChecker checker = OrderedSPILoader.getServicesByClass(RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass());
         assertThrows(UnregisteredAlgorithmException.class, () -> checker.check("test", config, Collections.emptyMap(), Collections.emptyList()));
     }
-    private EncryptRuleConfiguration createInvalidLikeColumnConfig() {
+    private EncryptRuleConfiguration createInvalidLikeColumnConfiguration() {
         EncryptRuleConfiguration result = mock(EncryptRuleConfiguration.class);
         when(result.getEncryptors()).thenReturn(Collections.emptyMap());
         EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_cipher", "aes_encryptor"));
diff --git a/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/update/CreateEncryptRuleExecutorTest.java b/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/update/CreateEncryptRuleExecutorTest.java
index 04f7a428501..4a7ea9c0a01 100644
--- a/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/update/CreateEncryptRuleExecutorTest.java
+++ b/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/update/CreateEncryptRuleExecutorTest.java
@@ -59,7 +59,7 @@ class CreateEncryptRuleExecutorTest {
     @Test
     void assertCheckSQLStatementWithDuplicateEncryptRule() {
         EncryptRule rule = mock(EncryptRule.class);
-        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfig());
+        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfiguration());
         executor.setRule(rule);
         assertThrows(DuplicateRuleException.class, () -> executor.checkBeforeUpdate(createSQLStatement(false, "MD5")));
     }
@@ -72,7 +72,7 @@ class CreateEncryptRuleExecutorTest {
     @Test
     void assertCheckSQLStatementWithConflictColumnNames() {
         EncryptRule rule = mock(EncryptRule.class);
-        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfig());
+        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfiguration());
         executor.setRule(rule);
         assertThrows(InvalidRuleConfigurationException.class, () -> executor.checkBeforeUpdate(createConflictColumnNameSQLStatement()));
     }
@@ -122,7 +122,7 @@ class CreateEncryptRuleExecutorTest {
         return new CreateEncryptRuleStatement(false, Collections.singleton(ruleSegment));
     }
-    private EncryptRuleConfiguration getCurrentRuleConfig() {
+    private EncryptRuleConfiguration getCurrentRuleConfiguration() {
         Collection<EncryptTableRuleConfiguration> rules = new LinkedList<>();
         rules.add(new EncryptTableRuleConfiguration("t_user", Collections.emptyList()));
         rules.add(new EncryptTableRuleConfiguration("t_order", Collections.emptyList()));
@@ -133,7 +133,7 @@ class CreateEncryptRuleExecutorTest {
     void assertCreateAESEncryptRuleWithPropertiesNotExists() {
         CreateEncryptRuleStatement sqlStatement = createWrongAESEncryptorSQLStatement();
         EncryptRule rule = mock(EncryptRule.class);
-        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfig());
+        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfiguration());
         executor.setRule(rule);
         assertThrows(AlgorithmInitializationException.class, () -> executor.checkBeforeUpdate(sqlStatement));
     }
diff --git a/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/update/AlterMaskRuleExecutorTest.java b/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/update/AlterMaskRuleExecutorTest.java
index fdf00affbeb..b00d531e2dc 100644
--- a/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/update/AlterMaskRuleExecutorTest.java
+++ b/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/update/AlterMaskRuleExecutorTest.java
@@ -63,7 +63,7 @@ class AlterMaskRuleExecutorTest {
     @Test
     void assertCheckBeforeUpdateWithoutToBeAlteredAlgorithm() {
         MaskRule rule = mock(MaskRule.class);
-        when(rule.getConfiguration()).thenReturn(createCurrentRuleConfig());
+        when(rule.getConfiguration()).thenReturn(createCurrentRuleConfiguration());
         executor.setRule(rule);
         assertThrows(MissingRequiredRuleException.class, () -> executor.checkBeforeUpdate(createSQLStatement("INVALID_TYPE")));
     }
@@ -74,14 +74,14 @@ class AlterMaskRuleExecutorTest {
         MaskRuleSegment ruleSegment = new MaskRuleSegment("t_mask", Collections.singleton(columnSegment));
         AlterMaskRuleStatement statement = new AlterMaskRuleStatement(Collections.singleton(ruleSegment));
         MaskRule rule = mock(MaskRule.class);
-        when(rule.getConfiguration()).thenReturn(createCurrentRuleConfig());
+        when(rule.getConfiguration()).thenReturn(createCurrentRuleConfiguration());
         executor.setRule(rule);
         assertThrows(MissingRequiredRuleException.class, () -> executor.checkBeforeUpdate(statement));
     }
     @Test
     void assertUpdate() {
-        MaskRuleConfiguration currentRuleConfig = createCurrentRuleConfig();
+        MaskRuleConfiguration currentRuleConfig = createCurrentRuleConfiguration();
         MaskColumnSegment columnSegment = new MaskColumnSegment("order_id", new AlgorithmSegment("MD5", new Properties()));
         MaskRuleSegment ruleSegment = new MaskRuleSegment("t_order", Collections.singleton(columnSegment));
@@ -104,7 +104,7 @@ class AlterMaskRuleExecutorTest {
         return new AlterMaskRuleStatement(Collections.singleton(ruleSegment));
     }
-    private MaskRuleConfiguration createCurrentRuleConfig() {
+    private MaskRuleConfiguration createCurrentRuleConfiguration() {
         Collection<MaskTableRuleConfiguration> tableRuleConfigs = new LinkedList<>();
         tableRuleConfigs.add(new MaskTableRuleConfiguration("t_order", Collections.emptyList()));
         return new MaskRuleConfiguration(tableRuleConfigs, new HashMap<>());
diff --git a/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/update/CreateMaskRuleExecutorTest.java b/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/update/CreateMaskRuleExecutorTest.java
index 30cfa6de3f8..f614b0a030e 100644
--- a/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/update/CreateMaskRuleExecutorTest.java
+++ b/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/update/CreateMaskRuleExecutorTest.java
@@ -56,7 +56,7 @@ class CreateMaskRuleExecutorTest {
     @Test
     void assertCheckSQLStatementWithDuplicateMaskRule() {
         MaskRule rule = mock(MaskRule.class);
-        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfig());
+        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfiguration());
         executor.setRule(rule);
         assertThrows(DuplicateRuleException.class, () -> executor.checkBeforeUpdate(createDuplicatedSQLStatement(false, "MD5")));
     }
@@ -68,7 +68,7 @@ class CreateMaskRuleExecutorTest {
     @Test
     void assertCreateMaskRule() {
-        MaskRuleConfiguration currentRuleConfig = getCurrentRuleConfig();
+        MaskRuleConfiguration currentRuleConfig = getCurrentRuleConfiguration();
         CreateMaskRuleStatement sqlStatement = createSQLStatement(false, "MD5");
         MaskRule rule = mock(MaskRule.class);
         when(rule.getConfiguration()).thenReturn(currentRuleConfig);
@@ -82,7 +82,7 @@ class CreateMaskRuleExecutorTest {
     @Test
     void assertCreateMaskRuleWithIfNotExists() {
-        MaskRuleConfiguration currentRuleConfig = getCurrentRuleConfig();
+        MaskRuleConfiguration currentRuleConfig = getCurrentRuleConfiguration();
         MaskRule rule = mock(MaskRule.class);
         CreateMaskRuleStatement sqlStatement = createSQLStatement(true, "MD5");
         when(rule.getConfiguration()).thenReturn(currentRuleConfig);
@@ -116,7 +116,7 @@ class CreateMaskRuleExecutorTest {
         return new CreateMaskRuleStatement(ifNotExists, rules);
     }
-    private MaskRuleConfiguration getCurrentRuleConfig() {
+    private MaskRuleConfiguration getCurrentRuleConfiguration() {
         Collection<MaskTableRuleConfiguration> rules = new LinkedList<>();
         rules.add(new MaskTableRuleConfiguration("t_mask", Collections.emptyList()));
         rules.add(new MaskTableRuleConfiguration("t_order", Collections.emptyList()));
diff --git a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/checker/ReadwriteSplittingRuleConfigurationCheckerTest.java b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/checker/ReadwriteSplittingRuleConfigurationCheckerTest.java
index 565fa8b08e8..70492bd2dad 100644
--- a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/checker/ReadwriteSplittingRuleConfigurationCheckerTest.java
+++ b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/checker/ReadwriteSplittingRuleConfigurationCheckerTest.java
@@ -69,8 +69,8 @@ class ReadwriteSplittingRuleConfigurationCheckerTest {
     @Test
     void assertCheckWhenConfigInvalidWriteDataSource() {
         ReadwriteSplittingRuleConfiguration config = mock(ReadwriteSplittingRuleConfiguration.class);
-        List<ReadwriteSplittingDataSourceRuleConfiguration> configs = Arrays.asList(createDataSourceRuleConfig(
-                "write_ds_0", Arrays.asList("read_ds_0", "read_ds_1")), createDataSourceRuleConfig("write_ds_2", Arrays.asList("read_ds_0", "read_ds_1")));
+        List<ReadwriteSplittingDataSourceRuleConfiguration> configs = Arrays.asList(createDataSourceRuleConfiguration(
+                "write_ds_0", Arrays.asList("read_ds_0", "read_ds_1")), createDataSourceRuleConfiguration("write_ds_2", Arrays.asList("read_ds_0", "read_ds_1")));
         when(config.getDataSources()).thenReturn(configs);
         RuleConfigurationChecker checker = OrderedSPILoader.getServicesByClass(RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass());
         assertThrows(DataSourceNameNotExistedException.class, () -> checker.check("test", config, mockDataSources(), Collections.emptyList()));
@@ -80,8 +80,8 @@ class ReadwriteSplittingRuleConfigurationCheckerTest {
     @Test
     void assertCheckWhenConfigInvalidReadDataSource() {
         ReadwriteSplittingRuleConfiguration config = mock(ReadwriteSplittingRuleConfiguration.class);
-        List<ReadwriteSplittingDataSourceRuleConfiguration> configs = Arrays.asList(createDataSourceRuleConfig(
-                "write_ds_0", Arrays.asList("read_ds_0", "read_ds_0")), createDataSourceRuleConfig("write_ds_1", Arrays.asList("read_ds_0", "read_ds_0")));
+        List<ReadwriteSplittingDataSourceRuleConfiguration> configs = Arrays.asList(createDataSourceRuleConfiguration(
+                "write_ds_0", Arrays.asList("read_ds_0", "read_ds_0")), createDataSourceRuleConfiguration("write_ds_1", Arrays.asList("read_ds_0", "read_ds_0")));
         when(config.getDataSources()).thenReturn(configs);
         RuleConfigurationChecker checker = OrderedSPILoader.getServicesByClass(RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass());
         assertThrows(DuplicateDataSourceException.class, () -> checker.check("test", config, mockDataSources(), Collections.emptyList()));
@@ -91,7 +91,7 @@ class ReadwriteSplittingRuleConfigurationCheckerTest {
     @Test
     void assertCheckWeightLoadBalanceInvalidDataSourceName() {
         ReadwriteSplittingRuleConfiguration config = mock(ReadwriteSplittingRuleConfiguration.class);
-        Collection<ReadwriteSplittingDataSourceRuleConfiguration> configs = Collections.singleton(createDataSourceRuleConfig("write_ds_0", Arrays.asList("read_ds_0", "read_ds_1")));
+        Collection<ReadwriteSplittingDataSourceRuleConfiguration> configs = Collections.singleton(createDataSourceRuleConfiguration("write_ds_0", Arrays.asList("read_ds_0", "read_ds_1")));
         when(config.getDataSources()).thenReturn(configs);
         AlgorithmConfiguration algorithm = new AlgorithmConfiguration("WEIGHT", PropertiesBuilder.build(new Property("read_ds_2", "1"), new Property("read_ds_1", "2")));
         when(config.getLoadBalancers()).thenReturn(Collections.singletonMap("weight_ds", algorithm));
@@ -102,7 +102,7 @@ class ReadwriteSplittingRuleConfigurationCheckerTest {
     @SuppressWarnings({"rawtypes", "unchecked"})
     @Test
     void assertCheckWhenConfigOtherRulesDatasource() {
-        ReadwriteSplittingRuleConfiguration config = createContainsOtherRulesDatasourceConfig();
+        ReadwriteSplittingRuleConfiguration config = createContainsOtherRulesDatasourceConfiguration();
         RuleConfigurationChecker checker = OrderedSPILoader.getServicesByClass(RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass());
         ShardingSphereRule rule = mock(ShardingSphereRule.class);
         DataSourceMapperRuleAttribute ruleAttribute = mock(DataSourceMapperRuleAttribute.class, RETURNS_DEEP_STUBS);
@@ -111,7 +111,7 @@ class ReadwriteSplittingRuleConfigurationCheckerTest {
         checker.check("test", config, mockDataSources(), Collections.singleton(rule));
     }
-    private ReadwriteSplittingRuleConfiguration createContainsOtherRulesDatasourceConfig() {
+    private ReadwriteSplittingRuleConfiguration createContainsOtherRulesDatasourceConfiguration() {
         ReadwriteSplittingRuleConfiguration result = mock(ReadwriteSplittingRuleConfiguration.class);
         ReadwriteSplittingDataSourceRuleConfiguration dataSourceConfig = mock(ReadwriteSplittingDataSourceRuleConfiguration.class);
         when(dataSourceConfig.getName()).thenReturn("readwrite_ds");
@@ -121,7 +121,7 @@ class ReadwriteSplittingRuleConfigurationCheckerTest {
         return result;
     }
-    private ReadwriteSplittingDataSourceRuleConfiguration createDataSourceRuleConfig(final String writeDataSource, final List<String> readDataSources) {
+    private ReadwriteSplittingDataSourceRuleConfiguration createDataSourceRuleConfiguration(final String writeDataSource, final List<String> readDataSources) {
         ReadwriteSplittingDataSourceRuleConfiguration result = mock(ReadwriteSplittingDataSourceRuleConfiguration.class);
         when(result.getName()).thenReturn("readwrite_ds");
         when(result.getWriteDataSourceName()).thenReturn(writeDataSource);
diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java
index 3b7e6b3a994..1ded6e4ba4e 100644
--- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java
+++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java
@@ -127,8 +127,8 @@ public final class ShardingRule implements DatabaseRule {
         shardingTables.putAll(createShardingAutoTables(ruleConfig.getAutoTables(), ruleConfig.getDefaultKeyGenerateStrategy()));
         validateUniqueActualDataNodesInTableRules();
         bindingTableRules.putAll(createBindingTableRules(ruleConfig.getBindingTableGroups()));
-        defaultDatabaseShardingStrategyConfig = createDefaultDatabaseShardingStrategyConfig(ruleConfig);
-        defaultTableShardingStrategyConfig = createDefaultTableShardingStrategyConfig(ruleConfig);
+        defaultDatabaseShardingStrategyConfig = createDefaultDatabaseShardingStrategyConfiguration(ruleConfig);
+        defaultTableShardingStrategyConfig = createDefaultTableShardingStrategyConfiguration(ruleConfig);
         defaultAuditStrategy = null == ruleConfig.getDefaultAuditStrategy() ? new ShardingAuditStrategyConfiguration(Collections.emptyList(), true) : ruleConfig.getDefaultAuditStrategy();
         defaultKeyGenerateAlgorithm = null == ruleConfig.getDefaultKeyGenerateStrategy()
                 ? TypedSPILoader.getService(KeyGenerateAlgorithm.class, null)
@@ -155,12 +155,12 @@ public final class ShardingRule implements DatabaseRule {
         });
     }
-    private ShardingStrategyConfiguration createDefaultDatabaseShardingStrategyConfig(final ShardingRuleConfiguration ruleConfig) {
+    private ShardingStrategyConfiguration createDefaultDatabaseShardingStrategyConfiguration(final ShardingRuleConfiguration ruleConfig) {
         Optional.ofNullable(ruleConfig.getDefaultDatabaseShardingStrategy()).ifPresent(optional -> checkManualShardingAlgorithm(optional.getShardingAlgorithmName(), "default"));
         return null == ruleConfig.getDefaultDatabaseShardingStrategy() ? new NoneShardingStrategyConfiguration() : ruleConfig.getDefaultDatabaseShardingStrategy();
     }
-    private ShardingStrategyConfiguration createDefaultTableShardingStrategyConfig(final ShardingRuleConfiguration ruleConfig) {
+    private ShardingStrategyConfiguration createDefaultTableShardingStrategyConfiguration(final ShardingRuleConfiguration ruleConfig) {
         Optional.ofNullable(ruleConfig.getDefaultTableShardingStrategy()).ifPresent(optional -> checkManualShardingAlgorithm(optional.getShardingAlgorithmName(), "default"));
         return null == ruleConfig.getDefaultTableShardingStrategy() ? new NoneShardingStrategyConfiguration() : ruleConfig.getDefaultTableShardingStrategy();
     }
diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/fixture/ShardingRoutingEngineFixtureBuilder.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/fixture/ShardingRoutingEngineFixtureBuilder.java
index 11b9e702e1e..6472dfa9e0d 100644
--- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/fixture/ShardingRoutingEngineFixtureBuilder.java
+++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/fixture/ShardingRoutingEngineFixtureBuilder.java
@@ -75,7 +75,7 @@ public final class ShardingRoutingEngineFixtureBuilder {
      */
     public static ShardingRule createBasedShardingRule() {
         ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
-        shardingRuleConfig.getTables().add(createInlineTableRuleConfig("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${order_id % 2}", "ds_${user_id % 2}"));
+        shardingRuleConfig.getTables().add(createInlineTableRuleConfiguration("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${order_id % 2}", "ds_${user_id % 2}"));
         shardingRuleConfig.getShardingAlgorithms().put("ds_inline", new AlgorithmConfiguration("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "ds_${user_id % 2}"))));
         shardingRuleConfig.getShardingAlgorithms().put(
                 "t_order_inline", new AlgorithmConfiguration("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "t_order_${order_id % 2}"))));
@@ -89,7 +89,7 @@ public final class ShardingRoutingEngineFixtureBuilder {
      */
     public static ShardingRule createErrorShardingRule() {
         ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
-        shardingRuleConfig.getTables().add(createInlineTableRuleConfig("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${order_id % 2}", "ds_${user_id % 2}"));
+        shardingRuleConfig.getTables().add(createInlineTableRuleConfiguration("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${order_id % 2}", "ds_${user_id % 2}"));
         shardingRuleConfig.getShardingAlgorithms().put("ds_inline", new AlgorithmConfiguration("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "ds_${user_id % 2}"))));
         shardingRuleConfig.getShardingAlgorithms().put(
                 "t_order_inline", new AlgorithmConfiguration("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "t_order_${order_id % 3}"))));
@@ -103,8 +103,8 @@ public final class ShardingRoutingEngineFixtureBuilder {
      */
     public static ShardingRule createBindingShardingRule() {
         ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
-        shardingRuleConfig.getTables().add(createInlineTableRuleConfig("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${order_id % 2}", "ds_${user_id % 2}"));
-        shardingRuleConfig.getTables().add(createInlineTableRuleConfig("t_order_item", "ds_${0..1}.t_order_item_${0..1}", "t_order_item_${order_id % 2}", "ds_${user_id % 2}"));
+        shardingRuleConfig.getTables().add(createInlineTableRuleConfiguration("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${order_id % 2}", "ds_${user_id % 2}"));
+        shardingRuleConfig.getTables().add(createInlineTableRuleConfiguration("t_order_item", "ds_${0..1}.t_order_item_${0..1}", "t_order_item_${order_id % 2}", "ds_${user_id % 2}"));
         shardingRuleConfig.getBindingTableGroups().add(new ShardingTableReferenceRuleConfiguration("foo", "t_order,t_order_item"));
         shardingRuleConfig.getShardingAlgorithms().put("ds_inline", new AlgorithmConfiguration("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "ds_${user_id % 2}"))));
         shardingRuleConfig.getShardingAlgorithms().put(
@@ -121,8 +121,8 @@ public final class ShardingRoutingEngineFixtureBuilder {
      */
     public static ShardingRule createBroadcastShardingRule() {
         ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
-        shardingRuleConfig.getTables().add(createInlineTableRuleConfig("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${order_id % 2}", "ds_${user_id % 2}"));
-        shardingRuleConfig.getTables().add(createInlineTableRuleConfig("t_order_item", "ds_${0..1}.t_order_item_${0..1}", "t_order_item_${order_id % 2}", "ds_${user_id % 2}"));
+        shardingRuleConfig.getTables().add(createInlineTableRuleConfiguration("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${order_id % 2}", "ds_${user_id % 2}"));
+        shardingRuleConfig.getTables().add(createInlineTableRuleConfiguration("t_order_item", "ds_${0..1}.t_order_item_${0..1}", "t_order_item_${order_id % 2}", "ds_${user_id % 2}"));
         shardingRuleConfig.getShardingAlgorithms().put("ds_inline", new AlgorithmConfiguration("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "ds_${user_id % 2}"))));
         shardingRuleConfig.getShardingAlgorithms().put(
                 "t_order_inline", new AlgorithmConfiguration("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "t_order_${order_id % 2}"))));
@@ -138,7 +138,7 @@ public final class ShardingRoutingEngineFixtureBuilder {
      */
     public static ShardingRule createHintShardingRule() {
         ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
-        shardingRuleConfig.getTables().add(createTableRuleWithHintConfig());
+        shardingRuleConfig.getTables().add(createTableRuleWithHintConfiguration());
         shardingRuleConfig.getShardingAlgorithms().put("core_hint_fixture", new AlgorithmConfiguration("CORE.HINT.FIXTURE", new Properties()));
         return new ShardingRule(shardingRuleConfig, createDataSources(), mock(InstanceContext.class));
     }
@@ -150,9 +150,9 @@ public final class ShardingRoutingEngineFixtureBuilder {
      */
     public static ShardingRule createMixedShardingRule() {
         ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
-        shardingRuleConfig.getTables().add(createTableRuleConfig("t_hint_ds_test", "ds_${0..1}.t_hint_ds_test_${0..1}",
+        shardingRuleConfig.getTables().add(createTableRuleConfiguration("t_hint_ds_test", "ds_${0..1}.t_hint_ds_test_${0..1}",
                 new HintShardingStrategyConfiguration("core_hint_fixture"), createStandardShardingStrategyConfiguration("t_hint_ds_test_inline", "t_hint_ds_test_${order_id % 2}")));
-        shardingRuleConfig.getTables().add(createTableRuleConfig("t_hint_table_test", "ds_${0..1}.t_hint_table_test_${0..1}",
+        shardingRuleConfig.getTables().add(createTableRuleConfiguration("t_hint_table_test", "ds_${0..1}.t_hint_table_test_${0..1}",
                 createStandardShardingStrategyConfiguration("ds_inline", "ds_${user_id % 2}"), new HintShardingStrategyConfiguration("core_hint_fixture")));
         shardingRuleConfig.getShardingAlgorithms().put("core_hint_fixture", new AlgorithmConfiguration("CORE.HINT.FIXTURE", new Properties()));
         shardingRuleConfig.getShardingAlgorithms().put("ds_inline", new AlgorithmConfiguration("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "ds_${user_id % 2}"))));
@@ -169,10 +169,10 @@ public final class ShardingRoutingEngineFixtureBuilder {
     public static ShardingRule createAllShardingRule() {
         ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
         shardingRuleConfig.setDefaultDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", "ds_inline"));
-        shardingRuleConfig.getTables().add(createInlineTableRuleConfig("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${user_id % 2}", "ds_${user_id % 2}"));
-        shardingRuleConfig.getTables().add(createInlineTableRuleConfig("t_order_item", "ds_${0..1}.t_order_item_${0..1}", "t_order_item_${user_id % 2}", "ds_${user_id % 2}"));
-        shardingRuleConfig.getTables().add(createInlineTableRuleConfig("t_user", "ds_${0..1}.t_user_${0..1}", "t_user_${user_id % 2}", "ds_${user_id % 2}"));
-        shardingRuleConfig.getTables().add(createTableRuleWithHintConfig());
+        shardingRuleConfig.getTables().add(createInlineTableRuleConfiguration("t_order", "ds_${0..1}.t_order_${0..1}", "t_order_${user_id % 2}", "ds_${user_id % 2}"));
+        shardingRuleConfig.getTables().add(createInlineTableRuleConfiguration("t_order_item", "ds_${0..1}.t_order_item_${0..1}", "t_order_item_${user_id % 2}", "ds_${user_id % 2}"));
+        shardingRuleConfig.getTables().add(createInlineTableRuleConfiguration("t_user", "ds_${0..1}.t_user_${0..1}", "t_user_${user_id % 2}", "ds_${user_id % 2}"));
+        shardingRuleConfig.getTables().add(createTableRuleWithHintConfiguration());
         shardingRuleConfig.getBindingTableGroups().add(new ShardingTableReferenceRuleConfiguration("foo", "t_order,t_order_item"));
         shardingRuleConfig.getShardingAlgorithms().put("ds_inline", new AlgorithmConfiguration("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "ds_${user_id % 2}"))));
         shardingRuleConfig.getShardingAlgorithms().put(
@@ -191,7 +191,7 @@ public final class ShardingRoutingEngineFixtureBuilder {
      */
     public static ShardingRule createIntervalTableShardingRule() {
         ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
-        shardingRuleConfig.getTables().add(createTableRuleConfig("t_interval_test", "ds_0.t_interval_test_202101,ds_1.t_interval_test_202102",
+        shardingRuleConfig.getTables().add(createTableRuleConfiguration("t_interval_test", "ds_0.t_interval_test_202101,ds_1.t_interval_test_202102",
                 null, new StandardShardingStrategyConfiguration("create_at", "interval_test")));
         Properties props = PropertiesBuilder.build(
                 new Property("datetime-pattern", "yyyy-MM-dd HH:mm:ss"),
@@ -204,9 +204,9 @@ public final class ShardingRoutingEngineFixtureBuilder {
         return new ShardingRule(shardingRuleConfig, createDataSources(), mock(InstanceContext.class));
     }
-    private static ShardingTableRuleConfiguration createInlineTableRuleConfig(final String tableName,
-                                                                              final String actualDataNodes, final String algorithmExpression, final String dsAlgorithmExpression) {
-        return createTableRuleConfig(tableName, actualDataNodes,
+    private static ShardingTableRuleConfiguration createInlineTableRuleConfiguration(final String tableName,
+                                                                                     final String actualDataNodes, final String algorithmExpression, final String dsAlgorithmExpression) {
+        return createTableRuleConfiguration(tableName, actualDataNodes,
                 createStandardShardingStrategyConfiguration("ds_inline", dsAlgorithmExpression), createStandardShardingStrategyConfiguration(tableName + "_inline", algorithmExpression));
     }
@@ -217,15 +217,15 @@ public final class ShardingRoutingEngineFixtureBuilder {
         return new StandardShardingStrategyConfiguration(shardingColumn, algorithmName);
     }
-    private static ShardingTableRuleConfiguration createTableRuleWithHintConfig() {
+    private static ShardingTableRuleConfiguration createTableRuleWithHintConfiguration() {
         ShardingTableRuleConfiguration result = new ShardingTableRuleConfiguration("t_hint_test", "ds_${0..1}.t_hint_test_${0..1}");
         result.setTableShardingStrategy(new HintShardingStrategyConfiguration("core_hint_fixture"));
         result.setDatabaseShardingStrategy(new HintShardingStrategyConfiguration("core_hint_fixture"));
         return result;
     }
-    private static ShardingTableRuleConfiguration createTableRuleConfig(final String tableName, final String actualDataNodes,
-                                                                        final ShardingStrategyConfiguration dsShardingStrategyConfig, final ShardingStrategyConfiguration tableShardingStrategyConfig) {
+    private static ShardingTableRuleConfiguration createTableRuleConfiguration(final String tableName, final String actualDataNodes, final ShardingStrategyConfiguration dsShardingStrategyConfig,
+                                                                               final ShardingStrategyConfiguration tableShardingStrategyConfig) {
         ShardingTableRuleConfiguration result = new ShardingTableRuleConfiguration(tableName, actualDataNodes);
         result.setDatabaseShardingStrategy(dsShardingStrategyConfig);
         result.setTableShardingStrategy(tableShardingStrategyConfig);
diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java
index dd412dac5eb..32f22b4bad1 100644
--- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java
+++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java
@@ -245,7 +245,7 @@ class ShardingRuleTest {
     }
     @Test
-    void assertGetBindingTableRuleForNotConfig() {
+    void assertGetBindingTableRuleForNotConfiguration() {
         assertFalse(createMinimumShardingRule().findBindingTableRule("logic_Table").isPresent());
     }
diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableReferenceRuleExecutorTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableReferenceRuleExecutorTest.java
index f547a87685f..dfcf0e46ab7 100644
--- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableReferenceRuleExecutorTest.java
+++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableReferenceRuleExecutorTest.java
@@ -66,7 +66,7 @@ class CreateShardingTableReferenceRuleExecutorTest {
     @Test
     void assertCheckSQLStatementWithDuplicateTables() {
         ShardingRule rule = mock(ShardingRule.class);
-        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfig());
+        when(rule.getConfiguration()).thenReturn(getCurrentRuleConfiguration());
         executor.setRule(rule);
         assertThrows(DuplicateRuleException.class, () -> executor.checkBeforeUpdate(createSQLStatement(false, "foo", "t_order,t_order_item")));
     }
@@ -74,7 +74,7 @@ class CreateShardingTableReferenceRuleExecutorTest {
     @Test
     void assertUpdateWithIfNotExists() {
         CreateShardingTableReferenceRuleStatement sqlStatement = createSQLStatement(true, "foo", "t_order,t_order_item");
-        ShardingRuleConfiguration currentRuleConfig = getCurrentRuleConfig();
+        ShardingRuleConfiguration currentRuleConfig = getCurrentRuleConfiguration();
         ShardingRule rule = mock(ShardingRule.class);
         when(rule.getConfiguration()).thenReturn(currentRuleConfig);
         executor.setRule(rule);
@@ -84,7 +84,7 @@ class CreateShardingTableReferenceRuleExecutorTest {
         assertThat(referenceRuleConfigs.size(), is(0));
     }
-    private ShardingRuleConfiguration getCurrentRuleConfig() {
+    private ShardingRuleConfiguration getCurrentRuleConfiguration() {
         ShardingRuleConfiguration result = new ShardingRuleConfiguration();
         result.getTables().add(new ShardingTableRuleConfiguration("t_order", "ds.t_order_${0..2}"));
         result.getTables().add(new ShardingTableRuleConfiguration("t_order_item", "ds.t_order_item_${0..2}"));
diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/state/datasource/DataSourceStateManager.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/state/datasource/DataSourceStateManager.java
index 594b9042ab1..30935c38d53 100644
--- a/infra/common/src/main/java/org/apache/shardingsphere/infra/state/datasource/DataSourceStateManager.java
+++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/state/datasource/DataSourceStateManager.java
@@ -96,7 +96,7 @@ public final class DataSourceStateManager {
      * Get enabled data sources.
      *
      * @param databaseName database name
-     * @param databaseConfig database config
+     * @param databaseConfig database configuration
      * @return enabled data sources
      */
     public Map<String, DataSource> getEnabledDataSources(final String databaseName, final DatabaseConfiguration databaseConfig) {
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/context/PipelineProcessContext.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/context/PipelineProcessContext.java
index b63c29eab64..87ebbacd7f6 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/context/PipelineProcessContext.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/context/PipelineProcessContext.java
@@ -29,5 +29,5 @@ public interface PipelineProcessContext extends AutoCloseable {
      *
      * @return pipeline process config
      */
-    PipelineProcessConfiguration getProcessConfig();
+    PipelineProcessConfiguration getProcessConfiguration();
 }
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/context/TransmissionProcessContext.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/context/TransmissionProcessContext.java
index 69be0097f05..a06e8894ec7 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/context/TransmissionProcessContext.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/context/TransmissionProcessContext.java
@@ -36,7 +36,7 @@ import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader;
 public final class TransmissionProcessContext implements PipelineProcessContext {
     @Getter
-    private final PipelineProcessConfiguration processConfig;
+    private final PipelineProcessConfiguration processConfiguration;
     @Getter
     private final JobRateLimitAlgorithm readRateLimitAlgorithm;
@@ -51,11 +51,11 @@ public final class TransmissionProcessContext implements PipelineProcessContext
     private final PipelineLazyInitializer<ExecuteEngine> incrementalExecuteEngineLazyInitializer;
     public TransmissionProcessContext(final String jobId, final PipelineProcessConfiguration originalProcessConfig) {
-        this.processConfig = PipelineProcessConfigurationUtils.convertWithDefaultValue(originalProcessConfig);
-        PipelineReadConfiguration readConfig = processConfig.getRead();
+        processConfiguration = PipelineProcessConfigurationUtils.convertWithDefaultValue(originalProcessConfig);
+        PipelineReadConfiguration readConfig = processConfiguration.getRead();
         AlgorithmConfiguration readRateLimiter = readConfig.getRateLimiter();
         readRateLimitAlgorithm = null == readRateLimiter ? null : TypedSPILoader.getService(JobRateLimitAlgorithm.class, readRateLimiter.getType(), readRateLimiter.getProps());
-        PipelineWriteConfiguration writeConfig = processConfig.getWrite();
+        PipelineWriteConfiguration writeConfig = processConfiguration.getWrite();
         AlgorithmConfiguration writeRateLimiter = writeConfig.getRateLimiter();
         writeRateLimitAlgorithm = null == writeRateLimiter ? null : TypedSPILoader.getService(JobRateLimitAlgorithm.class, writeRateLimiter.getType(), writeRateLimiter.getProps());
         inventoryDumperExecuteEngineLazyInitializer = new PipelineLazyInitializer<ExecuteEngine>() {
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/inventory/InventoryTaskSplitter.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/inventory/InventoryTaskSplitter.java
index 8edf0499680..cfe3c9e87d6 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/inventory/InventoryTaskSplitter.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/inventory/InventoryTaskSplitter.java
@@ -83,7 +83,7 @@ public final class InventoryTaskSplitter {
         TransmissionProcessContext processContext = jobItemContext.getJobProcessContext();
         for (InventoryDumperContext each : splitInventoryDumperContext(jobItemContext)) {
             AtomicReference<IngestPosition> position = new AtomicReference<>(each.getCommonContext().getPosition());
-            PipelineChannel channel = PipelineTaskUtils.createInventoryChannel(processContext.getProcessConfig().getStreamChannel(), importerConfig.getBatchSize(), position);
+            PipelineChannel channel = PipelineTaskUtils.createInventoryChannel(processContext.getProcessConfiguration().getStreamChannel(), importerConfig.getBatchSize(), position);
             Dumper dumper = new InventoryDumper(each, channel, sourceDataSource, jobItemContext.getSourceMetaDataLoader());
             Importer importer = new SingleChannelConsumerImporter(channel, importerConfig.getBatchSize(), 3000L, jobItemContext.getSink(), jobItemContext);
             result.add(new InventoryTask(PipelineTaskUtils.generateInventoryTaskId(each), processContext.getInventoryDumperExecuteEngine(),
@@ -132,7 +132,7 @@ public final class InventoryTaskSplitter {
         }
         Collection<InventoryDumperContext> result = new LinkedList<>();
         TransmissionProcessContext jobProcessContext = jobItemContext.getJobProcessContext();
-        PipelineReadConfiguration readConfig = jobProcessContext.getProcessConfig().getRead();
+        PipelineReadConfiguration readConfig = jobProcessContext.getProcessConfiguration().getRead();
         int batchSize = readConfig.getBatchSize();
         JobRateLimitAlgorithm rateLimitAlgorithm = jobProcessContext.getReadRateLimitAlgorithm();
         Collection<IngestPosition> inventoryPositions = getInventoryPositions(dumperContext, jobItemContext, dataSource);
@@ -188,7 +188,7 @@ public final class InventoryTaskSplitter {
         }
         Collection<IngestPosition> result = new LinkedList<>();
         Range<Long> uniqueKeyValuesRange = getUniqueKeyValuesRange(jobItemContext, dataSource, dumperContext);
-        int shardingSize = jobItemContext.getJobProcessContext().getProcessConfig().getRead().getShardingSize();
+        int shardingSize = jobItemContext.getJobProcessContext().getProcessConfiguration().getRead().getShardingSize();
         long splitCount = tableRecordsCount / shardingSize + (tableRecordsCount % shardingSize > 0 ? 1 : 0);
         long interval = (uniqueKeyValuesRange.getMaximum() - uniqueKeyValuesRange.getMinimum()) / splitCount;
         IntervalToRangeIterator rangeIterator = new IntervalToRangeIterator(uniqueKeyValuesRange.getMinimum(), uniqueKeyValuesRange.getMaximum(), interval);
diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJob.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJob.java
index 5ab860e1735..e25d5dd249a 100644
--- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJob.java
+++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJob.java
@@ -94,7 +94,7 @@ public final class CDCJob extends AbstractInseparablePipelineJob<CDCJobItemConte
         PipelineProcessConfiguration processConfig = PipelineProcessConfigurationUtils.convertWithDefaultValue(
                 processConfigPersistService.load(PipelineJobIdUtils.parseContextKey(jobConfig.getJobId()), "STREAMING"));
         TransmissionProcessContext jobProcessContext = new TransmissionProcessContext(jobConfig.getJobId(), processConfig);
-        CDCTaskConfiguration taskConfig = buildTaskConfiguration((CDCJobConfiguration) jobConfig, shardingItem, jobProcessContext.getProcessConfig());
+        CDCTaskConfiguration taskConfig = buildTaskConfiguration((CDCJobConfiguration) jobConfig, shardingItem, jobProcessContext.getProcessConfiguration());
         return new CDCJobItemContext((CDCJobConfiguration) jobConfig, shardingItem, initProgress.orElse(null), jobProcessContext, taskConfig, getJobRunnerManager().getDataSourceManager(), sink);
     }
diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java
index d3bdb426d83..70874a8c6d6 100644
--- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java
+++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java
@@ -118,7 +118,7 @@ public final class CDCJobPreparer {
         for (InventoryDumperContext each : new InventoryTaskSplitter(jobItemContext.getSourceDataSource(), new InventoryDumperContext(taskConfig.getDumperContext().getCommonContext()), importerConfig)
                 .splitInventoryDumperContext(jobItemContext)) {
             AtomicReference<IngestPosition> position = new AtomicReference<>(each.getCommonContext().getPosition());
-            PipelineChannel channel = PipelineTaskUtils.createInventoryChannel(processContext.getProcessConfig().getStreamChannel(), importerConfig.getBatchSize(), position);
+            PipelineChannel channel = PipelineTaskUtils.createInventoryChannel(processContext.getProcessConfiguration().getStreamChannel(), importerConfig.getBatchSize(), position);
             if (!(position.get() instanceof IngestFinishedPosition)) {
                 channelProgressPairs.add(new CDCChannelProgressPair(channel, jobItemContext));
             }
@@ -138,7 +138,7 @@ public final class CDCJobPreparer {
         CDCTaskConfiguration taskConfig = jobItemContext.getTaskConfig();
         IncrementalDumperContext dumperContext = taskConfig.getDumperContext();
         IncrementalTaskProgress taskProgress = PipelineTaskUtils.createIncrementalTaskProgress(dumperContext.getCommonContext().getPosition(), jobItemContext.getInitProgress());
-        PipelineChannel channel = PipelineTaskUtils.createIncrementalChannel(jobItemContext.getJobProcessContext().getProcessConfig().getStreamChannel(), taskProgress);
+        PipelineChannel channel = PipelineTaskUtils.createIncrementalChannel(jobItemContext.getJobProcessContext().getProcessConfiguration().getStreamChannel(), taskProgress);
         channelProgressPairs.add(new CDCChannelProgressPair(channel, jobItemContext));
         Dumper dumper = DatabaseTypedSPILoader.getService(DialectIncrementalDumperCreator.class, dumperContext.getCommonContext().getDataSourceConfig().getDatabaseType())
                 .createIncrementalDumper(dumperContext, dumperContext.getCommonContext().getPosition(), channel, jobItemContext.getSourceMetaDataLoader());
diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckProcessContext.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckProcessContext.java
index 2d279047607..3d750ebd0c2 100644
--- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckProcessContext.java
+++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckProcessContext.java
@@ -45,7 +45,7 @@ public final class ConsistencyCheckProcessContext implements PipelineProcessCont
     }
     @Override
-    public PipelineProcessConfiguration getProcessConfig() {
+    public PipelineProcessConfiguration getProcessConfiguration() {
         return PipelineProcessConfigurationUtils.convertWithDefaultValue(null);
     }
diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJob.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJob.java
index 4b9a4426b24..6ae23369c6c 100644
--- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJob.java
+++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJob.java
@@ -62,7 +62,7 @@ public final class MigrationJob extends AbstractSeparablePipelineJob<MigrationJo
     @Override
     protected MigrationJobItemContext buildJobItemContext(final MigrationJobConfiguration jobConfig,
                                                           final int shardingItem, final TransmissionJobItemProgress jobItemProgress, final TransmissionProcessContext jobProcessContext) {
-        MigrationTaskConfiguration taskConfig = buildTaskConfiguration(jobConfig, shardingItem, jobProcessContext.getProcessConfig());
+        MigrationTaskConfiguration taskConfig = buildTaskConfiguration(jobConfig, shardingItem, jobProcessContext.getProcessConfiguration());
         return new MigrationJobItemContext(jobConfig, shardingItem, jobItemProgress, jobProcessContext, taskConfig, getJobRunnerManager().getDataSourceManager());
     }
diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/preparer/MigrationJobPreparer.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/preparer/MigrationJobPreparer.java
index e76e2509756..b0c695fc383 100644
--- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/preparer/MigrationJobPreparer.java
+++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/preparer/MigrationJobPreparer.java
@@ -196,7 +196,7 @@ public final class MigrationJobPreparer {
         IncrementalDumperContext dumperContext = taskConfig.getDumperContext();
         ExecuteEngine incrementalExecuteEngine = jobItemContext.getJobProcessContext().getIncrementalExecuteEngine();
         IncrementalTaskProgress taskProgress = PipelineTaskUtils.createIncrementalTaskProgress(dumperContext.getCommonContext().getPosition(), jobItemContext.getInitProgress());
-        PipelineChannel channel = PipelineTaskUtils.createIncrementalChannel(jobItemContext.getJobProcessContext().getProcessConfig().getStreamChannel(), taskProgress);
+        PipelineChannel channel = PipelineTaskUtils.createIncrementalChannel(jobItemContext.getJobProcessContext().getProcessConfiguration().getStreamChannel(), taskProgress);
         Dumper dumper = DatabaseTypedSPILoader.getService(DialectIncrementalDumperCreator.class, dumperContext.getCommonContext().getDataSourceConfig().getDatabaseType())
                 .createIncrementalDumper(dumperContext, dumperContext.getCommonContext().getPosition(), channel, sourceMetaDataLoader);
         Collection<Importer> importers = Collections.singletonList(new SingleChannelConsumerImporter(channel, 1, 5L, jobItemContext.getSink(), jobItemContext));
diff --git a/mode/type/cluster/repository/provider/zookeeper/src/test/java/org/apache/shardingsphere/mode/repository/cluster/zookeeper/ZookeeperRepositoryTest.java b/mode/type/cluster/repository/provider/zookeeper/src/test/java/org/apache/shardingsphere/mode/repository/cluster/zookeeper/ZookeeperRepositoryTest.java
index cd5824293bf..6d9f952818f 100644
--- a/mode/type/cluster/repository/provider/zookeeper/src/test/java/org/apache/shardingsphere/mode/repository/cluster/zookeeper/ZookeeperRepositoryTest.java
+++ b/mode/type/cluster/repository/provider/zookeeper/src/test/java/org/apache/shardingsphere/mode/repository/cluster/zookeeper/ZookeeperRepositoryTest.java
@@ -182,7 +182,7 @@ class ZookeeperRepositoryTest {
     }
     @Test
-    void assertBuildCuratorClientWithCustomConfig() {
+    void assertBuildCuratorClientWithCustomConfiguration() {
         Properties props = PropertiesBuilder.build(
                 new Property(ZookeeperPropertyKey.RETRY_INTERVAL_MILLISECONDS.getKey(), "1000"),
                 new Property(ZookeeperPropertyKey.MAX_RETRIES.getKey(), "1"),
diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataExecutor.java
index f6e1f8826ac..d90f79aff93 100644
--- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataExecutor.java
+++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataExecutor.java
@@ -63,11 +63,11 @@ public final class ImportMetaDataExecutor implements DistSQLUpdateExecutor<Impor
         }
         ExportedClusterInfo exportedClusterInfo = JsonUtils.fromJsonString(jsonMetaDataConfig, ExportedClusterInfo.class);
         ExportedMetaData exportedMetaData = exportedClusterInfo.getMetaData();
-        importServerConfig(contextManager, exportedMetaData);
+        importServerConfiguration(contextManager, exportedMetaData);
         importDatabase(exportedMetaData);
     }
-    private void importServerConfig(final ContextManager contextManager, final ExportedMetaData exportedMetaData) {
+    private void importServerConfiguration(final ContextManager contextManager, final ExportedMetaData exportedMetaData) {
         YamlProxyServerConfiguration yamlServerConfig = YamlEngine.unmarshal(exportedMetaData.getRules() + System.lineSeparator() + exportedMetaData.getProps(), YamlProxyServerConfiguration.class);
         if (null == yamlServerConfig) {
             return;