This is an automated email from the ASF dual-hosted git repository.

zhonghongsheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 0e42ac0bd64 Add StringPkSmallOrderDAO for pipeline E2E (#37862)
0e42ac0bd64 is described below

commit 0e42ac0bd64916608b6c9782797f97cf3b0c23d4
Author: Hongsheng Zhong <[email protected]>
AuthorDate: Tue Jan 27 19:01:06 2026 +0800

    Add StringPkSmallOrderDAO for pipeline E2E (#37862)
    
    * Add StringPkSmallOrderDAO
    
    * Improve StringPkSmallOrderDAO.batchInsert: add generateSmallOrderInsertData
    
    * Refactor E2E: use StringPkSmallOrderDAO
    
    * Add t_unsigned_int column
    
    * Extract StringPkSmallOrderDAO.insert
---
 .../migration/general/RulesMigrationE2EIT.java     | 11 ++---
 .../primarykey/MariaDBMigrationE2EIT.java          | 16 ++-----
 .../primarykey/TextPrimaryKeyMigrationE2EIT.java   | 15 ++----
 .../small/StringPkSmallOrderDAO.java}              | 54 +++++++++++++---------
 .../MySQLStringPkSmallOrderSQLBuilder.java         | 40 ++++++++++++++++
 .../OpenGaussStringPkSmallOrderSQLBuilder.java     | 39 ++++++++++++++++
 .../PostgreSQLStringPkSmallOrderSQLBuilder.java    | 39 ++++++++++++++++
 .../sqlbuilder/StringPkSmallOrderSQLBuilder.java   | 41 ++++++++++++++++
 .../pipeline/dao/orderitem/IntPkOrderItemDAO.java  | 10 ++--
 .../framework/helper/PipelineCaseHelper.java       | 40 ++++++++++++----
 .../pipeline/util/DataSourceExecuteUtils.java      |  6 +--
 ...r.small.sqlbuilder.StringPkSmallOrderSQLBuilder | 20 ++++++++
 12 files changed, 262 insertions(+), 69 deletions(-)
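
For reference, the new DAO consolidates the table creation and data seeding that the migration E2E tests previously did with raw SQL and PipelineCaseHelper. A minimal usage sketch, assembled from the hunks below (containerComposer and SOURCE_TABLE_NAME come from the existing test setup):

    // The SQL builder implementation is resolved per database type via SPI inside the DAO.
    StringPkSmallOrderDAO orderDAO = new StringPkSmallOrderDAO(
            containerComposer.getSourceDataSource(), containerComposer.getDatabaseType(), SOURCE_TABLE_NAME);
    orderDAO.createTable();
    orderDAO.batchInsert(PipelineContainerComposer.TABLE_INIT_ROW_COUNT);
    // Single-row insert used while verifying the incremental stage.
    orderDAO.insert("a1", 1, "OK");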

diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/general/RulesMigrationE2EIT.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/general/RulesMigrationE2EIT.java
index c9a4c32c9b2..aa12f6bae6e 100644
--- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/general/RulesMigrationE2EIT.java
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/general/RulesMigrationE2EIT.java
@@ -18,10 +18,9 @@
 package org.apache.shardingsphere.test.e2e.operation.pipeline.cases.migration.general;
 
 import org.apache.shardingsphere.data.pipeline.scenario.migration.MigrationJobType;
-import org.apache.shardingsphere.infra.algorithm.keygen.uuid.UUIDKeyGenerateAlgorithm;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.cases.PipelineContainerComposer;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.cases.migration.AbstractMigrationE2EIT;
-import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.helper.PipelineCaseHelper;
+import org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.StringPkSmallOrderDAO;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.param.PipelineE2ECondition;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.param.PipelineE2ESettings;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.param.PipelineE2ESettings.PipelineE2EDatabaseSettings;
@@ -33,7 +32,6 @@ import org.junit.jupiter.api.extension.ExtensionContext;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.ArgumentsSource;
 
-import java.sql.Connection;
 import java.util.concurrent.Callable;
 
 import static org.hamcrest.MatcherAssert.assertThat;
@@ -76,10 +74,9 @@ class RulesMigrationE2EIT extends AbstractMigrationE2EIT {
     }
     
     private void assertMigrationSuccess(final PipelineContainerComposer containerComposer, final Callable<Void> addRuleFn) throws Exception {
-        containerComposer.createSourceOrderTable(SOURCE_TABLE_NAME);
-        try (Connection connection = containerComposer.getSourceDataSource().getConnection()) {
-            PipelineCaseHelper.batchInsertOrderRecordsWithGeneralColumns(connection, new UUIDKeyGenerateAlgorithm(), SOURCE_TABLE_NAME, PipelineContainerComposer.TABLE_INIT_ROW_COUNT);
-        }
+        StringPkSmallOrderDAO orderDAO = new StringPkSmallOrderDAO(containerComposer.getSourceDataSource(), containerComposer.getDatabaseType(), SOURCE_TABLE_NAME);
+        orderDAO.createTable();
+        orderDAO.batchInsert(PipelineContainerComposer.TABLE_INIT_ROW_COUNT);
         addMigrationSourceResource(containerComposer);
         addMigrationTargetResource(containerComposer);
         if (null != addRuleFn) {
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/primarykey/MariaDBMigrationE2EIT.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/primarykey/MariaDBMigrationE2EIT.java
index 37a58ddb55e..17ec9e9ee4b 100644
--- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/primarykey/MariaDBMigrationE2EIT.java
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/primarykey/MariaDBMigrationE2EIT.java
@@ -18,11 +18,9 @@
 package org.apache.shardingsphere.test.e2e.operation.pipeline.cases.migration.primarykey;
 
 import org.apache.shardingsphere.data.pipeline.scenario.migration.MigrationJobType;
-import org.apache.shardingsphere.infra.algorithm.keygen.spi.KeyGenerateAlgorithm;
-import org.apache.shardingsphere.infra.algorithm.keygen.uuid.UUIDKeyGenerateAlgorithm;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.cases.PipelineContainerComposer;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.cases.migration.AbstractMigrationE2EIT;
-import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.helper.PipelineCaseHelper;
+import org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.StringPkSmallOrderDAO;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.param.PipelineE2ECondition;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.param.PipelineE2ESettings;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.param.PipelineE2ESettings.PipelineE2EDatabaseSettings;
@@ -36,7 +34,6 @@ import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.ArgumentsSource;
 
 import javax.sql.DataSource;
-import java.sql.Connection;
 import java.sql.SQLException;
 
 import static org.hamcrest.MatcherAssert.assertThat;
@@ -57,12 +54,9 @@ class MariaDBMigrationE2EIT extends AbstractMigrationE2EIT {
     @ArgumentsSource(PipelineE2ETestCaseArgumentsProvider.class)
     void assertMigrationSuccess(final PipelineTestParameter testParam) throws SQLException {
         try (PipelineContainerComposer containerComposer = new PipelineContainerComposer(testParam)) {
-            String sqlPattern = "CREATE TABLE `%s` (`order_id` VARCHAR(64) NOT NULL, `user_id` INT NOT NULL, `status` varchar(255), PRIMARY KEY (`order_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4";
-            containerComposer.sourceExecuteWithLog(String.format(sqlPattern, SOURCE_TABLE_NAME));
-            try (Connection connection = containerComposer.getSourceDataSource().getConnection()) {
-                KeyGenerateAlgorithm generateAlgorithm = new UUIDKeyGenerateAlgorithm();
-                PipelineCaseHelper.batchInsertOrderRecordsWithGeneralColumns(connection, generateAlgorithm, SOURCE_TABLE_NAME, PipelineContainerComposer.TABLE_INIT_ROW_COUNT);
-            }
+            StringPkSmallOrderDAO orderDAO = new StringPkSmallOrderDAO(containerComposer.getSourceDataSource(), containerComposer.getDatabaseType(), SOURCE_TABLE_NAME);
+            orderDAO.createTable();
+            orderDAO.batchInsert(PipelineContainerComposer.TABLE_INIT_ROW_COUNT);
             PipelineE2EDistSQLFacade distSQLFacade = new PipelineE2EDistSQLFacade(containerComposer, new MigrationJobType());
             distSQLFacade.alterPipelineRule();
             addMigrationSourceResource(containerComposer);
@@ -71,7 +65,7 @@ class MariaDBMigrationE2EIT extends AbstractMigrationE2EIT {
             startMigration(containerComposer, SOURCE_TABLE_NAME, TARGET_TABLE_NAME);
             String jobId = distSQLFacade.listJobIds().get(0);
             distSQLFacade.waitJobPreparingStageFinished(jobId);
-            containerComposer.sourceExecuteWithLog("INSERT INTO t_order 
(order_id, user_id, status) VALUES ('a1', 1, 'OK')");
+            orderDAO.insert("a1", 1, "OK");
             DataSource jdbcDataSource = containerComposer.generateShardingSphereDataSourceFromProxy();
             containerComposer.assertRecordExists(jdbcDataSource, "t_order", "a1");
             distSQLFacade.waitJobIncrementalStageFinished(jobId);
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/primarykey/TextPrimaryKeyMigrationE2EIT.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/primarykey/TextPrimaryKeyMigrationE2EIT.java
index 7d5e32e01a5..45be39c49c4 100644
--- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/primarykey/TextPrimaryKeyMigrationE2EIT.java
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/migration/primarykey/TextPrimaryKeyMigrationE2EIT.java
@@ -20,10 +20,9 @@ package org.apache.shardingsphere.test.e2e.operation.pipeline.cases.migration.pr
 import com.google.common.collect.ImmutableMap;
 import org.apache.shardingsphere.data.pipeline.scenario.migration.MigrationJobType;
 import org.apache.shardingsphere.database.connector.mysql.type.MySQLDatabaseType;
-import org.apache.shardingsphere.infra.algorithm.keygen.uuid.UUIDKeyGenerateAlgorithm;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.cases.PipelineContainerComposer;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.cases.migration.AbstractMigrationE2EIT;
-import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.helper.PipelineCaseHelper;
+import org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.StringPkSmallOrderDAO;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.param.PipelineE2ECondition;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.param.PipelineE2ESettings;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.param.PipelineE2ESettings.PipelineE2EDatabaseSettings;
@@ -35,7 +34,6 @@ import org.junit.jupiter.api.extension.ExtensionContext;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.ArgumentsSource;
 
-import java.sql.Connection;
 import java.sql.SQLException;
 
 import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -54,11 +52,9 @@ class TextPrimaryKeyMigrationE2EIT extends AbstractMigrationE2EIT {
     @ArgumentsSource(PipelineE2ETestCaseArgumentsProvider.class)
     void assertTextPrimaryMigrationSuccess(final PipelineTestParameter testParam) throws SQLException {
         try (PipelineContainerComposer containerComposer = new PipelineContainerComposer(testParam)) {
-            containerComposer.createSourceOrderTable(getSourceTableName(containerComposer));
-            try (Connection connection = containerComposer.getSourceDataSource().getConnection()) {
-                UUIDKeyGenerateAlgorithm keyGenerateAlgorithm = new UUIDKeyGenerateAlgorithm();
-                PipelineCaseHelper.batchInsertOrderRecordsWithGeneralColumns(connection, keyGenerateAlgorithm, getSourceTableName(containerComposer), PipelineContainerComposer.TABLE_INIT_ROW_COUNT);
-            }
+            StringPkSmallOrderDAO orderDAO = new StringPkSmallOrderDAO(containerComposer.getSourceDataSource(), containerComposer.getDatabaseType(), getSourceTableName(containerComposer));
+            orderDAO.createTable();
+            orderDAO.batchInsert(PipelineContainerComposer.TABLE_INIT_ROW_COUNT);
             PipelineE2EDistSQLFacade distSQLFacade = new PipelineE2EDistSQLFacade(containerComposer, new MigrationJobType());
             distSQLFacade.alterPipelineRule();
             addMigrationSourceResource(containerComposer);
@@ -66,8 +62,7 @@ class TextPrimaryKeyMigrationE2EIT extends AbstractMigrationE2EIT {
             createTargetOrderTableRule(containerComposer);
             startMigration(containerComposer, getSourceTableName(containerComposer), TARGET_TABLE_NAME);
             String jobId = distSQLFacade.listJobIds().get(0);
-            containerComposer.sourceExecuteWithLog(
-                    String.format("INSERT INTO %s (order_id,user_id,status) VALUES (%s, %s, '%s')", getSourceTableName(containerComposer), "1000000000", 1, "afterStop"));
+            orderDAO.insert("1000000000", 1, "afterStop");
             distSQLFacade.waitJobIncrementalStageFinished(jobId);
             distSQLFacade.startCheck(jobId, "DATA_MATCH", ImmutableMap.of("chunk-size", "300", "streaming-range-type", "SMALL"));
             distSQLFacade.verifyCheck(jobId);
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/orderitem/IntPkOrderItemDAO.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/StringPkSmallOrderDAO.java
similarity index 54%
copy from test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/orderitem/IntPkOrderItemDAO.java
copy to test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/StringPkSmallOrderDAO.java
index c4824b737d0..0e355f0f927 100644
--- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/orderitem/IntPkOrderItemDAO.java
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/StringPkSmallOrderDAO.java
@@ -15,69 +15,77 @@
  * limitations under the License.
  */
 
-package org.apache.shardingsphere.test.e2e.operation.pipeline.dao.orderitem;
+package org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small;
 
 import lombok.extern.slf4j.Slf4j;
 import org.apache.shardingsphere.database.connector.core.spi.DatabaseTypedSPILoader;
 import org.apache.shardingsphere.database.connector.core.type.DatabaseType;
-import org.apache.shardingsphere.test.e2e.operation.pipeline.dao.orderitem.sqlbuilder.IntPkOrderItemSQLBuilder;
+import org.apache.shardingsphere.infra.algorithm.keygen.uuid.UUIDKeyGenerateAlgorithm;
+import org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder.StringPkSmallOrderSQLBuilder;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.framework.helper.PipelineCaseHelper;
-import org.apache.shardingsphere.test.e2e.operation.pipeline.util.AutoIncrementKeyGenerateAlgorithm;
 import org.apache.shardingsphere.test.e2e.operation.pipeline.util.DataSourceExecuteUtils;
 
 import javax.sql.DataSource;
 import java.sql.SQLException;
 import java.util.List;
 
+/**
+ * String PK small order DAO. Small table means the table has few columns.
+ */
 @Slf4j
-public final class IntPkOrderItemDAO {
+public final class StringPkSmallOrderDAO {
     
     private final DataSource dataSource;
     
-    private final IntPkOrderItemSQLBuilder sqlBuilder;
+    private final DatabaseType databaseType;
+    
+    private final StringPkSmallOrderSQLBuilder sqlBuilder;
     
-    public IntPkOrderItemDAO(final DataSource dataSource, final DatabaseType databaseType) {
+    private final String tableName;
+    
+    public StringPkSmallOrderDAO(final DataSource dataSource, final DatabaseType databaseType, final String tableName) {
         this.dataSource = dataSource;
-        sqlBuilder = DatabaseTypedSPILoader.getService(IntPkOrderItemSQLBuilder.class, databaseType);
+        this.databaseType = databaseType;
+        this.sqlBuilder = DatabaseTypedSPILoader.getService(StringPkSmallOrderSQLBuilder.class, databaseType);
+        this.tableName = tableName;
     }
     
     /**
-     * Create order_item table.
+     * Create order table.
      *
      * @throws SQLException SQL exception
      */
     public void createTable() throws SQLException {
-        String sql = sqlBuilder.buildCreateTableSQL();
-        log.info("Create order_item table SQL: {}", sql);
+        String sql = sqlBuilder.buildCreateTableSQL(tableName);
+        log.info("Create string pk small order table SQL: {}", sql);
         DataSourceExecuteUtils.execute(dataSource, sql);
     }
     
     /**
-     * Batch insert order items.
+     * Batch insert orders.
      *
      * @param insertRows insert rows
      * @throws SQLException SQL exception
      */
     public void batchInsert(final int insertRows) throws SQLException {
-        List<Object[]> params = PipelineCaseHelper.generateOrderItemInsertData(new AutoIncrementKeyGenerateAlgorithm(), insertRows);
-        String sql = sqlBuilder.buildPreparedInsertSQL();
-        log.info("Batch insert order_item SQL: {}, params size: {}", sql, 
params.size());
-        DataSourceExecuteUtils.execute(dataSource, sql, params);
+        List<Object[]> paramsList = PipelineCaseHelper.generateSmallOrderInsertData(new UUIDKeyGenerateAlgorithm(), insertRows);
+        String sql = sqlBuilder.buildPreparedInsertSQL(tableName);
+        log.info("Batch insert string pk small order SQL: {}, params list 
size: {}", sql, paramsList.size());
+        DataSourceExecuteUtils.execute(dataSource, sql, paramsList);
     }
     
     /**
-     * Insert order item.
+     * Insert order.
      *
-     * @param itemId item id
-     * @param orderId order id
-     * @param userId user id
+     * @param orderId order ID
+     * @param userId user ID
      * @param status status
      * @throws SQLException SQL exception
      */
-    public void insert(final long itemId, final long orderId, final int userId, final String status) throws SQLException {
-        String sql = sqlBuilder.buildPreparedInsertSQL();
-        Object[] params = new Object[]{itemId, orderId, userId, status};
-        log.info("Insert order_item SQL: {}, params: {}", sql, params);
+    public void insert(final String orderId, final int userId, final String status) throws SQLException {
+        String sql = sqlBuilder.buildPreparedInsertSQL(tableName);
+        Object[] params = new Object[]{orderId, userId, status};
+        log.info("Insert string pk small order SQL: {}, params: {}", sql, 
params);
         DataSourceExecuteUtils.execute(dataSource, sql, params);
     }
 }
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/MySQLStringPkSmallOrderSQLBuilder.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/MySQLStringPkSmallOrderSQLBuilder.java
new file mode 100644
index 00000000000..ca68a42234f
--- /dev/null
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/MySQLStringPkSmallOrderSQLBuilder.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder;
+
+public final class MySQLStringPkSmallOrderSQLBuilder implements StringPkSmallOrderSQLBuilder {
+    
+    @Override
+    public String buildCreateTableSQL(final String tableName) {
+        return String.format("""
+                CREATE TABLE `%s` (
+                `order_id` varchar(255) NOT NULL COMMENT 'pk id',
+                `user_id` INT NOT NULL,
+                `status` varchar(255) NULL,
+                `t_unsigned_int` int UNSIGNED NULL,
+                PRIMARY KEY ( `order_id` ),
+                INDEX ( `user_id` )
+                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci
+                """, tableName);
+    }
+    
+    @Override
+    public String getDatabaseType() {
+        return "MySQL";
+    }
+}
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/OpenGaussStringPkSmallOrderSQLBuilder.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/OpenGaussStringPkSmallOrderSQLBuilder.java
new file mode 100644
index 00000000000..c9db3c83390
--- /dev/null
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/OpenGaussStringPkSmallOrderSQLBuilder.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder;
+
+public final class OpenGaussStringPkSmallOrderSQLBuilder implements StringPkSmallOrderSQLBuilder {
+    
+    @Override
+    public String buildCreateTableSQL(final String tableName) {
+        return String.format("""
+                CREATE TABLE %s (
+                order_id varchar(255) NOT NULL,
+                user_id int NOT NULL,
+                status varchar(255) NULL,
+                t_unsigned_int int NULL,
+                PRIMARY KEY (order_id)
+                )
+                """, tableName);
+    }
+    
+    @Override
+    public String getDatabaseType() {
+        return "openGauss";
+    }
+}
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/PostgreSQLStringPkSmallOrderSQLBuilder.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/PostgreSQLStringPkSmallOrderSQLBuilder.java
new file mode 100644
index 00000000000..d730ee12ddc
--- /dev/null
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/PostgreSQLStringPkSmallOrderSQLBuilder.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder;
+
+public final class PostgreSQLStringPkSmallOrderSQLBuilder implements StringPkSmallOrderSQLBuilder {
+    
+    @Override
+    public String buildCreateTableSQL(final String tableName) {
+        return String.format("""
+                CREATE TABLE %s (
+                order_id varchar(255) NOT NULL,
+                user_id int NOT NULL,
+                status varchar(255) NULL,
+                t_unsigned_int int NULL,
+                PRIMARY KEY (order_id)
+                )
+                """, tableName);
+    }
+    
+    @Override
+    public String getDatabaseType() {
+        return "PostgreSQL";
+    }
+}
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/StringPkSmallOrderSQLBuilder.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/StringPkSmallOrderSQLBuilder.java
new file mode 100644
index 00000000000..0331ab37fbf
--- /dev/null
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/order/small/sqlbuilder/StringPkSmallOrderSQLBuilder.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder;
+
+import org.apache.shardingsphere.database.connector.core.spi.DatabaseTypedSPI;
+
+public interface StringPkSmallOrderSQLBuilder extends DatabaseTypedSPI {
+    
+    /**
+     * Build create table SQL.
+     *
+     * @param tableName table name
+     * @return create table SQL
+     */
+    String buildCreateTableSQL(String tableName);
+    
+    /**
+     * Build prepared insert SQL.
+     *
+     * @param tableName table name
+     * @return prepared insert SQL
+     */
+    default String buildPreparedInsertSQL(final String tableName) {
+        return String.format("INSERT INTO %s (order_id, user_id, status) 
VALUES (?, ?, ?)", tableName);
+    }
+}
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/orderitem/IntPkOrderItemDAO.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/orderitem/IntPkOrderItemDAO.java
index c4824b737d0..7db401973fc 100644
--- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/orderitem/IntPkOrderItemDAO.java
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/dao/orderitem/IntPkOrderItemDAO.java
@@ -55,14 +55,14 @@ public final class IntPkOrderItemDAO {
     /**
      * Batch insert order items.
      *
-     * @param insertRows insert rows
+     * @param recordCount record count
      * @throws SQLException SQL exception
      */
-    public void batchInsert(final int insertRows) throws SQLException {
-        List<Object[]> params = PipelineCaseHelper.generateOrderItemInsertData(new AutoIncrementKeyGenerateAlgorithm(), insertRows);
+    public void batchInsert(final int recordCount) throws SQLException {
+        List<Object[]> paramsList = PipelineCaseHelper.generateOrderItemInsertData(new AutoIncrementKeyGenerateAlgorithm(), recordCount);
         String sql = sqlBuilder.buildPreparedInsertSQL();
-        log.info("Batch insert order_item SQL: {}, params size: {}", sql, 
params.size());
-        DataSourceExecuteUtils.execute(dataSource, sql, params);
+        log.info("Batch insert order_item SQL: {}, params list size: {}", sql, 
paramsList.size());
+        DataSourceExecuteUtils.execute(dataSource, sql, paramsList);
     }
     
     /**
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/framework/helper/PipelineCaseHelper.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/framework/helper/PipelineCaseHelper.java
index 8401957e287..88009cf719b 100644
--- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/framework/helper/PipelineCaseHelper.java
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/framework/helper/PipelineCaseHelper.java
@@ -73,21 +73,40 @@ public final class PipelineCaseHelper {
         return Pair.of(orderData, orderItemData);
     }
     
+    /**
+     * Generate small order insert data.
+     *
+     * @param keyGenerateAlgorithm key generate algorithm
+     * @param recordCount record count
+     * @return small order insert data
+     */
+    public static List<Object[]> generateSmallOrderInsertData(final KeyGenerateAlgorithm keyGenerateAlgorithm, final int recordCount) {
+        List<Object[]> result = new ArrayList<>(recordCount);
+        for (int i = 0; i < recordCount; i++) {
+            Object[] params = new Object[3];
+            params[0] = keyGenerateAlgorithm.generateKeys(mock(AlgorithmSQLContext.class), 1).iterator().next();
+            params[1] = ThreadLocalRandom.current().nextInt(0, 6);
+            params[2] = "OK";
+            result.add(params);
+        }
+        return result;
+    }
+    
     /**
      * Generate order insert data.
      *
      * @param databaseType database type
      * @param keyGenerateAlgorithm key generate algorithm
-     * @param insertRows insert rows
+     * @param recordCount record count
      * @return order insert data
      * @throws UnsupportedOperationException Unsupported operation exception
      */
     // TODO Refactor to use SPI
-    public static List<Object[]> generateOrderInsertData(final DatabaseType databaseType, final KeyGenerateAlgorithm keyGenerateAlgorithm, final int insertRows) {
-        List<Object[]> result = new ArrayList<>(insertRows);
+    public static List<Object[]> generateOrderInsertData(final DatabaseType databaseType, final KeyGenerateAlgorithm keyGenerateAlgorithm, final int recordCount) {
+        List<Object[]> result = new ArrayList<>(recordCount);
         String emojiText = "☠️x☺️x✋x☹️";
         if (databaseType instanceof MySQLDatabaseType || databaseType instanceof MariaDBDatabaseType) {
-            for (int i = 0; i < insertRows; i++) {
+            for (int i = 0; i < recordCount; i++) {
                 int randomInt = generateInt(-100, 100);
                 Object orderId = keyGenerateAlgorithm.generateKeys(mock(AlgorithmSQLContext.class), 1).iterator().next();
                 int randomUnsignedInt = generateInt(0, 100);
@@ -101,7 +120,7 @@ public final class PipelineCaseHelper {
             return result;
         }
         if (databaseType instanceof PostgreSQLDatabaseType) {
-            for (int i = 0; i < insertRows; i++) {
+            for (int i = 0; i < recordCount; i++) {
                 Object orderId = keyGenerateAlgorithm.generateKeys(mock(AlgorithmSQLContext.class), 1).iterator().next();
                 result.add(new Object[]{orderId, generateInt(0, 100), generateString(6), generateInt(-128, 127),
                         BigDecimal.valueOf(generateDouble()), true, "bytea".getBytes(), generateString(2), generateString(2), generateFloat(), generateDouble(),
@@ -111,7 +130,7 @@ public final class PipelineCaseHelper {
             return result;
         }
         if (databaseType instanceof OpenGaussDatabaseType) {
-            for (int i = 0; i < insertRows; i++) {
+            for (int i = 0; i < recordCount; i++) {
                 Object orderId = keyGenerateAlgorithm.generateKeys(mock(AlgorithmSQLContext.class), 1).iterator().next();
                 byte[] bytesValue = {Byte.MIN_VALUE, -1, 0, 1, Byte.MAX_VALUE};
                 result.add(new Object[]{orderId, generateInt(0, 1000), "'status'" + i, generateInt(-1000, 9999), generateInt(0, 100), generateFloat(), generateDouble(),
@@ -173,12 +192,12 @@ public final class PipelineCaseHelper {
      * Generate order item insert data.
      *
      * @param keyGenerateAlgorithm key generate algorithm
-     * @param insertRows insert rows
+     * @param recordCount record count
      * @return order item insert data
      */
-    public static List<Object[]> generateOrderItemInsertData(final KeyGenerateAlgorithm keyGenerateAlgorithm, final int insertRows) {
-        List<Object[]> result = new ArrayList<>(insertRows);
-        for (int i = 0; i < insertRows; i++) {
+    public static List<Object[]> generateOrderItemInsertData(final KeyGenerateAlgorithm keyGenerateAlgorithm, final int recordCount) {
+        List<Object[]> result = new ArrayList<>(recordCount);
+        for (int i = 0; i < recordCount; i++) {
             Object orderId = keyGenerateAlgorithm.generateKeys(mock(AlgorithmSQLContext.class), 1).iterator().next();
             int userId = generateInt(0, 100);
             result.add(new Object[]{keyGenerateAlgorithm.generateKeys(mock(AlgorithmSQLContext.class), 1).iterator().next(), orderId, userId, "SUCCESS"});
@@ -195,6 +214,7 @@ public final class PipelineCaseHelper {
      * @param recordCount record count
      * @throws SQLException sql exception
      */
+    // TODO Delete
     public static void batchInsertOrderRecordsWithGeneralColumns(final Connection connection, final KeyGenerateAlgorithm keyGenerateAlgorithm, final String tableName,
                                                                   final int recordCount) throws SQLException {
         log.info("init data begin: {}", LocalDateTime.now());
diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/util/DataSourceExecuteUtils.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/util/DataSourceExecuteUtils.java
index 40df562db8b..fdb8cd91758 100644
--- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/util/DataSourceExecuteUtils.java
+++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/util/DataSourceExecuteUtils.java
@@ -75,16 +75,16 @@ public final class DataSourceExecuteUtils {
      *
      * @param dataSource data source
      * @param sql SQL
-     * @param params parameters
+     * @param paramsList parameters
      * @throws SQLException SQL exception
      */
     // TODO Rename executeBatch
-    public static void execute(final DataSource dataSource, final String sql, final List<Object[]> params) throws SQLException {
+    public static void execute(final DataSource dataSource, final String sql, final List<Object[]> paramsList) throws SQLException {
         try (Connection connection = dataSource.getConnection()) {
             PreparedStatement preparedStatement = connection.prepareStatement(sql);
             int batchSize = 1000;
             int count = 0;
-            for (Object[] each : params) {
+            for (Object[] each : paramsList) {
                 for (int i = 0; i < each.length; i++) {
                     preparedStatement.setObject(i + 1, each[i]);
                 }
diff --git a/test/e2e/operation/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder.StringPkSmallOrderSQLBuilder b/test/e2e/operation/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder.StringPkSmallOrderSQLBuilder
new file mode 100644
index 00000000000..c30eb2d8f3c
--- /dev/null
+++ b/test/e2e/operation/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder.StringPkSmallOrderSQLBuilder
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder.MySQLStringPkSmallOrderSQLBuilder
+org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder.PostgreSQLStringPkSmallOrderSQLBuilder
+org.apache.shardingsphere.test.e2e.operation.pipeline.dao.order.small.sqlbuilder.OpenGaussStringPkSmallOrderSQLBuilder
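
The META-INF/services registration above is what lets the DAO pick a database-specific SQL builder at runtime. A rough sketch of that lookup, assuming DatabaseTypedSPILoader matches the registered implementations by their getDatabaseType() value ("t_order" is only an example table name):

    // Resolves to the MySQL, PostgreSQL, or openGauss builder depending on databaseType.
    StringPkSmallOrderSQLBuilder sqlBuilder = DatabaseTypedSPILoader.getService(StringPkSmallOrderSQLBuilder.class, databaseType);
    String createTableSQL = sqlBuilder.buildCreateTableSQL("t_order");
    // Default method shared by all dialects in the new interface.
    String insertSQL = sqlBuilder.buildPreparedInsertSQL("t_order");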

