This is an automated email from the ASF dual-hosted git repository.
zhonghongsheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new 8a7b592a6d8 Add CDC E2E test case at pipeline (#24376)
8a7b592a6d8 is described below
commit 8a7b592a6d8acdd3b2fe298daabd3c92eb0f98a7
Author: Xinze Guo <[email protected]>
AuthorDate: Fri Mar 3 17:01:45 2023 +0800
Add CDC E2E test case at pipeline (#24376)
* Add CDC E2E test case
* Remove unused thread sleep
* Remove float/double asserts, because variable-precision problems can occur sometimes
* Refactor data check method
* Fix timestamp precision loss at CDC
* Add TODO and improve
* Simplify the code
---
.../pipeline/cdc/client/util/AnyValueConvert.java | 5 +-
.../pipeline/cdc/client/example/Bootstrap.java | 3 +
.../data/pipeline/cdc/api/job/type/CDCJobType.java | 2 +-
.../pipeline/cdc/util/ColumnValueConvertUtil.java | 4 +
.../impl/ShardingSphereProxyClusterContainer.java | 6 +-
test/e2e/pipeline/pom.xml | 10 +
.../pipeline/cases/base/PipelineBaseE2EIT.java | 66 +++++--
.../test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java | 219 +++++++++++++++++++++
.../pipeline/cases/task/MySQLIncrementTask.java | 12 +-
.../cases/task/PostgreSQLIncrementTask.java | 14 --
.../container/compose/BaseContainerComposer.java | 7 +
.../container/compose/DockerContainerComposer.java | 7 +-
.../container/compose/NativeContainerComposer.java | 5 +
.../src/test/resources/env/mysql/mysql8/my.cnf | 1 +
.../src/test/resources/env/mysql/server-5.yaml | 1 +
.../src/test/resources/env/mysql/server-8.yaml | 1 +
.../src/test/resources/env/opengauss/server.yaml | 1 +
.../test/resources/env/postgresql/postgresql.conf | 4 +-
.../src/test/resources/env/postgresql/server.yaml | 1 +
.../test/resources/env/scenario/general/mysql.xml | 2 +-
.../resources/env/scenario/general/postgresql.xml | 8 +-
21 files changed, 322 insertions(+), 57 deletions(-)
diff --git a/kernel/data-pipeline/cdc/client/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/client/util/AnyValueConvert.java b/kernel/data-pipeline/cdc/client/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/client/util/AnyValueConvert.java
index afc52c8c555..77811c757de 100644
--- a/kernel/data-pipeline/cdc/client/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/client/util/AnyValueConvert.java
+++ b/kernel/data-pipeline/cdc/client/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/client/util/AnyValueConvert.java
@@ -37,7 +37,6 @@ import org.apache.shardingsphere.data.pipeline.cdc.protocol.response.NullValue;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Timestamp;
-import java.time.Instant;
import java.time.LocalTime;
/**
@@ -105,6 +104,8 @@ public final class AnyValueConvert {
}
    private static Timestamp converProtobufTimestamp(final com.google.protobuf.Timestamp timestamp) {
-        return new Timestamp(Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()).toEpochMilli());
+ Timestamp result = new Timestamp(timestamp.getSeconds() * 1000);
+ result.setNanos(timestamp.getNanos());
+ return result;
}
}
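The hunk above is the "timestamp precision" fix from the commit message: the old conversion went through Instant.toEpochMilli(), which truncates to millisecond precision, so any sub-millisecond digits were discarded before java.sql.Timestamp ever saw them. A minimal standalone sketch of the difference (the seconds/nanos values are illustrative):

    import java.sql.Timestamp;
    import java.time.Instant;

    public final class PrecisionLossDemo {

        public static void main(final String[] args) {
            long seconds = 1677834105L;
            int nanos = 123456789;
            // Old conversion: toEpochMilli() keeps only the first three of the nine nanosecond digits.
            Timestamp lossy = new Timestamp(Instant.ofEpochSecond(seconds, nanos).toEpochMilli());
            // New conversion: nanos are set directly on the Timestamp, so full precision survives.
            Timestamp exact = new Timestamp(seconds * 1000);
            exact.setNanos(nanos);
            System.out.println(lossy.getNanos()); // 123000000
            System.out.println(exact.getNanos()); // 123456789
        }
    }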
diff --git a/kernel/data-pipeline/cdc/client/src/test/java/org/apache/shardingsphere/data/pipeline/cdc/client/example/Bootstrap.java b/kernel/data-pipeline/cdc/client/src/test/java/org/apache/shardingsphere/data/pipeline/cdc/client/example/Bootstrap.java
index a6b51537e75..cb356c72c46 100644
--- a/kernel/data-pipeline/cdc/client/src/test/java/org/apache/shardingsphere/data/pipeline/cdc/client/example/Bootstrap.java
+++ b/kernel/data-pipeline/cdc/client/src/test/java/org/apache/shardingsphere/data/pipeline/cdc/client/example/Bootstrap.java
@@ -32,6 +32,9 @@ public final class Bootstrap {
* @param args args
*/
public static void main(final String[] args) {
+        // Pay attention to the time zone: to avoid incorrect time zone problems, it is best to keep the program's time zone consistent with the database server's.
+        // Note that mysql-connector-java 5.x ignores the serverTimezone JDBC parameter and uses the program's default time zone instead.
+ // TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
        ImportDataSourceParameter importDataSourceParam = new ImportDataSourceParameter("jdbc:opengauss://localhost:5432/cdc_db?stringtype=unspecified", "gaussdb", "Root@123");
        StartCDCClientParameter parameter = new StartCDCClientParameter(importDataSourceParam);
parameter.setAddress("127.0.0.1");
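The comment added above matters because the CDC client materializes java.sql.Timestamp values in the JVM default time zone; pinning that default before any connection is opened keeps client and server consistent. A hedged sketch using only the standard library:

    import java.util.TimeZone;

    public final class TimeZoneSetupDemo {

        public static void main(final String[] args) {
            // Pin the JVM default time zone before opening any JDBC connection;
            // mysql-connector-java 5.x falls back to this default regardless of the serverTimezone URL parameter.
            TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
            System.out.println(TimeZone.getDefault().getID()); // UTC
        }
    }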
diff --git a/kernel/data-pipeline/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/job/type/CDCJobType.java b/kernel/data-pipeline/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/job/type/CDCJobType.java
index 3cbfed081b0..9ee97fea0e3 100644
--- a/kernel/data-pipeline/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/job/type/CDCJobType.java
+++ b/kernel/data-pipeline/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/job/type/CDCJobType.java
@@ -27,6 +27,6 @@ public final class CDCJobType extends AbstractJobType {
public static final String TYPE_CODE = "03";
public CDCJobType() {
- super("CDC", TYPE_CODE);
+ super("STREAMING", TYPE_CODE);
}
}
diff --git a/kernel/data-pipeline/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/util/ColumnValueConvertUtil.java b/kernel/data-pipeline/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/util/ColumnValueConvertUtil.java
index b84cf56906b..efca9096544 100644
--- a/kernel/data-pipeline/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/util/ColumnValueConvertUtil.java
+++ b/kernel/data-pipeline/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/util/ColumnValueConvertUtil.java
@@ -138,6 +138,10 @@ public final class ColumnValueConvertUtil {
}
    private static com.google.protobuf.Timestamp converToProtobufTimestamp(final Date timestamp) {
+ if (timestamp instanceof Timestamp) {
+ Timestamp value = (Timestamp) timestamp;
+            return com.google.protobuf.Timestamp.newBuilder().setSeconds(value.getTime() / 1000).setNanos(value.getNanos()).build();
+ }
long millis = timestamp.getTime();
        return com.google.protobuf.Timestamp.newBuilder().setSeconds(millis / 1000).setNanos((int) ((millis % 1000) * 1000000)).build();
}
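This is the server-side counterpart of the client fix above: seconds and nanos now travel separately through the protobuf Timestamp, so a value survives the round trip intact. A small sketch assuming protobuf-java is on the classpath (the sample value is illustrative):

    import java.sql.Timestamp;

    public final class TimestampRoundTripDemo {

        public static void main(final String[] args) {
            Timestamp source = Timestamp.valueOf("2023-03-03 17:01:45.123456789");
            // Server side, as in converToProtobufTimestamp: seconds and nanos are carried separately.
            com.google.protobuf.Timestamp proto = com.google.protobuf.Timestamp.newBuilder()
                    .setSeconds(source.getTime() / 1000)
                    .setNanos(source.getNanos())
                    .build();
            // Client side, as in converProtobufTimestamp: rebuild with full nanosecond precision.
            Timestamp restored = new Timestamp(proto.getSeconds() * 1000);
            restored.setNanos(proto.getNanos());
            System.out.println(source.equals(restored)); // true
        }
    }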
diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java
index 495a3b8e196..73ee519b670 100644
--- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java
+++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java
@@ -18,12 +18,12 @@
package org.apache.shardingsphere.test.e2e.env.container.atomic.adapter.impl;
import org.apache.shardingsphere.infra.database.type.DatabaseType;
+import org.apache.shardingsphere.test.e2e.env.container.atomic.DockerITContainer;
import org.apache.shardingsphere.test.e2e.env.container.atomic.adapter.AdapterContainer;
import org.apache.shardingsphere.test.e2e.env.container.atomic.adapter.config.AdaptorContainerConfiguration;
+import org.apache.shardingsphere.test.e2e.env.container.atomic.constants.ProxyContainerConstants;
import org.apache.shardingsphere.test.e2e.env.container.atomic.util.StorageContainerUtil;
import org.apache.shardingsphere.test.e2e.env.container.wait.JdbcConnectionWaitStrategy;
-import org.apache.shardingsphere.test.e2e.env.container.atomic.DockerITContainer;
-import org.apache.shardingsphere.test.e2e.env.container.atomic.constants.ProxyContainerConstants;
import org.apache.shardingsphere.test.e2e.env.runtime.DataSourceEnvironment;
import org.testcontainers.containers.BindMode;
@@ -65,7 +65,7 @@ public final class ShardingSphereProxyClusterContainer extends DockerITContainer
@Override
protected void configure() {
- withExposedPorts(3307, 3308);
+ withExposedPorts(3307, 33071, 3308);
mountConfigurationFiles();
        setWaitStrategy(new JdbcConnectionWaitStrategy(() -> DriverManager.getConnection(DataSourceEnvironment.getURL(databaseType, getHost(), getMappedPort(3307), config.getProxyDataSourceName()), ProxyContainerConstants.USERNAME, ProxyContainerConstants.PASSWORD)));
diff --git a/test/e2e/pipeline/pom.xml b/test/e2e/pipeline/pom.xml
index 44eab611189..1186e431ee1 100644
--- a/test/e2e/pipeline/pom.xml
+++ b/test/e2e/pipeline/pom.xml
@@ -47,6 +47,11 @@
<artifactId>shardingsphere-jdbc-core</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.shardingsphere</groupId>
+ <artifactId>shardingsphere-data-pipeline-cdc-client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.apache.shardingsphere</groupId>
@@ -82,6 +87,11 @@
<artifactId>postgresql</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opengauss</groupId>
+ <artifactId>opengauss-jdbc</artifactId>
+ </dependency>
+
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
diff --git a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/base/PipelineBaseE2EIT.java b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/base/PipelineBaseE2EIT.java
index 081e11f24b4..97de6d528fb 100644
--- a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/base/PipelineBaseE2EIT.java
+++ b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/base/PipelineBaseE2EIT.java
@@ -24,8 +24,11 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.shardingsphere.data.pipeline.api.job.JobStatus;
import org.apache.shardingsphere.data.pipeline.core.util.ThreadUtil;
import org.apache.shardingsphere.data.pipeline.spi.job.JobType;
+import org.apache.shardingsphere.driver.api.yaml.YamlShardingSphereDataSourceFactory;
import org.apache.shardingsphere.infra.database.metadata.url.JdbcUrlAppender;
import org.apache.shardingsphere.infra.database.type.DatabaseType;
+import org.apache.shardingsphere.infra.util.yaml.YamlEngine;
+import org.apache.shardingsphere.infra.yaml.config.pojo.YamlRootConfiguration;
import org.apache.shardingsphere.test.e2e.data.pipeline.command.ExtraSQLCommand;
import org.apache.shardingsphere.test.e2e.data.pipeline.env.PipelineE2EEnvironment;
import org.apache.shardingsphere.test.e2e.data.pipeline.env.enums.PipelineEnvTypeEnum;
@@ -89,6 +92,8 @@ public abstract class PipelineBaseE2EIT {
protected static final int TABLE_INIT_ROW_COUNT = 3000;
+    private static final String REGISTER_STORAGE_UNIT_SQL = "REGISTER STORAGE UNIT ${ds} ( URL='${url}', USER='${user}', PASSWORD='${password}')";
+
@Rule
@Getter(AccessLevel.NONE)
public PipelineWatcher pipelineWatcher;
@@ -143,6 +148,17 @@ public abstract class PipelineBaseE2EIT {
cleanUpDataSource();
}
+    protected String appendExtraParam(final String jdbcUrl) {
+        String result = jdbcUrl;
+        if (DatabaseTypeUtil.isMySQL(getDatabaseType())) {
+            result = new JdbcUrlAppender().appendQueryProperties(jdbcUrl, PropertiesBuilder.build(new Property("rewriteBatchedStatements", Boolean.TRUE.toString())));
+        }
+        if (DatabaseTypeUtil.isPostgreSQL(getDatabaseType()) || DatabaseTypeUtil.isOpenGauss(getDatabaseType())) {
+            result = new JdbcUrlAppender().appendQueryProperties(jdbcUrl, PropertiesBuilder.build(new Property("stringtype", "unspecified")));
+        }
+        return result;
+    }
+
private void cleanUpProxyDatabase(final Connection connection) {
if (PipelineEnvTypeEnum.NATIVE != ENV.getItEnvType()) {
return;
@@ -161,8 +177,12 @@ public abstract class PipelineBaseE2EIT {
}
String jobTypeName = jobType.getTypeName();
List<Map<String, Object>> jobList;
-        try (ResultSet resultSet = connection.createStatement().executeQuery(String.format("SHOW %s LIST", jobTypeName))) {
+        try {
+            ResultSet resultSet = connection.createStatement().executeQuery(String.format("SHOW %s LIST", jobTypeName));
            jobList = transformResultSetToList(resultSet);
+        } catch (final SQLException ex) {
+            log.warn("{} execute failed, message {}", String.format("SHOW %s LIST", jobTypeName), ex.getMessage());
+ return;
}
if (jobList.isEmpty()) {
return;
@@ -195,19 +215,17 @@ public abstract class PipelineBaseE2EIT {
ThreadUtil.sleep(2, TimeUnit.SECONDS);
}
- protected void addResource(final String distSQL) throws SQLException {
- proxyExecuteWithLog(distSQL, 2);
+    protected void registerStorageUnit(final String storageUnitName) throws SQLException {
+        String registerStorageUnitTemplate = REGISTER_STORAGE_UNIT_SQL.replace("${ds}", storageUnitName)
+                .replace("${user}", getUsername())
+                .replace("${password}", getPassword())
+                .replace("${url}", appendExtraParam(getActualJdbcUrlTemplate(storageUnitName, true)));
+ proxyExecuteWithLog(registerStorageUnitTemplate, 2);
}
-    protected String appendExtraParam(final String jdbcUrl) {
-        String result = jdbcUrl;
-        if (DatabaseTypeUtil.isMySQL(getDatabaseType())) {
-            result = new JdbcUrlAppender().appendQueryProperties(jdbcUrl, PropertiesBuilder.build(new Property("rewriteBatchedStatements", Boolean.TRUE.toString())));
-        }
-        if (DatabaseTypeUtil.isPostgreSQL(getDatabaseType()) || DatabaseTypeUtil.isOpenGauss(getDatabaseType())) {
-            result = new JdbcUrlAppender().appendQueryProperties(jdbcUrl, PropertiesBuilder.build(new Property("stringtype", "unspecified")));
-        }
-        return result;
+ // TODO Use registerStorageUnit instead, and remove the method
+ protected void addResource(final String distSQL) throws SQLException {
+ proxyExecuteWithLog(distSQL, 2);
}
    protected String getActualJdbcUrlTemplate(final String databaseName, final boolean isInContainer, final int storageContainerIndex) {
@@ -280,9 +298,7 @@ public abstract class PipelineBaseE2EIT {
while (retryNumber <= 3) {
try (Connection connection = proxyDataSource.getConnection()) {
                ResultSet resultSet = connection.createStatement().executeQuery(sql);
-                List<Map<String, Object>> result = transformResultSetToList(resultSet);
-                log.info("proxy query for list, sql: {}, result: {}", sql, result);
- return result;
+ return transformResultSetToList(resultSet);
} catch (final SQLException ex) {
log.error("data access error", ex);
}
@@ -292,7 +308,7 @@ public abstract class PipelineBaseE2EIT {
throw new RuntimeException("can't get result from proxy");
}
-    private List<Map<String, Object>> transformResultSetToList(final ResultSet resultSet) throws SQLException {
+    protected List<Map<String, Object>> transformResultSetToList(final ResultSet resultSet) throws SQLException {
ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
int columns = resultSetMetaData.getColumnCount();
List<Map<String, Object>> result = new ArrayList<>();
@@ -375,4 +391,22 @@ public abstract class PipelineBaseE2EIT {
int recordsCount = getTargetTableRecordsCount(tableName);
        assertTrue("actual count " + recordsCount, recordsCount > tableInitRows);
}
+
+    // TODO proxy support for some fields still needs to be optimized, such as binary of MySQL; after these problems are optimized, the proxy DataSource can be used.
+    protected DataSource generateShardingSphereDataSourceFromProxy() throws SQLException {
+        String dataSourceConfigText = queryForListWithLog("EXPORT DATABASE CONFIGURATION").get(0).get("result").toString();
+        YamlRootConfiguration rootConfig = YamlEngine.unmarshal(dataSourceConfigText, YamlRootConfiguration.class);
+        if (PipelineEnvTypeEnum.DOCKER == ENV.getItEnvType()) {
+            DockerStorageContainer storageContainer = ((DockerContainerComposer) containerComposer).getStorageContainers().get(0);
+            String sourceUrl = String.join(":", storageContainer.getNetworkAliases().get(0), Integer.toString(storageContainer.getExposedPort()));
+            String targetUrl = String.join(":", storageContainer.getHost(), Integer.toString(storageContainer.getMappedPort()));
+            for (Map<String, Object> each : rootConfig.getDataSources().values()) {
+                each.put("url", each.get("url").toString().replaceFirst(sourceUrl, targetUrl));
+            }
+        }
+        for (Map<String, Object> each : rootConfig.getDataSources().values()) {
+            each.put("dataSourceClassName", "com.zaxxer.hikari.HikariDataSource");
+        }
+        return YamlShardingSphereDataSourceFactory.createDataSourceWithoutCache(rootConfig);
+    }
}
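For reference, this is roughly what the new registerStorageUnit helper sends to the proxy once the template placeholders are filled in; the storage unit name, URL, and credentials below are hypothetical stand-ins for the values resolved at runtime from getActualJdbcUrlTemplate(), getUsername() and getPassword():

    public final class RegisterStorageUnitDemo {

        public static void main(final String[] args) {
            // Same template as REGISTER_STORAGE_UNIT_SQL above, rendered with illustrative values.
            String distSQL = "REGISTER STORAGE UNIT ${ds} ( URL='${url}', USER='${user}', PASSWORD='${password}')"
                    .replace("${ds}", "ds_0")
                    .replace("${url}", "jdbc:mysql://127.0.0.1:3306/pipeline_it_0?rewriteBatchedStatements=true")
                    .replace("${user}", "root")
                    .replace("${password}", "root");
            System.out.println(distSQL);
        }
    }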
diff --git a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java
new file mode 100644
index 00000000000..7ee151ac392
--- /dev/null
+++ b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.test.e2e.data.pipeline.cases.cdc;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.shardingsphere.data.pipeline.api.check.consistency.DataConsistencyCheckResult;
+import org.apache.shardingsphere.data.pipeline.api.datasource.PipelineDataSourceWrapper;
+import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaName;
+import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaTableName;
+import org.apache.shardingsphere.data.pipeline.api.metadata.TableName;
+import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData;
+import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineTableMetaData;
+import org.apache.shardingsphere.data.pipeline.cdc.api.job.type.CDCJobType;
+import org.apache.shardingsphere.data.pipeline.cdc.client.CDCClient;
+import org.apache.shardingsphere.data.pipeline.cdc.client.parameter.ImportDataSourceParameter;
+import org.apache.shardingsphere.data.pipeline.cdc.client.parameter.StartCDCClientParameter;
+import org.apache.shardingsphere.data.pipeline.cdc.protocol.request.StreamDataRequestBody.SchemaTable;
+import org.apache.shardingsphere.data.pipeline.core.check.consistency.ConsistencyCheckJobItemProgressContext;
+import org.apache.shardingsphere.data.pipeline.core.check.consistency.SingleTableInventoryDataConsistencyChecker;
+import org.apache.shardingsphere.data.pipeline.core.check.consistency.algorithm.DataMatchDataConsistencyCalculateAlgorithm;
+import org.apache.shardingsphere.data.pipeline.core.metadata.loader.StandardPipelineTableMetaDataLoader;
+import org.apache.shardingsphere.infra.database.type.dialect.MySQLDatabaseType;
+import org.apache.shardingsphere.infra.database.type.dialect.OpenGaussDatabaseType;
+import org.apache.shardingsphere.sharding.algorithm.keygen.SnowflakeKeyGenerateAlgorithm;
+import org.apache.shardingsphere.test.e2e.data.pipeline.cases.base.PipelineBaseE2EIT;
+import org.apache.shardingsphere.test.e2e.data.pipeline.cases.task.MySQLIncrementTask;
+import org.apache.shardingsphere.test.e2e.data.pipeline.cases.task.PostgreSQLIncrementTask;
+import org.apache.shardingsphere.test.e2e.data.pipeline.env.enums.PipelineEnvTypeEnum;
+import org.apache.shardingsphere.test.e2e.data.pipeline.framework.helper.PipelineCaseHelper;
+import org.apache.shardingsphere.test.e2e.data.pipeline.framework.param.PipelineTestParameter;
+import org.apache.shardingsphere.test.e2e.data.pipeline.util.DataSourceExecuteUtil;
+import org.apache.shardingsphere.test.e2e.env.container.atomic.constants.ProxyContainerConstants;
+import org.apache.shardingsphere.test.e2e.env.container.atomic.util.StorageContainerUtil;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.testcontainers.shaded.org.awaitility.Awaitility;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.time.LocalDateTime;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+/**
+ * CDC E2E IT.
+ */
+@RunWith(Parameterized.class)
+@Slf4j
+public final class CDCE2EIT extends PipelineBaseE2EIT {
+
+    private static final String CREATE_SHARDING_RULE_SQL = String.format("CREATE SHARDING TABLE RULE t_order("
+ + "STORAGE_UNITS(%s,%s),"
+ + "SHARDING_COLUMN=user_id,"
+ + "TYPE(NAME='hash_mod',PROPERTIES('sharding-count'='4')),"
+ + "KEY_GENERATE_STRATEGY(COLUMN=order_id,TYPE(NAME='snowflake'))"
+ + ")", DS_0, DS_1);
+
+    private final ExecutorService executor = Executors.newSingleThreadExecutor();
+
+ public CDCE2EIT(final PipelineTestParameter testParam) {
+ super(testParam);
+ }
+
+ @Parameters(name = "{0}")
+ public static Collection<PipelineTestParameter> getTestParameters() {
+ Collection<PipelineTestParameter> result = new LinkedList<>();
+ if (PipelineBaseE2EIT.ENV.getItEnvType() == PipelineEnvTypeEnum.NONE) {
+ return result;
+ }
+ MySQLDatabaseType mysqlDatabaseType = new MySQLDatabaseType();
+        for (String each : PipelineBaseE2EIT.ENV.listStorageContainerImages(mysqlDatabaseType)) {
+            result.add(new PipelineTestParameter(mysqlDatabaseType, each, "env/scenario/general/mysql.xml"));
+ }
+        OpenGaussDatabaseType openGaussDatabaseType = new OpenGaussDatabaseType();
+        for (String each : PipelineBaseE2EIT.ENV.listStorageContainerImages(openGaussDatabaseType)) {
+            result.add(new PipelineTestParameter(openGaussDatabaseType, each, "env/scenario/general/postgresql.xml"));
+ }
+ return result;
+ }
+
+ @Override
+ protected String getSourceTableOrderName() {
+ return "t_order";
+ }
+
+ @Test
+    public void assertCDCDataImportSuccess() throws SQLException, InterruptedException {
+        // make sure the program time zone is the same as the database server's in CI.
+        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+ initEnvironment(getDatabaseType(), new CDCJobType());
+ for (String each : Arrays.asList(DS_0, DS_1)) {
+ registerStorageUnit(each);
+ }
+ createOrderTableRule();
+ try (Connection connection = getProxyDataSource().getConnection()) {
+ initSchemaAndTable(connection);
+ }
+        Pair<List<Object[]>, List<Object[]>> dataPair = PipelineCaseHelper.generateFullInsertData(getDatabaseType(), 20);
+        log.info("init data begin: {}", LocalDateTime.now());
+        DataSourceExecuteUtil.execute(getProxyDataSource(), getExtraSQLCommand().getFullInsertOrder(getSourceTableOrderName()), dataPair.getLeft());
+        log.info("init data end: {}", LocalDateTime.now());
+        try (Connection connection = DriverManager.getConnection(getActualJdbcUrlTemplate(DS_4, false), getUsername(), getPassword())) {
+ initSchemaAndTable(connection);
+ }
+ startCDCClient();
+        Awaitility.await().atMost(10, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).until(() -> !queryForListWithLog("SHOW STREAMING LIST").isEmpty());
+        if (getDatabaseType() instanceof MySQLDatabaseType) {
+            startIncrementTask(new MySQLIncrementTask(getProxyDataSource(), getSourceTableOrderName(), new SnowflakeKeyGenerateAlgorithm(), 20));
+        } else {
+            startIncrementTask(new PostgreSQLIncrementTask(getProxyDataSource(), PipelineBaseE2EIT.SCHEMA_NAME, getSourceTableOrderName(), 20));
+        }
+        getIncreaseTaskThread().join(10000);
+        List<Map<String, Object>> actualProxyList;
+        try (Connection connection = getProxyDataSource().getConnection()) {
+            ResultSet resultSet = connection.createStatement().executeQuery(String.format("SELECT * FROM %s ORDER BY order_id ASC", getOrderTableNameWithSchema()));
+            actualProxyList = transformResultSetToList(resultSet);
+        }
+        Awaitility.await().atMost(10, TimeUnit.SECONDS).pollInterval(2, TimeUnit.SECONDS).until(() -> listOrderRecords(getOrderTableNameWithSchema()).size() == actualProxyList.size());
+        List<Map<String, Object>> actualImportedList = listOrderRecords(getOrderTableNameWithSchema());
+        assertThat(actualProxyList.size(), is(actualImportedList.size()));
+        SchemaTableName schemaTableName = getDatabaseType().isSchemaAvailable()
+                ? new SchemaTableName(new SchemaName(PipelineBaseE2EIT.SCHEMA_NAME), new TableName(getSourceTableOrderName()))
+                : new SchemaTableName(new SchemaName(null), new TableName(getSourceTableOrderName()));
+        PipelineDataSourceWrapper targetDataSource = new PipelineDataSourceWrapper(StorageContainerUtil.generateDataSource(getActualJdbcUrlTemplate(DS_4, false), getUsername(), getPassword()),
+                getDatabaseType());
+        PipelineDataSourceWrapper sourceDataSource = new PipelineDataSourceWrapper(generateShardingSphereDataSourceFromProxy(), getDatabaseType());
+        StandardPipelineTableMetaDataLoader metaDataLoader = new StandardPipelineTableMetaDataLoader(targetDataSource);
+        PipelineTableMetaData tableMetaData = metaDataLoader.getTableMetaData(PipelineBaseE2EIT.SCHEMA_NAME, "t_order");
+        PipelineColumnMetaData primaryKeyMetaData = tableMetaData.getColumnMetaData(tableMetaData.getPrimaryKeyColumns().get(0));
+        ConsistencyCheckJobItemProgressContext progressContext = new ConsistencyCheckJobItemProgressContext("", 0);
+        SingleTableInventoryDataConsistencyChecker checker = new SingleTableInventoryDataConsistencyChecker("", sourceDataSource, targetDataSource, schemaTableName, schemaTableName,
+                primaryKeyMetaData, metaDataLoader, null, progressContext);
+        DataConsistencyCheckResult checkResult = checker.check(new DataMatchDataConsistencyCalculateAlgorithm());
+ assertTrue(checkResult.isMatched());
+ }
+
+ private void createOrderTableRule() throws SQLException {
+ proxyExecuteWithLog(CREATE_SHARDING_RULE_SQL, 2);
+ }
+
+    private void initSchemaAndTable(final Connection connection) throws SQLException {
+        if (getDatabaseType().isSchemaAvailable()) {
+            String sql = String.format("CREATE SCHEMA %s", PipelineBaseE2EIT.SCHEMA_NAME);
+            log.info("create schema sql: {}", sql);
+            connection.createStatement().execute(sql);
+        }
+        String sql = getExtraSQLCommand().getCreateTableOrder(getSourceTableOrderName());
+ log.info("create table sql: {}", sql);
+ connection.createStatement().execute(sql);
+ }
+
+ private void startCDCClient() {
+        ImportDataSourceParameter importDataSourceParam = new ImportDataSourceParameter(appendExtraParam(getActualJdbcUrlTemplate(DS_4, false, 0)), getUsername(), getPassword());
+        StartCDCClientParameter parameter = new StartCDCClientParameter(importDataSourceParam);
+ parameter.setAddress("localhost");
+ parameter.setPort(getContainerComposer().getProxyCDCPort());
+ parameter.setUsername(ProxyContainerConstants.USERNAME);
+ parameter.setPassword(ProxyContainerConstants.PASSWORD);
+ parameter.setDatabase("sharding_db");
+ // TODO add full=false test case later
+ parameter.setFull(true);
+ String schema = getDatabaseType().isSchemaAvailable() ? "test" : "";
+        parameter.setSchemaTables(Collections.singletonList(SchemaTable.newBuilder().setTable(getSourceTableOrderName()).setSchema(schema).build()));
+        parameter.setDatabaseType(getDatabaseType().getType());
+        CompletableFuture.runAsync(() -> new CDCClient(parameter).start(), executor).whenComplete((unused, throwable) -> {
+ if (null != throwable) {
+ log.error("cdc client sync failed, ", throwable);
+ }
+ });
+ }
+
+    private List<Map<String, Object>> listOrderRecords(final String tableNameWithSchema) throws SQLException {
+        try (Connection connection = DriverManager.getConnection(getActualJdbcUrlTemplate(DS_4, false), getUsername(), getPassword())) {
+            ResultSet resultSet = connection.createStatement().executeQuery(String.format("SELECT * FROM %s ORDER BY order_id ASC", tableNameWithSchema));
+ return transformResultSetToList(resultSet);
+ }
+ }
+
+ private String getOrderTableNameWithSchema() {
+ if (getDatabaseType().isSchemaAvailable()) {
+            return String.join(".", PipelineBaseE2EIT.SCHEMA_NAME, getSourceTableOrderName());
+ } else {
+ return getSourceTableOrderName();
+ }
+ }
+}
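Note that the test waits for asynchronous CDC progress with Awaitility polling rather than fixed sleeps (compare the "Remove unused thread sleep" item in the commit message). A minimal standalone sketch of that polling idiom, assuming the same shaded Awaitility dependency; the counter-based condition is purely illustrative:

    import org.testcontainers.shaded.org.awaitility.Awaitility;

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicInteger;

    public final class AwaitilityPollingDemo {

        public static void main(final String[] args) {
            AtomicInteger polls = new AtomicInteger();
            // Poll every second, giving up after 10 seconds, until the condition holds,
            // the same shape as the SHOW STREAMING LIST wait in assertCDCDataImportSuccess.
            Awaitility.await().atMost(10, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS)
                    .until(() -> polls.incrementAndGet() >= 3);
        }
    }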
diff --git a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/MySQLIncrementTask.java b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/MySQLIncrementTask.java
index e6f487762a9..2c079ae90ac 100644
--- a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/MySQLIncrementTask.java
+++ b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/MySQLIncrementTask.java
@@ -21,7 +21,6 @@ import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm;
import org.apache.shardingsphere.test.e2e.data.pipeline.cases.base.BaseIncrementTask;
-import org.apache.shardingsphere.test.e2e.data.pipeline.framework.helper.PipelineCaseHelper;
import org.apache.shardingsphere.test.e2e.data.pipeline.util.DataSourceExecuteUtil;
import javax.sql.DataSource;
@@ -30,6 +29,7 @@ import java.util.concurrent.ThreadLocalRandom;
@RequiredArgsConstructor
@Slf4j
+// TODO merge MySQL,PostgreSQL increment task
public final class MySQLIncrementTask extends BaseIncrementTask {
private final DataSource dataSource;
@@ -52,8 +52,6 @@ public final class MySQLIncrementTask extends BaseIncrementTask {
setNullToOrderFields(orderPrimaryKey);
updateOrderByPrimaryKey(orderPrimaryKey);
}
- Object orderItemPrimaryKey = insertOrderItem();
-        DataSourceExecuteUtil.execute(dataSource, "UPDATE t_order_item SET status = ? WHERE item_id = ?", new Object[]{"updated" + Instant.now().getEpochSecond(), orderItemPrimaryKey});
executeCount++;
}
log.info("MySQL increment task runnable execute successfully.");
@@ -67,14 +65,6 @@ public final class MySQLIncrementTask extends BaseIncrementTask {
return orderInsertDate[0];
}
- private Object insertOrderItem() {
- ThreadLocalRandom random = ThreadLocalRandom.current();
- String status = 0 == random.nextInt() % 2 ? null : "NOT-NULL";
-        Object[] orderInsertItemDate = new Object[]{primaryKeyGenerateAlgorithm.generateKey(), PipelineCaseHelper.generateSnowflakeKey(), random.nextInt(0, 6), status};
-        DataSourceExecuteUtil.execute(dataSource, "INSERT INTO t_order_item(item_id,order_id,user_id,status) VALUES(?, ?, ?, ?)", orderInsertItemDate);
- return orderInsertItemDate[0];
- }
-
private void updateOrderByPrimaryKey(final Object primaryKey) {
        Object[] updateData = {"updated" + Instant.now().getEpochSecond(), ThreadLocalRandom.current().nextInt(0, 100), primaryKey};
        DataSourceExecuteUtil.execute(dataSource, String.format("UPDATE %s SET t_char = ?,t_unsigned_int = ? WHERE order_id = ?", orderTableName), updateData);
diff --git a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/PostgreSQLIncrementTask.java b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/PostgreSQLIncrementTask.java
index e4ee829895e..f413ac43edb 100644
--- a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/PostgreSQLIncrementTask.java
+++ b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/PostgreSQLIncrementTask.java
@@ -60,11 +60,6 @@ public final class PostgreSQLIncrementTask extends BaseIncrementTask {
} else {
updateOrderByPrimaryKey(orderId);
}
- Object orderItemPrimaryKey = insertOrderItem();
-        String updateSql = String.format("UPDATE %s SET status = ? WHERE item_id = ?", getTableNameWithSchema("t_order_item"));
-        DataSourceExecuteUtil.execute(dataSource, updateSql, new Object[]{"updated" + Instant.now().getEpochSecond(), orderItemPrimaryKey});
-        String deleteSql = String.format("DELETE FROM %s WHERE item_id = ?", getTableNameWithSchema("t_order_item"));
-        DataSourceExecuteUtil.execute(dataSource, deleteSql, new Object[]{orderItemPrimaryKey});
executeCount++;
}
log.info("PostgreSQL increment task runnable execute successfully.");
@@ -80,15 +75,6 @@ public final class PostgreSQLIncrementTask extends BaseIncrementTask {
return orderInsertDate[0];
}
- private Object insertOrderItem() {
- ThreadLocalRandom random = ThreadLocalRandom.current();
- String status = 0 == random.nextInt() % 2 ? null : "NOT-NULL";
-        Object[] orderInsertItemDate = new Object[]{KEY_GENERATE_ALGORITHM.generateKey(), PipelineCaseHelper.generateSnowflakeKey(), random.nextInt(0, 6), status};
-        String insertSql = String.format("INSERT INTO %s(item_id,order_id,user_id,status) VALUES(?,?,?,?)", getTableNameWithSchema("t_order_item"));
-        DataSourceExecuteUtil.execute(dataSource, insertSql, orderInsertItemDate);
- return orderInsertItemDate[0];
- }
-
private void updateOrderByPrimaryKey(final Object primaryKey) {
// TODO openGauss incremental task parse single quote not correctly now
        Object[] updateData = {"中文UPDATE" + Instant.now().getEpochSecond(), PipelineCaseHelper.generateJsonString(5, true), PipelineCaseHelper.generateJsonString(5, false), primaryKey};
diff --git a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/BaseContainerComposer.java b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/BaseContainerComposer.java
index c167cf1c5cb..10ae400bb13 100644
--- a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/BaseContainerComposer.java
+++ b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/BaseContainerComposer.java
@@ -38,6 +38,13 @@ public abstract class BaseContainerComposer implements Startable {
*/
public abstract String getProxyJdbcUrl(String databaseName);
+ /**
+ * Get proxy CDC port.
+ *
+ * @return proxy port
+ */
+ public abstract int getProxyCDCPort();
+
/**
* Clean up database.
*
diff --git a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/DockerContainerComposer.java b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/DockerContainerComposer.java
index e5c14661cff..ad82bcc7219 100644
--- a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/DockerContainerComposer.java
+++ b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/DockerContainerComposer.java
@@ -24,8 +24,8 @@ import org.apache.shardingsphere.test.e2e.data.pipeline.util.DockerImageVersion;
import org.apache.shardingsphere.test.e2e.env.container.atomic.adapter.AdapterContainerFactory;
import org.apache.shardingsphere.test.e2e.env.container.atomic.adapter.config.AdaptorContainerConfiguration;
import org.apache.shardingsphere.test.e2e.env.container.atomic.adapter.impl.ShardingSphereProxyClusterContainer;
-import org.apache.shardingsphere.test.e2e.env.container.atomic.enums.AdapterType;
import org.apache.shardingsphere.test.e2e.env.container.atomic.enums.AdapterMode;
+import org.apache.shardingsphere.test.e2e.env.container.atomic.enums.AdapterType;
import org.apache.shardingsphere.test.e2e.env.container.atomic.governance.GovernanceContainer;
import org.apache.shardingsphere.test.e2e.env.container.atomic.governance.impl.ZookeeperContainer;
import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.DockerStorageContainer;
@@ -92,6 +92,11 @@ public final class DockerContainerComposer extends BaseContainerComposer {
        return DataSourceEnvironment.getURL(databaseType, proxyContainer.getHost(), proxyContainer.getFirstMappedPort(), databaseName);
}
+ @Override
+ public int getProxyCDCPort() {
+ return proxyContainer.getMappedPort(33071);
+ }
+
@Override
public void cleanUpDatabase(final String databaseName) {
}
diff --git a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/NativeContainerComposer.java b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/NativeContainerComposer.java
index e37d8c84510..2fca8cec79b 100644
--- a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/NativeContainerComposer.java
+++ b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/NativeContainerComposer.java
@@ -104,4 +104,9 @@ public final class NativeContainerComposer extends BaseContainerComposer {
public String getProxyJdbcUrl(final String databaseName) {
        return DataSourceEnvironment.getURL(databaseType, "localhost", 3307, databaseName);
}
+
+ @Override
+ public int getProxyCDCPort() {
+ return 33071;
+ }
}
diff --git a/test/e2e/pipeline/src/test/resources/env/mysql/mysql8/my.cnf b/test/e2e/pipeline/src/test/resources/env/mysql/mysql8/my.cnf
index e420b68c856..33e64506339 100644
--- a/test/e2e/pipeline/src/test/resources/env/mysql/mysql8/my.cnf
+++ b/test/e2e/pipeline/src/test/resources/env/mysql/mysql8/my.cnf
@@ -26,3 +26,4 @@ default-authentication-plugin=caching_sha2_password
sql_mode=
lower_case_table_names=1
secure_file_priv=/var/lib/mysql
+default-time-zone='+00:00'
diff --git a/test/e2e/pipeline/src/test/resources/env/mysql/server-5.yaml b/test/e2e/pipeline/src/test/resources/env/mysql/server-5.yaml
index bddbb93ed61..e5640cae2fc 100644
--- a/test/e2e/pipeline/src/test/resources/env/mysql/server-5.yaml
+++ b/test/e2e/pipeline/src/test/resources/env/mysql/server-5.yaml
@@ -42,3 +42,4 @@ props:
sql-show: false
sql-federation-type: ADVANCED
proxy-mysql-default-version: 5.7.22
+ cdc-server-port: 33071 # CDC server port
diff --git a/test/e2e/pipeline/src/test/resources/env/mysql/server-8.yaml b/test/e2e/pipeline/src/test/resources/env/mysql/server-8.yaml
index 9e21a1578e9..b4192b07cdd 100644
--- a/test/e2e/pipeline/src/test/resources/env/mysql/server-8.yaml
+++ b/test/e2e/pipeline/src/test/resources/env/mysql/server-8.yaml
@@ -42,3 +42,4 @@ props:
sql-show: false
sql-federation-type: ADVANCED
proxy-mysql-default-version: 8.0.11
+ cdc-server-port: 33071 # CDC server port
diff --git a/test/e2e/pipeline/src/test/resources/env/opengauss/server.yaml b/test/e2e/pipeline/src/test/resources/env/opengauss/server.yaml
index e5d1b8d174d..9e05f478e51 100644
--- a/test/e2e/pipeline/src/test/resources/env/opengauss/server.yaml
+++ b/test/e2e/pipeline/src/test/resources/env/opengauss/server.yaml
@@ -47,3 +47,4 @@ props:
# Available sql federation type: NONE (default), ORIGINAL, ADVANCED
sql-federation-type: NONE
proxy-frontend-database-protocol-type: openGauss
+ cdc-server-port: 33071 # CDC server port
diff --git a/test/e2e/pipeline/src/test/resources/env/postgresql/postgresql.conf b/test/e2e/pipeline/src/test/resources/env/postgresql/postgresql.conf
index 9d432a4cab4..1580796bf3b 100644
--- a/test/e2e/pipeline/src/test/resources/env/postgresql/postgresql.conf
+++ b/test/e2e/pipeline/src/test/resources/env/postgresql/postgresql.conf
@@ -19,7 +19,7 @@ listen_addresses = '*'
wal_level = logical
max_connections = 600
max_replication_slots = 10
-log_timezone = 'Asia/Shanghai'
+log_timezone = 'UTC'
datestyle = 'iso, mdy'
-timezone = 'Asia/Shanghai'
+timezone = 'UTC'
wal_sender_timeout = 0
diff --git a/test/e2e/pipeline/src/test/resources/env/postgresql/server.yaml b/test/e2e/pipeline/src/test/resources/env/postgresql/server.yaml
index 07171243129..07a11385cfe 100644
--- a/test/e2e/pipeline/src/test/resources/env/postgresql/server.yaml
+++ b/test/e2e/pipeline/src/test/resources/env/postgresql/server.yaml
@@ -52,3 +52,4 @@ props:
# Available sql federation type: NONE (default), ORIGINAL, ADVANCED
sql-federation-type: NONE
proxy-frontend-database-protocol-type: PostgreSQL
+ cdc-server-port: 33071 # CDC server port
diff --git a/test/e2e/pipeline/src/test/resources/env/scenario/general/mysql.xml b/test/e2e/pipeline/src/test/resources/env/scenario/general/mysql.xml
index 0616067201e..c6d29086606 100644
--- a/test/e2e/pipeline/src/test/resources/env/scenario/general/mysql.xml
+++ b/test/e2e/pipeline/src/test/resources/env/scenario/general/mysql.xml
@@ -22,7 +22,7 @@
`status` varchar ( 255 ) NULL,
`t_mediumint` mediumint NULL,
`t_smallint` smallint NULL,
- `t_tinyint` tinyint ( 1 ) NULL,
+ `t_tinyint` tinyint ( 3 ) NULL,
`t_unsigned_int` int UNSIGNED NULL,
`t_unsigned_mediumint` mediumint UNSIGNED NULL,
`t_unsigned_smallint` smallint UNSIGNED NULL,
diff --git a/test/e2e/pipeline/src/test/resources/env/scenario/general/postgresql.xml b/test/e2e/pipeline/src/test/resources/env/scenario/general/postgresql.xml
index 90cdfdf27a7..1ecdbf243d0 100644
--- a/test/e2e/pipeline/src/test/resources/env/scenario/general/postgresql.xml
+++ b/test/e2e/pipeline/src/test/resources/env/scenario/general/postgresql.xml
@@ -33,16 +33,12 @@
t_text TEXT NULL,
t_date date NULL,
t_time TIME NULL,
- t_timestmap timestamp NULL,
+ t_timestamp timestamp NULL,
t_timestamptz timestamptz NULL,
PRIMARY KEY ( order_id )
)
</create-table-order>
- <create-table-index>
- CREATE INDEX "idx_user_id" ON test.t_order ( user_id );
- </create-table-index>
-
<create-table-order-item>
CREATE TABLE test.t_order_item (
item_id int8 NOT NULL,
@@ -71,7 +67,7 @@
t_text,
t_date,
t_time,
- t_timestmap,
+ t_timestamp,
t_timestamptz)
VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
</full-insert-order>