This is an automated email from the ASF dual-hosted git repository.

haonan pushed a commit to branch remove_testcontainer
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit a2f441db8ffad3f6706bee05884ec18a8eb43f6f
Author: HTHou <[email protected]>
AuthorDate: Sun Jun 25 15:45:34 2023 +0800

    Remove useless testcontainer module
---
 pom.xml                                            |  31 -
 test/e2e/base/docker-compose.yaml                  |  51 --
 test/e2e/cases/README.md                           |  53 --
 test/e2e/cases/cli/README.md                       |  24 -
 test/e2e/cases/cli/cleanup.sh                      |  27 -
 test/e2e/cases/cli/docker-compose.yaml             |  41 -
 test/e2e/cases/cli/res/init.sql                    |  26 -
 test/e2e/cases/cli/run.sh                          |  48 --
 testcontainer/Readme.md                            |  62 --
 testcontainer/pom.xml                              | 246 ------
 .../test/java/org/apache/iotdb/db/sql/Cases.java   | 949 ---------------------
 .../java/org/apache/iotdb/db/sql/ClusterIT.java    | 105 ---
 .../iotdb/db/sql/ClusterSessionSimpleIT.java       |  85 --
 .../java/org/apache/iotdb/db/sql/SingleNodeIT.java |  88 --
 .../iotdb/db/sql/node1/OneNodeClusterIT.java       |  54 --
 .../db/sql/nodes3/AbstractThreeNodeClusterIT.java  |  61 --
 .../iotdb/db/sql/nodes3/ThreeNodeCluster1IT.java   |  23 -
 .../iotdb/db/sql/nodes3/ThreeNodeCluster2IT.java   |  32 -
 .../db/sql/nodes5/AbstractFiveNodeClusterIT.java   |  85 --
 .../iotdb/db/sql/nodes5/FiveNodeCluster1IT.java    |  23 -
 .../iotdb/db/sql/nodes5/FiveNodeCluster2IT.java    |  32 -
 .../iotdb/db/sql/nodes5/FiveNodeCluster4IT.java    |  32 -
 .../test/java/org/apache/iotdb/db/sync/SyncIT.java | 426 ---------
 .../apache/iotdb/db/sync/SyncWeakNetworkIT.java    |  40 -
 .../NoProjectNameDockerComposeContainer.java       |  38 -
 .../src/test/resources/iotdb-datanode.properties   |  24 -
 .../src/test/resources/logback-container.xml       |  39 -
 testcontainer/src/test/resources/logback.xml       |  52 --
 .../src/test/resources/sync/docker-compose.yaml    |  52 --
 testcontainer/src/tool/README.md                   |  33 -
 testcontainer/src/tool/parser.py                   |  79 --
 31 files changed, 2961 deletions(-)

diff --git a/pom.xml b/pom.xml
index 4c2ba201827..3d910d50641 100644
--- a/pom.xml
+++ b/pom.xml
@@ -216,7 +216,6 @@
         <commons-beanutils.version>1.9.4</commons-beanutils.version>
         <commons-compress.version>1.21</commons-compress.version>
         
<error_prone_annotations.version>2.7.1</error_prone_annotations.version>
-        <testcontainers.version>1.15.3</testcontainers.version>
         <eclipse-collections.version>10.4.0</eclipse-collections.version>
         <awaitility.version>4.0.3</awaitility.version>
         <!-- JDK1.8 only support google java format 1.7-->
@@ -657,12 +656,6 @@
             <artifactId>junit</artifactId>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.testcontainers</groupId>
-            <artifactId>testcontainers</artifactId>
-            <version>${testcontainers.version}</version>
-            <scope>test</scope>
-        </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-jexl3</artifactId>
@@ -1195,30 +1188,6 @@
                 <thrift.exec-cmd.args>+x 
${project.build.directory}/tools/${thrift.executable}</thrift.exec-cmd.args>
             </properties>
         </profile>
-        <!-- for TestContainer. As it requires docker, we have to detect 
whether docker exists.-->
-        <profile>
-            <!-- Mac and Unix-->
-            <id>unixDockerCheck</id>
-            <activation>
-                <file>
-                    <exists>/var/run/docker.sock</exists>
-                </file>
-            </activation>
-            <modules>
-                <module>testcontainer</module>
-            </modules>
-        </profile>
-        <profile>
-            <id>WinDockerCheck</id>
-            <activation>
-                <file>
-                    <exists>C:\Program 
Files\Docker\Docker\resources\bin\docker.exe</exists>
-                </file>
-            </activation>
-            <modules>
-                <module>testcontainer</module>
-            </modules>
-        </profile>
         <!-- Some APIs were removed in Java 11, so we need to add replacements 
-->
         <profile>
             <id>java-11-and-above</id>
diff --git a/test/e2e/base/docker-compose.yaml 
b/test/e2e/base/docker-compose.yaml
deleted file mode 100644
index f536ec69662..00000000000
--- a/test/e2e/base/docker-compose.yaml
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-version: '3.8'
-
-services:
-  server-prototype:
-    build:
-      context: ../../..
-      dockerfile: docker/src/main/Dockerfile-single
-    ports:
-      - 6667:6667
-    networks:
-      iotdb:
-    healthcheck:
-      test: [ "CMD", "bash", "-c", "cat < /dev/null > /dev/tcp/127.0.0.1/6667" 
]
-      interval: 5s
-      timeout: 60s
-      retries: 120
-
-  initializer:
-    build:
-      context: ../../..
-      dockerfile: docker/src/main/Dockerfile-single
-    networks:
-      iotdb:
-    entrypoint:
-      - bash
-      - -c
-      - |
-        cat /res/init.sql | grep -v '^--' | xargs -I {} 
/iotdb/sbin/start-cli.sh -h server -e {}
-        echo "Ready to Run IoTDB E2E Tests"
-
-networks:
-  iotdb:
diff --git a/test/e2e/cases/README.md b/test/e2e/cases/README.md
deleted file mode 100644
index c4a15e3b965..00000000000
--- a/test/e2e/cases/README.md
+++ /dev/null
@@ -1,53 +0,0 @@
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-
-# IoTDB E2E tests cases
-
-Test cases are organized into sub-directories, each of which contains the 
following files:
-
-* `run.sh`: the entry of the test case.
-* `cleanup.sh`: a cleanup script to clean up resources that are created during 
the test.
-* `res`: resources files that will be mounted into the container(s) and be 
used there.
-* `docker-compose.yaml`: orchestrates the services used in the test process.
-* `README.md` (Optional): docs or notes when running this case manually.
-
-any other additional files are completely acceptable here, for example, when 
building
-a case to test the JDBC SDK, the files structure may be something like:
-
-```text
-.
-├── README.md
-├── cleanup.sh
-├── docker-compose.yaml
-├── app      <------- Java application that uses JDBC SDK to communicate with 
IoTDB
-│   ├── pom.xml
-│   ├── src
-│   │   ├── main
-│   │   │   └── java
-│   │   └── test
-│   │       └── java
-│   └── src
-│       ├── main
-│       └── test
-├── res
-│   └── init.sql
-└── run.sh
-```
diff --git a/test/e2e/cases/cli/README.md b/test/e2e/cases/cli/README.md
deleted file mode 100644
index 343abebc56e..00000000000
--- a/test/e2e/cases/cli/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-
-# Standalone Server Test
-
-The simplest test case that starts up an IoTDB server and verifies that the 
CLI works.
diff --git a/test/e2e/cases/cli/cleanup.sh b/test/e2e/cases/cli/cleanup.sh
deleted file mode 100755
index a96f4d06c52..00000000000
--- a/test/e2e/cases/cli/cleanup.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-set -x
-
-cd "$(dirname "$0")" || exit 1
-
-docker-compose down
-
-cd - || exit 1
diff --git a/test/e2e/cases/cli/docker-compose.yaml 
b/test/e2e/cases/cli/docker-compose.yaml
deleted file mode 100644
index 2e9da0422e6..00000000000
--- a/test/e2e/cases/cli/docker-compose.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-version: '3.8'
-
-services:
-  server:
-    extends:
-      file: ../../base/docker-compose.yaml
-      service: server-prototype
-    volumes:
-      - ./res:/resources
-
-  initializer:
-    extends:
-      service: initializer
-      file: ../../base/docker-compose.yaml
-    volumes:
-      - ./res:/res:ro
-    depends_on:
-      server:
-        condition: service_healthy
-
-networks:
-  iotdb:
diff --git a/test/e2e/cases/cli/res/init.sql b/test/e2e/cases/cli/res/init.sql
deleted file mode 100644
index d8d19b0e857..00000000000
--- a/test/e2e/cases/cli/res/init.sql
+++ /dev/null
@@ -1,26 +0,0 @@
---
--- Licensed to the Apache Software Foundation (ASF) under one
--- or more contributor license agreements.  See the NOTICE file
--- distributed with this work for additional information
--- regarding copyright ownership.  The ASF licenses this file
--- to you under the Apache License, Version 2.0 (the
--- "License"); you may not use this file except in compliance
--- with the License.  You may obtain a copy of the License at
---
---     http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing,
--- software distributed under the License is distributed on an
--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
--- KIND, either express or implied.  See the License for the
--- specific language governing permissions and limitations
--- under the License.
---
-
-CREATE DATABASE root.ln;
-SHOW DATABASES;
-
-CREATE TIMESERIES root.ln.wf01.wt01.temperature WITH DATATYPE=FLOAT, 
ENCODING=PLAIN;
-
-INSERT INTO root.ln.wf01.wt01(timestamp,temperature) values(100, 16);
-INSERT INTO root.ln.wf01.wt01(timestamp,temperature) values(200, 26);
diff --git a/test/e2e/cases/cli/run.sh b/test/e2e/cases/cli/run.sh
deleted file mode 100755
index a62bba7ecb0..00000000000
--- a/test/e2e/cases/cli/run.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-set -e
-
-cd "$(dirname "$0")"
-
-docker-compose up -d
-
-max_attempts=10
-attempts=1
-
-while ! docker-compose logs | grep -c 'Ready to Run IoTDB E2E Tests' > 
/dev/null 2>&1; do
-  if [[ $attempts -gt $max_attempts ]]; then
-    echo "Preparation is not ready after $max_attempts attempts, will exit now"
-    exit 1
-  fi
-  echo "Preparation is not ready yet, retrying ($attempts/$max_attempts)"
-  sleep 3
-  attempts=$((attempts+1))
-done
-
-results=$(docker-compose exec -T server /iotdb/sbin/start-cli.sh -e 'SELECT 
temperature FROM root.ln.wf01.wt01')
-
-if [[ $results != *"Total line number = 2"* ]]; then
-  echo "Total line number should be 2"
-  echo "$results"
-  exit 1
-fi
-
-cd -
diff --git a/testcontainer/Readme.md b/testcontainer/Readme.md
deleted file mode 100644
index d4899781d48..00000000000
--- a/testcontainer/Readme.md
+++ /dev/null
@@ -1,62 +0,0 @@
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-
-# Description
-
-This module is for using Docker and TestContainer for end to end test.
-
-## Requirements
-
-You have to install Docker before you runn this module.
-See [Docker Version 
Requirement](https://www.testcontainers.org/supported_docker_environment/).
-
-IoTDB project will detect whether a Docker is installed (but do not check the 
docker's version).
-
-The logic is, for Unix-like system, it checks whether `/var/run/docker.sock` 
exists.
-For Window system, it checks whether `C:\Program 
Files\Docker\Docker\resources\bin\docker.exe` exists.
-
-If you are sure you have installed the Docker but `testcontainer` module is 
not loaed, use `-P unixDockerCheck`
-in your maven command, which also works on Windows OS.
-
-## Behavior
-
-Before running `integration-test` in this module, binaries must be generated 
in the `distribution` module,
-e.g, call `mvn package -Dmaven.test.skip=true`.
-
-In this module, when running `mvn pre-integration-test` (or `mvn 
integration-test`, `mvn post-integration-test`),
-the module will build docker image, `apache/iotdb:maven-development`.
-
-In the `post-integration-test` phase, the above images will be removed.
-
-In the `integration-test` phase, all `src/test/java/**/*IT.java` will be 
tested.
-
-## How it runs
-
-`apache/iotdb:maven-development` is generated following the Dockerfile 
`${basedir}/docker/src/main/Dockerfile-single`, and
-
-For testing sync module only, we use `mvn integration-test -P sync`.
-
-TestContainer can start the docker (or docker compose) automatically.
-
-But these docker compose files can also be used independently.
-e.g., `docker-compose up`.
-
-
diff --git a/testcontainer/pom.xml b/testcontainer/pom.xml
deleted file mode 100644
index cdbe5cbb958..00000000000
--- a/testcontainer/pom.xml
+++ /dev/null
@@ -1,246 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
-    <parent>
-        <groupId>org.apache.iotdb</groupId>
-        <artifactId>iotdb-parent</artifactId>
-        <version>1.3.0-SNAPSHOT</version>
-        <relativePath>../pom.xml</relativePath>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-    <artifactId>testcontainer</artifactId>
-    <properties>
-        <docker.test.skip>false</docker.test.skip>
-        <docker.build.executable>docker</docker.build.executable>
-        <docker.build.single.argument>build -t apache/iotdb:maven-development 
-f ${basedir}/../docker/src/main/Dockerfile-single 
${basedir}/../.</docker.build.single.argument>
-        <docker.clean.single.argument>image rm 
apache/iotdb:maven-development</docker.clean.single.argument>
-        <docker.build.sync.argument>build -t 
apache/iotdb:sync-maven-development -f 
${basedir}/../docker/src/main/Dockerfile-single-tc 
${basedir}/../.</docker.build.sync.argument>
-        <docker.clean.sync.argument>image rm 
apache/iotdb:sync-maven-development</docker.clean.sync.argument>
-    </properties>
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.iotdb</groupId>
-            <artifactId>iotdb-jdbc</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.iotdb</groupId>
-            <artifactId>iotdb-session</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.iotdb</groupId>
-            <artifactId>iotdb-cli</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.iotdb</groupId>
-            <artifactId>iotdb-server</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-    <profiles>
-        <profile>
-            <id>testcontainer</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <build>
-                <plugins>
-                    <!-- before integration test, we build the docker image -->
-                    <plugin>
-                        <groupId>org.codehaus.mojo</groupId>
-                        <artifactId>exec-maven-plugin</artifactId>
-                        <version>1.6.0</version>
-                        <executions>
-                            <execution>
-                                <id>build-docker-image</id>
-                                <phase>pre-integration-test</phase>
-                                <goals>
-                                    <goal>exec</goal>
-                                </goals>
-                                <configuration>
-                                    <skip>${docker.test.skip}</skip>
-                                    
<executable>${docker.build.executable}</executable>
-                                    
<commandlineArgs>${docker.build.single.argument}</commandlineArgs>
-                                </configuration>
-                            </execution>
-                            <execution>
-                                <id>build-sync-docker-image</id>
-                                <phase>pre-integration-test</phase>
-                                <goals>
-                                    <goal>exec</goal>
-                                </goals>
-                                <configuration>
-                                    <skip>${docker.test.skip}</skip>
-                                    
<executable>${docker.build.executable}</executable>
-                                    
<commandlineArgs>${docker.build.sync.argument}</commandlineArgs>
-                                </configuration>
-                            </execution>
-                            <execution>
-                                <id>clean-docker-image</id>
-                                <phase>post-integration-test</phase>
-                                <goals>
-                                    <goal>exec</goal>
-                                </goals>
-                                <configuration>
-                                    <skip>${docker.test.skip}</skip>
-                                    
<executable>${docker.build.executable}</executable>
-                                    
<commandlineArgs>${docker.clean.single.argument}</commandlineArgs>
-                                </configuration>
-                            </execution>
-                            <execution>
-                                <id>clean-sync-docker-image</id>
-                                <phase>post-integration-test</phase>
-                                <goals>
-                                    <goal>exec</goal>
-                                </goals>
-                                <configuration>
-                                    <skip>${docker.test.skip}</skip>
-                                    
<executable>${docker.build.executable}</executable>
-                                    
<commandlineArgs>${docker.clean.sync.argument}</commandlineArgs>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                    <plugin>
-                        <groupId>org.codehaus.mojo</groupId>
-                        <artifactId>build-helper-maven-plugin</artifactId>
-                        <version>3.2.0</version>
-                        <executions>
-                            <execution>
-                                <id>add-test-container-source</id>
-                                <phase>generate-test-sources</phase>
-                                <goals>
-                                    <goal>add-test-source</goal>
-                                </goals>
-                                <configuration>
-                                    <sources>
-                                        
<source>${basedir}/src/test/java</source>
-                                    </sources>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-failsafe-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <id>run-integration-tests</id>
-                                <phase>integration-test</phase>
-                                <goals>
-                                    <goal>integration-test</goal>
-                                    <goal>verify</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-        <profile>
-            <id>test-sync</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <build>
-                <plugins>
-                    <!-- before integration test, we build the docker image -->
-                    <plugin>
-                        <groupId>org.codehaus.mojo</groupId>
-                        <artifactId>exec-maven-plugin</artifactId>
-                        <version>1.6.0</version>
-                        <executions>
-                            <execution>
-                                <id>build-sync-docker-image</id>
-                                <phase>pre-integration-test</phase>
-                                <goals>
-                                    <goal>exec</goal>
-                                </goals>
-                                <configuration>
-                                    <skip>${docker.test.skip}</skip>
-                                    
<executable>${docker.build.executable}</executable>
-                                    
<commandlineArgs>${docker.build.sync.argument}</commandlineArgs>
-                                </configuration>
-                            </execution>
-                            <execution>
-                                <id>clean-sync-docker-image</id>
-                                <phase>post-integration-test</phase>
-                                <goals>
-                                    <goal>exec</goal>
-                                </goals>
-                                <configuration>
-                                    <skip>${docker.test.skip}</skip>
-                                    
<executable>${docker.build.executable}</executable>
-                                    
<commandlineArgs>${docker.clean.sync.argument}</commandlineArgs>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                    <plugin>
-                        <groupId>org.codehaus.mojo</groupId>
-                        <artifactId>build-helper-maven-plugin</artifactId>
-                        <version>3.2.0</version>
-                        <executions>
-                            <execution>
-                                <id>add-test-container-source</id>
-                                <phase>generate-test-sources</phase>
-                                <goals>
-                                    <goal>add-test-source</goal>
-                                </goals>
-                                <configuration>
-                                    <sources>
-                                        
<source>${basedir}/src/test/java</source>
-                                    </sources>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-failsafe-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <id>run-integration-tests</id>
-                                <phase>integration-test</phase>
-                                <goals>
-                                    <goal>integration-test</goal>
-                                    <goal>verify</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <configuration>
-                            <includes>
-                                
<include>**/org/apache/iotdb/db/sync/**</include>
-                            </includes>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-</project>
diff --git a/testcontainer/src/test/java/org/apache/iotdb/db/sql/Cases.java 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/Cases.java
deleted file mode 100644
index 7411971471f..00000000000
--- a/testcontainer/src/test/java/org/apache/iotdb/db/sql/Cases.java
+++ /dev/null
@@ -1,949 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.db.sql;
-
-import org.apache.iotdb.isession.SessionDataSet;
-import org.apache.iotdb.rpc.BatchExecutionException;
-import org.apache.iotdb.rpc.IoTDBConnectionException;
-import org.apache.iotdb.rpc.StatementExecutionException;
-import org.apache.iotdb.session.Session;
-import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
-import org.apache.iotdb.tsfile.read.common.Field;
-import org.apache.iotdb.tsfile.utils.Binary;
-import org.apache.iotdb.tsfile.utils.BitMap;
-import org.apache.iotdb.tsfile.write.record.Tablet;
-import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-public abstract class Cases {
-
-  protected Statement writeStatement;
-  protected Connection writeConnection;
-  protected Statement[] readStatements;
-  protected Connection[] readConnections;
-  protected Session session;
-
-  /** initialize the writeStatement,writeConnection, readStatements and the 
readConnections. */
-  public abstract void init() throws Exception;
-
-  public void clean() throws Exception {
-    writeStatement.close();
-    writeConnection.close();
-    for (Statement statement : readStatements) {
-      statement.close();
-    }
-    for (Connection connection : readConnections) {
-      connection.close();
-    }
-    session.close();
-  }
-
-  // if we seperate the test into multiply test() methods, then the docker 
container have to be
-  // built
-  // several times. So, if the test cases are not conflict, we can put them 
into one method.
-  // but if you want to avoid other cases' impact, use a seperate test() 
method.
-  @Test
-  public void multiCasesTest() throws SQLException {
-
-    String[] timeSeriesArray = {"root.sg1.aa.bb", "root.sg1.aa.bb.cc", 
"root.sg1.aa"};
-
-    for (String timeSeries : timeSeriesArray) {
-      try {
-        writeStatement.execute(
-            String.format(
-                "create timeseries %s with datatype=INT64, encoding=PLAIN, 
compression=SNAPPY",
-                timeSeries));
-      } catch (Exception e) {
-        if (timeSeries.equals("root.sg1.aa.bb")) {
-          e.printStackTrace();
-          fail();
-        }
-      }
-    }
-    ResultSet resultSet = null;
-    // try to read data on each node.
-    for (Statement readStatement : readStatements) {
-      resultSet = readStatement.executeQuery("show timeseries");
-      Set<String> result = new HashSet<>();
-      while (resultSet.next()) {
-        result.add(resultSet.getString(1));
-      }
-      Assert.assertEquals(1, result.size());
-      Assert.assertTrue(result.contains("root.sg1.aa.bb"));
-      Assert.assertFalse(result.contains("root.sg1.aa.bb.cc"));
-      Assert.assertFalse(result.contains("root.sg1.aa"));
-      resultSet.close();
-    }
-
-    // test https://issues.apache.org/jira/browse/IOTDB-1331
-    writeStatement.execute(
-        "create timeseries root.ln.wf01.wt01.temperature WITH DATATYPE=FLOAT, 
ENCODING=RLE");
-    String[] initDataArray = {
-      "INSERT INTO root.ln.wf01.wt01(timestamp,temperature) values(200,20.71)",
-      "INSERT INTO root.ln.wf01.wt01(timestamp,temperature) values(220,50.71)"
-    };
-    for (String initData : initDataArray) {
-      writeStatement.execute(initData);
-    }
-    // try to read data on each node.
-    for (Statement readStatement : readStatements) {
-      resultSet = readStatement.executeQuery("select avg(temperature) from 
root.ln.wf01.wt01");
-      if (resultSet.next()) {
-        Assert.assertEquals(35.71, resultSet.getDouble(1), 0.01);
-      } else {
-        fail("expect 1 result, but get an empty resultSet.");
-      }
-      Assert.assertFalse(resultSet.next());
-      resultSet.close();
-    }
-
-    // test https://issues.apache.org/jira/browse/IOTDB-1348
-    initDataArray =
-        new String[] {
-          "INSERT INTO root.ln.wf01.wt01(timestamp, temperature) values(250, 
10.0)",
-          "INSERT INTO root.ln.wf01.wt01(timestamp, temperature) values(300, 
20.0)",
-          "INSERT INTO root.ln.wf01.wt01(timestamp, temperature) values(350, 
25.0)"
-        };
-
-    for (String initData : initDataArray) {
-      writeStatement.execute(initData);
-    }
-    // try to read data on each node.
-    for (Statement readStatement : readStatements) {
-      resultSet = readStatement.executeQuery("select last * from 
root.ln.wf01.wt01;");
-      Assert.assertTrue(resultSet.next());
-      double last = Double.parseDouble(resultSet.getString("value"));
-      Assert.assertEquals(25.0, last, 0.1);
-      resultSet.close();
-    }
-
-    // test https://issues.apache.org/jira/browse/IOTDB-1457
-    initDataArray =
-        new String[] {
-          "INSERT INTO root.ln.wf011.wt0110(timestamp, temperature) 
values(250, 10.0)",
-          "INSERT INTO root.ln.wf011.wt0111(timestamp, temperature) 
values(300, 20.0)",
-          "INSERT INTO root.ln.wf011.wt0112(timestamp, temperature) 
values(350, 25.0)"
-        };
-
-    for (String initData : initDataArray) {
-      writeStatement.execute(initData);
-    }
-    try {
-      session.executeNonQueryStatement(" delete from root.ln.wf011.**");
-    } catch (StatementExecutionException | IoTDBConnectionException e) {
-      Assert.assertFalse(e instanceof BatchExecutionException);
-    }
-
-    // test dictionary encoding
-    writeStatement.execute(
-        "create timeseries root.ln.wf01.wt02.city WITH DATATYPE=TEXT, 
ENCODING=DICTIONARY");
-    initDataArray =
-        new String[] {
-          "INSERT INTO root.ln.wf01.wt02(timestamp, city) values(250, 
'Nanjing')",
-          "INSERT INTO root.ln.wf01.wt02(timestamp, city) values(300, 
'Nanjing')",
-          "INSERT INTO root.ln.wf01.wt02(timestamp, city) values(350, 
'Singapore')",
-          "INSERT INTO root.ln.wf01.wt02(timestamp, city) values(400, 
'Shanghai')"
-        };
-    for (String initData : initDataArray) {
-      writeStatement.execute(initData);
-    }
-
-    String[] results = new String[] {"Nanjing", "Nanjing", "Singapore", 
"Shanghai"};
-    for (Statement readStatement : readStatements) {
-      resultSet = readStatement.executeQuery("select * from 
root.ln.wf01.wt02");
-      int i = 0;
-      while (resultSet.next()) {
-        Assert.assertEquals(results[i++], 
resultSet.getString("root.ln.wf01.wt02.city"));
-      }
-      Assert.assertFalse(resultSet.next());
-      resultSet.close();
-    }
-
-    // test https://issues.apache.org/jira/browse/IOTDB-1600
-    try {
-      // Target data of device "root.ln.wf01.d_1600"
-      // Time s1   s2   s3
-      // 1000 1.0  2.0  null
-      // 2000 null 3.0  4.0
-      // 3000 5.0  6.0  7.0
-      String testDevice = "root.ln.wf01.d_1600";
-      session.createTimeseries(
-          testDevice + ".s1", TSDataType.DOUBLE, TSEncoding.GORILLA, 
CompressionType.SNAPPY);
-      session.createTimeseries(
-          testDevice + ".s2", TSDataType.DOUBLE, TSEncoding.GORILLA, 
CompressionType.SNAPPY);
-      session.createTimeseries(
-          testDevice + ".s3", TSDataType.DOUBLE, TSEncoding.GORILLA, 
CompressionType.SNAPPY);
-      List<Long> insertTimes = Arrays.asList(1000L, 2000L, 3000L);
-      List<List<String>> insertedMeasurements =
-          Arrays.asList(
-              Arrays.asList("s1", "s2"),
-              Arrays.asList("s2", "s3"),
-              Arrays.asList("s1", "s2", "s3"));
-      List<List<TSDataType>> insertedDataTypes =
-          Arrays.asList(
-              Arrays.asList(TSDataType.DOUBLE, TSDataType.DOUBLE),
-              Arrays.asList(TSDataType.DOUBLE, TSDataType.DOUBLE),
-              Arrays.asList(TSDataType.DOUBLE, TSDataType.DOUBLE, 
TSDataType.DOUBLE));
-      List<List<Object>> insertedValues =
-          Arrays.asList(
-              Arrays.asList(1.0D, 2.0D),
-              Arrays.asList(3.0D, 4.0D),
-              Arrays.asList(5.0D, 6.0D, 7.0D));
-      session.insertRecordsOfOneDevice(
-          testDevice, insertTimes, insertedMeasurements, insertedDataTypes, 
insertedValues);
-      final double E = 0.00001;
-      for (Statement readStatement : readStatements) {
-        resultSet = readStatement.executeQuery("select s1, s2, s3 from " + 
testDevice);
-        Assert.assertTrue(resultSet.next());
-        Assert.assertEquals(1000L, resultSet.getLong("Time"));
-        Assert.assertEquals(1.0D, resultSet.getDouble(testDevice + ".s1"), E);
-        Assert.assertEquals(2.0D, resultSet.getDouble(testDevice + ".s2"), E);
-        Assert.assertNull(resultSet.getObject(testDevice + ".s3"));
-
-        Assert.assertTrue(resultSet.next());
-        Assert.assertEquals(2000L, resultSet.getLong("Time"));
-        Assert.assertNull(resultSet.getObject(testDevice + ".s1"));
-        Assert.assertEquals(3.0D, resultSet.getDouble(testDevice + ".s2"), E);
-        Assert.assertEquals(4.0D, resultSet.getDouble(testDevice + ".s3"), E);
-
-        Assert.assertTrue(resultSet.next());
-        Assert.assertEquals(3000L, resultSet.getLong("Time"));
-        Assert.assertEquals(5.0D, resultSet.getDouble(testDevice + ".s1"), E);
-        Assert.assertEquals(6.0D, resultSet.getDouble(testDevice + ".s2"), E);
-        Assert.assertEquals(7.0D, resultSet.getDouble(testDevice + ".s3"), E);
-
-        Assert.assertFalse(resultSet.next());
-        resultSet.close();
-      }
-    } catch (IoTDBConnectionException | StatementExecutionException e) {
-      e.printStackTrace();
-      fail();
-    }
-  }
-
-  // test https://issues.apache.org/jira/browse/IOTDB-1266
-  @Test
-  public void showTimeseriesRowsTest() throws SQLException {
-
-    int n = 3000;
-    String timeSeriesPrefix = "root.ln.wf01.wt";
-    String timeSeriesSuffix = ".temperature WITH DATATYPE=DOUBLE, 
ENCODING=RLE";
-    String timeSeries;
-    for (int i = 0; i < n; i++) {
-      timeSeries = timeSeriesPrefix + String.valueOf(i) + timeSeriesSuffix;
-      writeStatement.execute(String.format("create timeseries %s ", 
timeSeries));
-    }
-
-    // try to read data on each node.
-    for (Statement readStatement : readStatements) {
-      ResultSet resultSet = readStatement.executeQuery("SHOW TIMESERIES");
-      int cnt = 0;
-      while (resultSet.next()) {
-        cnt++;
-      }
-      Assert.assertEquals(n, cnt);
-      resultSet.close();
-    }
-
-    // try to get devices on each node;
-    for (Statement readStatement : readStatements) {
-      ResultSet resultSet = readStatement.executeQuery("COUNT DEVICES");
-      while (resultSet.next()) {
-        assertEquals(n, resultSet.getInt(1));
-      }
-    }
-  }
-
-  @Test
-  public void clusterLastQueryTest() throws IoTDBConnectionException, 
StatementExecutionException {
-
-    session.setStorageGroup("root.sg1");
-    session.createTimeseries(
-        "root.sg1.d1.s1", TSDataType.INT64, TSEncoding.RLE, 
CompressionType.SNAPPY);
-    session.createTimeseries(
-        "root.sg1.d2.s1", TSDataType.INT64, TSEncoding.RLE, 
CompressionType.SNAPPY);
-
-    insertRecords();
-
-    List<String> paths = new ArrayList<>();
-
-    paths.add("root.sg1.d1.s1");
-    paths.add("root.sg1.d2.s1");
-
-    SessionDataSet sessionDataSet = session.executeLastDataQuery(paths);
-    sessionDataSet.setFetchSize(1024);
-
-    int count = 0;
-    while (sessionDataSet.hasNext()) {
-      count++;
-      List<Field> fields = sessionDataSet.next().getFields();
-      Assert.assertEquals("[root.sg1.d1.s1,1,INT64]", 
fields.toString().replace(" ", ""));
-    }
-    Assert.assertEquals(1, count);
-    sessionDataSet.closeOperationHandle();
-  }
-
-  private void insertRecords() throws IoTDBConnectionException, 
StatementExecutionException {
-    String deviceId = "root.sg1.d1";
-    List<String> measurements = new ArrayList<>();
-    measurements.add("s1");
-    List<String> deviceIds = new ArrayList<>();
-    List<List<String>> measurementsList = new ArrayList<>();
-    List<List<Object>> valuesList = new ArrayList<>();
-    List<Long> timestamps = new ArrayList<>();
-    List<List<TSDataType>> typesList = new ArrayList<>();
-
-    for (long time = 0; time < 500; time++) {
-      List<Object> values = new ArrayList<>();
-      List<TSDataType> types = new ArrayList<>();
-      values.add(1L);
-      values.add(2L);
-      values.add(3L);
-      types.add(TSDataType.INT64);
-      types.add(TSDataType.INT64);
-      types.add(TSDataType.INT64);
-
-      deviceIds.add(deviceId);
-      measurementsList.add(measurements);
-      valuesList.add(values);
-      typesList.add(types);
-      timestamps.add(time);
-      if (time != 0 && time % 100 == 0) {
-        session.insertRecords(deviceIds, timestamps, measurementsList, 
typesList, valuesList);
-        deviceIds.clear();
-        measurementsList.clear();
-        valuesList.clear();
-        timestamps.clear();
-      }
-    }
-
-    session.insertRecords(deviceIds, timestamps, measurementsList, typesList, 
valuesList);
-  }
-
-  // test https://issues.apache.org/jira/browse/IOTDB-1407
-  @Test
-  public void showTimeseriesTagsTest() throws SQLException {
-    String createTimeSeries1 =
-        "create timeseries root.ln.wf01.wt1 WITH DATATYPE=DOUBLE, 
ENCODING=RLE, compression=SNAPPY tags(tag1=v1, tag2=v2)";
-    String createTimeSeries2 =
-        "create timeseries root.ln.wf01.wt2 WITH DATATYPE=DOUBLE, 
ENCODING=RLE, compression=SNAPPY tags(tag1=v1, tag2=v2)";
-    writeStatement.execute(createTimeSeries1);
-    writeStatement.execute(createTimeSeries2);
-    // try to read data on each node. select .*
-    for (Statement readStatement : readStatements) {
-      ResultSet resultSet =
-          readStatement.executeQuery("SHOW TIMESERIES root.ln.wf01.* where 
tag1=v1");
-      int cnt = 0;
-      while (resultSet.next()) {
-        cnt++;
-      }
-      Assert.assertEquals(2, cnt);
-      resultSet.close();
-    }
-
-    // try to read data on each node. select from parent series
-    for (Statement readStatement : readStatements) {
-      ResultSet resultSet =
-          readStatement.executeQuery("SHOW TIMESERIES root.ln.wf01.* where 
tag1=v1");
-      int cnt = 0;
-      while (resultSet.next()) {
-        cnt++;
-      }
-      Assert.assertEquals(2, cnt);
-      resultSet.close();
-    }
-
-    // try to read data on each node. select from one series
-    for (Statement readStatement : readStatements) {
-      ResultSet resultSet =
-          readStatement.executeQuery("SHOW TIMESERIES root.ln.wf01.wt1 where 
tag1=v1");
-      int cnt = 0;
-      while (resultSet.next()) {
-        cnt++;
-      }
-      Assert.assertEquals(1, cnt);
-      resultSet.close();
-    }
-
-    // try to read data on each node. select from root
-    for (Statement readStatement : readStatements) {
-      ResultSet resultSet = readStatement.executeQuery("SHOW TIMESERIES 
root.** where tag1=v1");
-      int cnt = 0;
-      while (resultSet.next()) {
-        cnt++;
-      }
-      Assert.assertEquals(2, cnt);
-      resultSet.close();
-    }
-
-    // try to read data on each node. SHOW TIMESERIES root.ln.wf01.* where 
tag1=v3"
-    for (Statement readStatement : readStatements) {
-      ResultSet resultSet =
-          readStatement.executeQuery("SHOW TIMESERIES root.ln.wf01.* where 
tag1=v3");
-      int cnt = 0;
-      while (resultSet.next()) {
-        cnt++;
-      }
-      Assert.assertEquals(0, cnt);
-      resultSet.close();
-    }
-
-    // try to read data on each node. SHOW TIMESERIES root.ln.wf01.* where 
tag3=v1"
-    for (Statement readStatement : readStatements) {
-      try (ResultSet rs =
-          readStatement.executeQuery("SHOW TIMESERIES root.ln.wf01.* where 
tag3=v1")) {
-        Assert.assertFalse(rs.next());
-      }
-    }
-  }
-
-  @Test
-  public void clusterUDTFQueryTest() throws SQLException {
-    // Prepare data.
-    writeStatement.execute(
-        "CREATE timeseries root.sg.d.s WITH datatype=DOUBLE, encoding=RLE, 
compression=SNAPPY");
-    for (int i = 10; i < 20; i++) {
-      writeStatement.execute(
-          String.format("INSERT INTO root.sg.d(timestamp,s) VALUES(%s,%s)", i, 
i));
-    }
-    for (int i = 0; i < 10; i++) {
-      writeStatement.execute(
-          String.format("INSERT INTO root.sg.d(timestamp,s) VALUES(%s,%s)", i, 
i));
-    }
-
-    ResultSet resultSet = null;
-
-    // Try to execute udf query on each node.
-    // Without time filter
-    for (Statement readStatement : readStatements) {
-      resultSet = readStatement.executeQuery("SELECT sin(s) FROM root.sg.d");
-
-      double i = 0;
-      while (resultSet.next()) {
-        Assert.assertEquals(Math.sin(i++), resultSet.getDouble(2), 0.00001);
-      }
-      Assert.assertFalse(resultSet.next());
-      resultSet.close();
-    }
-
-    // With time filter
-    for (Statement readStatement : readStatements) {
-      resultSet = readStatement.executeQuery("SELECT sin(s) FROM root.sg.d 
WHERE time >= 5");
-
-      double i = 5;
-      while (resultSet.next()) {
-        Assert.assertEquals(Math.sin(i++), resultSet.getDouble(2), 0.00001);
-      }
-      Assert.assertFalse(resultSet.next());
-      resultSet.close();
-    }
-  }
-
-  @Test
-  public void testSelectInto() throws SQLException {
-    for (int i = 0; i < 10; i++) {
-      writeStatement.execute(
-          String.format(
-              "CREATE timeseries root.sg.device%s.s WITH datatype=DOUBLE, 
encoding=RLE, compression=SNAPPY",
-              i));
-      writeStatement.execute(
-          String.format(
-              "CREATE timeseries root.sg.device%s.t WITH datatype=DOUBLE, 
encoding=RLE, compression=SNAPPY",
-              i));
-      writeStatement.execute(
-          String.format("INSERT INTO root.sg.device%s(timestamp,s) 
VALUES(1,1)", i));
-    }
-
-    writeStatement.execute(
-        "SELECT device0.s, device1.s, device2.s, device3.s, device4.s, 
device5.s, device6.s, device7.s, device8.s, device9.s "
-            + "INTO device0.t, device1.t, device2.t, device3.t, device4.t, 
device5.t, device6.t, device7.t, device8.t, device9.t "
-            + "FROM root.sg;");
-
-    for (int i = 0; i < 10; i++) {
-      writeStatement.execute(
-          String.format("INSERT INTO root.sg.device%s(timestamp,s) 
VALUES(2,2)", i));
-      writeStatement.execute(
-          String.format("SELECT device%s.s into device%s.t from root.sg;", i, 
i));
-    }
-
-    for (Statement readStatement : readStatements) {
-      for (int i = 0; i < 10; ++i) {
-        try (ResultSet resultSet =
-            readStatement.executeQuery(String.format("SELECT s, t FROM 
root.sg.device%s", i))) {
-          Assert.assertTrue(resultSet.next());
-          Assert.assertEquals(1, Double.parseDouble(resultSet.getString(1)), 
0);
-          Assert.assertEquals(
-              Double.parseDouble(resultSet.getString(1)),
-              Double.parseDouble(resultSet.getString(2)),
-              0);
-          Assert.assertEquals(
-              Double.parseDouble(resultSet.getString(2)),
-              Double.parseDouble(resultSet.getString(3)),
-              0);
-
-          Assert.assertTrue(resultSet.next());
-          Assert.assertEquals(2, Double.parseDouble(resultSet.getString(1)), 
0);
-          Assert.assertEquals(
-              Double.parseDouble(resultSet.getString(1)),
-              Double.parseDouble(resultSet.getString(2)),
-              0);
-          Assert.assertEquals(
-              Double.parseDouble(resultSet.getString(2)),
-              Double.parseDouble(resultSet.getString(3)),
-              0);
-
-          Assert.assertFalse(resultSet.next());
-        }
-      }
-    }
-  }
-
-  @Test
-  public void SetSystemReadOnlyWritableTest() throws SQLException {
-
-    String setReadOnly = "SET SYSTEM TO READONLY";
-    String createTimeSeries =
-        "create timeseries root.ln.wf01.wt1 WITH DATATYPE=DOUBLE, 
ENCODING=RLE, compression=SNAPPY tags(tag1=v1, tag2=v2)";
-    String setWritable = "SET SYSTEM TO WRITABLE";
-
-    writeStatement.execute(setReadOnly);
-
-    try {
-      writeStatement.execute(createTimeSeries);
-    } catch (Exception e) {
-      Assert.assertTrue(
-          e.getMessage()
-              .contains("Database is read-only, and does not accept non-query 
operation now"));
-    }
-
-    writeStatement.execute(setWritable);
-  }
-
-  @Test
-  public void testAutoCreateSchemaInClusterMode()
-      throws IoTDBConnectionException, StatementExecutionException, 
SQLException {
-    List<String> measurementList = new ArrayList<>();
-    measurementList.add("s1");
-    measurementList.add("s2");
-    measurementList.add("s3");
-
-    List<TSDataType> typeList = new ArrayList<>();
-    typeList.add(TSDataType.INT64);
-    typeList.add(TSDataType.INT64);
-    typeList.add(TSDataType.INT64);
-
-    List<Object> valueList = new ArrayList<>();
-    valueList.add(1L);
-    valueList.add(2L);
-    valueList.add(3L);
-
-    for (int i = 0; i < 5; i++) {
-      String sg = "root.sg" + String.valueOf(i);
-      session.setStorageGroup(sg);
-      for (int j = 0; j < 10; j++) {
-        session.createTimeseries(
-            String.format("%s.d1.s%s", sg, j),
-            TSDataType.INT64,
-            TSEncoding.RLE,
-            CompressionType.SNAPPY);
-        session.createTimeseries(
-            String.format("%s.d2.s%s", sg, j),
-            TSDataType.INT64,
-            TSEncoding.RLE,
-            CompressionType.SNAPPY);
-        session.createTimeseries(
-            String.format("%s.d3.s%s", sg, j),
-            TSDataType.INT64,
-            TSEncoding.RLE,
-            CompressionType.SNAPPY);
-        session.createTimeseries(
-            String.format("%s.d4.s%s", sg, j),
-            TSDataType.INT64,
-            TSEncoding.RLE,
-            CompressionType.SNAPPY);
-      }
-    }
-
-    // step 1: insert into existing time series.
-    for (int i = 0; i < 5; i++) {
-      for (long t = 0; t < 3; t++) {
-        session.insertRecord(
-            String.format("root.sg%s.d1", i), t, measurementList, typeList, 
1L, 2L, 3L);
-      }
-    }
-
-    List<List<String>> measurementsList = new ArrayList<>();
-    List<List<Object>> valuesList = new ArrayList<>();
-    List<List<TSDataType>> typesList = new ArrayList<>();
-    List<String> deviceList = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      String devicePath = String.format("root.sg%s.d2", i);
-      deviceList.add(devicePath);
-      typesList.add(typeList);
-      measurementsList.add(measurementList);
-      valuesList.add(valueList);
-    }
-
-    for (long t = 0; t < 3; t++) {
-      List<Long> timeList = new ArrayList<>();
-      for (int i = 0; i < 5; i++) {
-        timeList.add(t);
-      }
-      session.insertRecords(deviceList, timeList, measurementsList, typesList, 
valuesList);
-    }
-
-    List<MeasurementSchema> schemaList = new ArrayList<>();
-    schemaList.add(new MeasurementSchema("s1", TSDataType.INT64));
-    schemaList.add(new MeasurementSchema("s2", TSDataType.INT64));
-    schemaList.add(new MeasurementSchema("s3", TSDataType.INT64));
-
-    Map<String, Tablet> tabletMap = new HashMap<>();
-    for (int i = 0; i < 5; i++) {
-      Tablet tablet = new Tablet(String.format("root.sg%s.d3", i), schemaList, 
10);
-      for (long row = 0; row < 3; row++) {
-        int rowIndex = tablet.rowSize++;
-        tablet.addTimestamp(rowIndex, row);
-        tablet.addValue("s1", rowIndex, 1L);
-        tablet.addValue("s2", rowIndex, 2L);
-        tablet.addValue("s3", rowIndex, 3L);
-      }
-      session.insertTablet(tablet);
-      tablet.setDeviceId(String.format("root.sg%s.d4", i));
-      tabletMap.put(String.format("root.sg%s.d4", i), tablet);
-    }
-
-    session.insertTablets(tabletMap);
-
-    // step 2: test auto create sg and time series schema
-    for (int i = 5; i < 10; i++) {
-      for (long t = 0; t < 3; t++) {
-        session.insertRecord(
-            String.format("root.sg%s.d1", i), t, measurementList, typeList, 
1L, 2L, 3L);
-      }
-    }
-
-    deviceList.clear();
-    for (int i = 5; i < 10; i++) {
-      String device_path = String.format("root.sg%s.d2", i);
-      deviceList.add(device_path);
-    }
-
-    for (long t = 0; t < 3; t++) {
-      List<Long> timeList = new ArrayList<>();
-      for (int i = 0; i < 5; i++) {
-        timeList.add(t);
-      }
-      session.insertRecords(deviceList, timeList, measurementsList, typesList, 
valuesList);
-    }
-
-    tabletMap.clear();
-    for (int i = 5; i < 10; i++) {
-      Tablet tablet = new Tablet(String.format("root.sg%s.d3", i), schemaList, 
10);
-      for (long row = 0; row < 3; row++) {
-        int rowIndex = tablet.rowSize++;
-        tablet.addTimestamp(rowIndex, row);
-        tablet.addValue("s1", rowIndex, 1L);
-        tablet.addValue("s2", rowIndex, 2L);
-        tablet.addValue("s3", rowIndex, 3L);
-      }
-      session.insertTablet(tablet);
-      tablet.setDeviceId(String.format("root.sg%s.d4", i));
-      tabletMap.put(String.format("root.sg%s.d4", i), tablet);
-    }
-
-    session.insertTablets(tabletMap);
-
-    measurementsList.clear();
-    List<Long> timeList = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      timeList.add((long) i);
-      List<String> measurements = new ArrayList<>();
-      measurements.add(String.format("s%d", i));
-      measurements.add(String.format("s%d", i + 5));
-      measurements.add(String.format("s%d", i + 10));
-      measurementsList.add(measurements);
-    }
-
-    session.insertRecordsOfOneDevice(
-        "root.sg0.d5", timeList, measurementsList, typesList, valuesList);
-    session.insertRecordsOfOneDevice(
-        "root.sg20.d1", timeList, measurementsList, typesList, valuesList);
-
-    for (Statement readStatement : readStatements) {
-      for (int i = 0; i < 10; i++) {
-        for (int d = 1; d <= 4; d++) {
-          ResultSet resultSet =
-              readStatement.executeQuery(String.format("SELECT s1,s2,s3 from 
root.sg%s.d%s", i, d));
-          for (long t = 0; t < 3; t++) {
-            Assert.assertTrue(resultSet.next());
-            Assert.assertEquals(resultSet.getLong(1), t);
-            Assert.assertEquals(resultSet.getString(2), "1");
-            Assert.assertEquals(resultSet.getString(3), "2");
-            Assert.assertEquals(resultSet.getString(4), "3");
-          }
-        }
-      }
-
-      for (int i = 0; i < 5; i++) {
-        ResultSet resultSet =
-            readStatement.executeQuery(
-                String.format("select s%d,s%d,s%d from root.sg0.d5", i, i + 5, 
i + 10));
-        Assert.assertTrue(resultSet.next());
-        Assert.assertEquals(resultSet.getLong(1), i);
-        Assert.assertEquals(resultSet.getString(2), "1");
-        Assert.assertEquals(resultSet.getString(3), "2");
-        Assert.assertEquals(resultSet.getString(4), "3");
-
-        resultSet =
-            readStatement.executeQuery(
-                String.format("select s%d,s%d,s%d from root.sg20.d1", i, i + 
5, i + 10));
-        Assert.assertTrue(resultSet.next());
-        Assert.assertEquals(resultSet.getLong(1), i);
-        Assert.assertEquals(resultSet.getString(2), "1");
-        Assert.assertEquals(resultSet.getString(3), "2");
-        Assert.assertEquals(resultSet.getString(4), "3");
-      }
-    }
-
-    // test create time series
-    for (int i = 0; i < 5; i++) {
-      session.createTimeseries(
-          String.format("root.sg1%s.d1.s1", i),
-          TSDataType.INT64,
-          TSEncoding.RLE,
-          CompressionType.SNAPPY);
-    }
-
-    List<String> path = new ArrayList<>();
-    List<TSDataType> dataTypes = new ArrayList<>();
-    List<TSEncoding> encodings = new ArrayList<>();
-    List<CompressionType> compressionTypes = new ArrayList<>();
-    for (int i = 5; i < 10; i++) {
-      path.add(String.format("root.sg1%s.d1.s1", i));
-      dataTypes.add(TSDataType.INT64);
-      encodings.add(TSEncoding.RLE);
-      compressionTypes.add(CompressionType.SNAPPY);
-    }
-    session.createMultiTimeseries(
-        path, dataTypes, encodings, compressionTypes, null, null, null, null);
-    for (Statement readStatement : readStatements) {
-      for (int i = 0; i < 10; i++) {
-        ResultSet resultSet =
-            readStatement.executeQuery(String.format("show timeseries 
root.sg1%s.d1.s1", i));
-        Assert.assertTrue(resultSet.next());
-      }
-    }
-  }
-
-  @Test
-  public void testAutoCreateSchemaForAlignedTimeseries()
-      throws IoTDBConnectionException, StatementExecutionException, 
SQLException {
-    List<String> multiMeasurementComponents = new ArrayList<>();
-    multiMeasurementComponents.add("s1");
-    multiMeasurementComponents.add("s2");
-    multiMeasurementComponents.add("s3");
-
-    List<TSDataType> types = new ArrayList<>();
-    types.add(TSDataType.INT64);
-    types.add(TSDataType.INT32);
-    types.add(TSDataType.FLOAT);
-
-    List<Object> values = new ArrayList<>();
-    values.add(1L);
-    values.add(2);
-    values.add(3.0f);
-
-    List<Long> times = new ArrayList<>();
-    times.add(1L);
-    times.add(2L);
-    times.add(3L);
-
-    session.setStorageGroup(String.format("root.sg0"));
-
-    for (long time = 1; time <= 3; time++) {
-      session.insertAlignedRecord(
-          "root.sg1.d1.v1", time, multiMeasurementComponents, types, values);
-    }
-    List<String> multiSeriesIds = new ArrayList<>();
-    List<List<String>> multiMeasurementComponentsList = new ArrayList<>();
-    List<List<TSDataType>> typeList = new ArrayList<>();
-    List<List<Object>> valueList = new ArrayList<>();
-
-    for (int i = 2; i <= 4; i++) {
-      multiMeasurementComponentsList.add(multiMeasurementComponents);
-      typeList.add(types);
-      valueList.add(values);
-      multiSeriesIds.add(String.format("root.sg%d.d1.v1", i));
-    }
-    for (long time = 1; time <= 3; time++) {
-      List<Long> tmp_times = new ArrayList<>();
-      tmp_times.add(time);
-      tmp_times.add(time);
-      tmp_times.add(time);
-      session.insertAlignedRecords(
-          multiSeriesIds, tmp_times, multiMeasurementComponentsList, typeList, 
valueList);
-    }
-    multiSeriesIds.clear();
-    multiSeriesIds.add("root.sg0.d2.v1");
-    multiSeriesIds.add("root.sg0.d2.v1");
-    multiSeriesIds.add("root.sg0.d2.v1");
-
-    session.insertAlignedRecordsOfOneDevice(
-        "root.sg5.d1.v1", times, multiMeasurementComponentsList, typeList, 
valueList);
-
-    List<MeasurementSchema> schemaList = new ArrayList<>();
-    schemaList.add(new MeasurementSchema("s1", TSDataType.INT64));
-    schemaList.add(new MeasurementSchema("s2", TSDataType.INT32));
-    schemaList.add(new MeasurementSchema("s3", TSDataType.FLOAT));
-
-    Tablet tablet = new Tablet("root.sg6.d1.v1", schemaList);
-
-    for (long row = 1; row <= 3; row++) {
-      int rowIndex = tablet.rowSize++;
-      tablet.addTimestamp(rowIndex, row);
-      tablet.addValue(schemaList.get(0).getMeasurementId(), rowIndex, 1L);
-      tablet.addValue(schemaList.get(1).getMeasurementId(), rowIndex, 2);
-      tablet.addValue(schemaList.get(2).getMeasurementId(), rowIndex, 3.0f);
-    }
-    session.insertAlignedTablet(tablet, true);
-    tablet.reset();
-
-    List<MeasurementSchema> schemaList1 = new ArrayList<>();
-    schemaList1.add(new MeasurementSchema("s1", TSDataType.INT64));
-    schemaList1.add(new MeasurementSchema("s2", TSDataType.INT32));
-    schemaList1.add(new MeasurementSchema("s3", TSDataType.FLOAT));
-    List<MeasurementSchema> schemaList2 = new ArrayList<>();
-    schemaList2.add(new MeasurementSchema("s1", TSDataType.INT64));
-    schemaList2.add(new MeasurementSchema("s2", TSDataType.INT32));
-    schemaList2.add(new MeasurementSchema("s3", TSDataType.FLOAT));
-    List<MeasurementSchema> schemaList3 = new ArrayList<>();
-    schemaList3.add(new MeasurementSchema("s1", TSDataType.INT64));
-    schemaList3.add(new MeasurementSchema("s2", TSDataType.INT32));
-    schemaList3.add(new MeasurementSchema("s3", TSDataType.FLOAT));
-
-    Tablet tablet1 = new Tablet("root.sg7.d1.v1", schemaList1, 100);
-    Tablet tablet2 = new Tablet("root.sg8.d1.v1", schemaList2, 100);
-    Tablet tablet3 = new Tablet("root.sg9.d1.v1", schemaList3, 100);
-
-    Map<String, Tablet> tabletMap = new HashMap<>();
-    tabletMap.put("root.sg7.d1.v1", tablet1);
-    tabletMap.put("root.sg8.d1.v1", tablet2);
-    tabletMap.put("root.sg9.d1.v1", tablet3);
-
-    for (long row = 1; row <= 3; row++) {
-      int row1 = tablet1.rowSize++;
-      int row2 = tablet2.rowSize++;
-      int row3 = tablet3.rowSize++;
-      tablet1.addTimestamp(row1, row);
-      tablet2.addTimestamp(row2, row);
-      tablet3.addTimestamp(row3, row);
-      for (int i = 0; i < 3; i++) {
-        tablet1.addValue(schemaList1.get(i).getMeasurementId(), row1, 
values.get(i));
-        tablet2.addValue(schemaList2.get(i).getMeasurementId(), row2, 
values.get(i));
-        tablet3.addValue(schemaList3.get(i).getMeasurementId(), row3, 
values.get(i));
-      }
-    }
-    session.insertAlignedTablets(tabletMap, true);
-
-    tabletMap.clear();
-
-    for (Statement readStatement : readStatements) {
-      for (int sg = 1; sg <= 9; sg++) {
-        ResultSet resultSet =
-            readStatement.executeQuery(String.format("SELECT * from 
root.sg%d.d1.v1", sg));
-        for (long t = 1; t <= 3; t++) {
-          Assert.assertTrue(resultSet.next());
-          Assert.assertEquals(resultSet.getLong(1), t);
-          Assert.assertEquals(resultSet.getString(2), "1");
-          Assert.assertEquals(resultSet.getString(3), "2");
-          Assert.assertEquals(resultSet.getString(4), "3.0");
-        }
-      }
-    }
-  }
-
-  @Test
-  public void testInsertTabletWithNullValues()
-      throws IoTDBConnectionException, StatementExecutionException, 
SQLException {
-    List<MeasurementSchema> schemaList = new ArrayList<>();
-    schemaList.add(new MeasurementSchema("s0", TSDataType.DOUBLE, 
TSEncoding.RLE));
-    schemaList.add(new MeasurementSchema("s1", TSDataType.FLOAT, 
TSEncoding.RLE));
-    schemaList.add(new MeasurementSchema("s2", TSDataType.INT64, 
TSEncoding.RLE));
-    schemaList.add(new MeasurementSchema("s3", TSDataType.INT32, 
TSEncoding.RLE));
-    schemaList.add(new MeasurementSchema("s4", TSDataType.BOOLEAN, 
TSEncoding.RLE));
-    schemaList.add(new MeasurementSchema("s5", TSDataType.TEXT, 
TSEncoding.RLE));
-
-    Tablet tablet = new Tablet("root.sg1.d1", schemaList);
-    for (long time = 0; time < 10; time++) {
-      int rowIndex = tablet.rowSize++;
-      tablet.addTimestamp(rowIndex, time);
-
-      tablet.addValue(schemaList.get(0).getMeasurementId(), rowIndex, (double) 
time);
-      tablet.addValue(schemaList.get(1).getMeasurementId(), rowIndex, (float) 
time);
-      tablet.addValue(schemaList.get(2).getMeasurementId(), rowIndex, time);
-      tablet.addValue(schemaList.get(3).getMeasurementId(), rowIndex, (int) 
time);
-      tablet.addValue(schemaList.get(4).getMeasurementId(), rowIndex, time % 2 
== 0);
-      tablet.addValue(
-          schemaList.get(5).getMeasurementId(), rowIndex, new 
Binary(String.valueOf(time)));
-    }
-
-    BitMap[] bitMaps = new BitMap[schemaList.size()];
-    for (int i = 0; i < schemaList.size(); i++) {
-      if (bitMaps[i] == null) {
-        bitMaps[i] = new BitMap(10);
-      }
-      bitMaps[i].mark(i);
-    }
-    tablet.bitMaps = bitMaps;
-
-    if (tablet.rowSize != 0) {
-      session.insertTablet(tablet);
-      tablet.reset();
-    }
-
-    ResultSet resultSet;
-    // try to read data on each node.
-    for (Statement readStatement : readStatements) {
-      resultSet = readStatement.executeQuery("select count(*) from 
root.sg1.d1");
-      Assert.assertTrue(resultSet.next());
-      for (int i = 1; i <= schemaList.size(); ++i) {
-        Assert.assertEquals(9L, resultSet.getLong(i));
-      }
-    }
-  }
-}
diff --git a/testcontainer/src/test/java/org/apache/iotdb/db/sql/ClusterIT.java 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/ClusterIT.java
deleted file mode 100644
index 71d88826085..00000000000
--- a/testcontainer/src/test/java/org/apache/iotdb/db/sql/ClusterIT.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql;
-
-import org.apache.iotdb.jdbc.Config;
-import org.apache.iotdb.session.Session;
-
-import org.junit.After;
-import org.junit.Before;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.DockerComposeContainer;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.Statement;
-import java.util.concurrent.TimeUnit;
-
-// do not add tests here.
-// add tests into Cases.java instead.
-public abstract class ClusterIT extends Cases {
-
-  private static Logger logger = LoggerFactory.getLogger(ClusterIT.class);
-
-  // "root.sg1" is a special storage for testing whether the read and write 
operations can be run
-  // correctly if the data is not on the connected node.
-  public String defaultSG = "root.sg1";
-
-  protected int getWriteRpcPort() {
-    return getContainer().getServicePort("iotdb-server_1", 6667);
-  }
-
-  protected String getWriteRpcIp() {
-    return getContainer().getServiceHost("iotdb-server_1", 6667);
-  }
-
-  protected int[] getReadRpcPorts() {
-    return new int[] {getContainer().getServicePort("iotdb-server_1", 6667)};
-  }
-
-  protected String[] getReadRpcIps() {
-    return new String[] {getContainer().getServiceHost("iotdb-server_1", 
6667)};
-  }
-
-  protected void startCluster() {}
-
-  protected abstract DockerComposeContainer getContainer();
-
-  @Before
-  public void init() throws Exception {
-    startCluster();
-
-    Class.forName(Config.JDBC_DRIVER_NAME);
-    writeConnection =
-        DriverManager.getConnection(
-            "jdbc:iotdb://" + getWriteRpcIp() + ":" + getWriteRpcPort(), 
"root", "root");
-    writeStatement = writeConnection.createStatement();
-
-    int[] readPorts = getReadRpcPorts();
-    String[] readIps = getReadRpcIps();
-    readConnections = new Connection[readPorts.length];
-    readStatements = new Statement[readPorts.length];
-    for (int i = 0; i < readPorts.length; i++) {
-      readConnections[i] =
-          DriverManager.getConnection(
-              "jdbc:iotdb://" + readIps[i] + ":" + readPorts[i], "root", 
"root");
-      readStatements[i] = readConnections[i].createStatement();
-    }
-    session =
-        new Session.Builder()
-            .host(getWriteRpcIp())
-            .port(getWriteRpcPort())
-            .username("root")
-            .password("root")
-            .enableRedirection(false)
-            .build();
-    session.open();
-    TimeUnit.MILLISECONDS.sleep(3000);
-  }
-
-  @After
-  public void clean() throws Exception {
-    super.clean();
-  }
-
-  // do not add tests here.
-  // add tests into Cases.java instead.
-}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/ClusterSessionSimpleIT.java
 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/ClusterSessionSimpleIT.java
deleted file mode 100644
index 6f9189fd5ed..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/ClusterSessionSimpleIT.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql;
-
-import org.apache.iotdb.rpc.IoTDBConnectionException;
-import org.apache.iotdb.rpc.StatementExecutionException;
-import org.apache.iotdb.session.Session;
-import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
-
-import org.junit.Rule;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.DockerComposeContainer;
-import org.testcontainers.containers.NoProjectNameDockerComposeContainer;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-public class ClusterSessionSimpleIT {
-
-  private static Logger node1Logger = 
LoggerFactory.getLogger("iotdb-server_1");
-  private static Logger node2Logger = 
LoggerFactory.getLogger("iotdb-server_2");
-  private static Logger node3Logger = 
LoggerFactory.getLogger("iotdb-server_3");
-
-  private Session session;
-
-  @Rule
-  public DockerComposeContainer environment =
-      new NoProjectNameDockerComposeContainer(
-              "3nodes", new 
File("src/test/resources/3nodes/docker-compose.yaml"))
-          .withExposedService("iotdb-server_1", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_1", new Slf4jLogConsumer(node1Logger))
-          .withExposedService("iotdb-server_2", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_2", new Slf4jLogConsumer(node2Logger))
-          .withExposedService("iotdb-server_3", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_3", new Slf4jLogConsumer(node3Logger))
-          .withLocalCompose(true);
-
-  protected DockerComposeContainer getContainer() {
-    return environment;
-  }
-
-  @Test
-  public void testSessionCluster() throws IoTDBConnectionException, 
StatementExecutionException {
-    List<String> stringList = new ArrayList<>();
-    Integer service1Port = getContainer().getServicePort("iotdb-server_1", 
6667);
-    Integer service2Port = getContainer().getServicePort("iotdb-server_2", 
6667);
-    Integer service3Port = getContainer().getServicePort("iotdb-server_3", 
6667);
-    stringList.add("localhost:" + service1Port);
-    stringList.add("localhost:" + service2Port);
-    stringList.add("localhost:" + service3Port);
-    session = new Session(stringList, "root", "root");
-    session.open();
-    session.setStorageGroup("root.sg1");
-    session.createTimeseries(
-        "root.sg1.d1.s1", TSDataType.INT64, TSEncoding.RLE, 
CompressionType.SNAPPY);
-
-    session.createTimeseries(
-        "root.sg1.d2.s1", TSDataType.INT64, TSEncoding.RLE, 
CompressionType.SNAPPY);
-    session.close();
-  }
-}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/SingleNodeIT.java 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/SingleNodeIT.java
deleted file mode 100644
index fa75d1e63c4..00000000000
--- a/testcontainer/src/test/java/org/apache/iotdb/db/sql/SingleNodeIT.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.db.sql;
-
-import org.apache.iotdb.jdbc.Config;
-import org.apache.iotdb.session.Session;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.BindMode;
-import org.testcontainers.containers.GenericContainer;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
-import org.testcontainers.images.PullPolicy;
-import org.testcontainers.utility.DockerImageName;
-
-import java.io.File;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.Statement;
-
-// do not add tests here.
-// add tests into Cases.java instead.
-public class SingleNodeIT extends Cases {
-  private static Logger logger = LoggerFactory.getLogger(SingleNodeIT.class);
-
-  @Rule
-  public GenericContainer dslContainer =
-      new 
GenericContainer(DockerImageName.parse("apache/iotdb:maven-development"))
-          .withImagePullPolicy(PullPolicy.defaultPolicy())
-          // mount another properties for changing parameters, e.g., open 5555 
port (sync module)
-          .withFileSystemBind(
-              new 
File("src/test/resources/iotdb-datanode.properties").getAbsolutePath(),
-              "/iotdb/conf/iotdb-datanode.properties",
-              BindMode.READ_ONLY)
-          .withFileSystemBind(
-              new 
File("src/test/resources/logback-container.xml").getAbsolutePath(),
-              "/iotdb/conf/logback.xml",
-              BindMode.READ_ONLY)
-          .withLogConsumer(new Slf4jLogConsumer(logger))
-          .withExposedPorts(6667)
-          .waitingFor(Wait.forListeningPort());
-
-  int rpcPort = 6667;
-  int syncPort = 5555;
-
-  @Before
-  public void init() throws Exception {
-    rpcPort = dslContainer.getMappedPort(6667);
-    syncPort = dslContainer.getMappedPort(5555);
-    Class.forName(Config.JDBC_DRIVER_NAME);
-    readConnections = new Connection[1];
-    readStatements = new Statement[1];
-    writeConnection =
-        readConnections[0] =
-            DriverManager.getConnection("jdbc:iotdb://127.0.0.1:" + rpcPort, 
"root", "root");
-    writeStatement = readStatements[0] = writeConnection.createStatement();
-    session = new Session("127.0.0.1", rpcPort);
-    session.open();
-  }
-
-  @After
-  public void clean() throws Exception {
-    super.clean();
-  }
-
-  // do not add tests here.
-  // add tests into Cases.java instead.
-}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/node1/OneNodeClusterIT.java
 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/node1/OneNodeClusterIT.java
deleted file mode 100644
index daa7161ac14..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/node1/OneNodeClusterIT.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql.node1;
-
-import org.apache.iotdb.db.sql.ClusterIT;
-
-import org.junit.Rule;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.DockerComposeContainer;
-import org.testcontainers.containers.NoProjectNameDockerComposeContainer;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
-
-import java.io.File;
-
-// a cluster with only one node.
-// in this case, the read and write connection must be on the same node,
-// the data is also
-public class OneNodeClusterIT extends ClusterIT {
-  private static Logger node1Logger = 
LoggerFactory.getLogger("iotdb-server_1");
-
-  // in TestContainer's document, it is @ClassRule, and the environment is 
`public static`
-  // I am not sure the difference now.
-  @Rule
-  public DockerComposeContainer environment =
-      new NoProjectNameDockerComposeContainer(
-              "1node", new 
File("src/test/resources/1node/docker-compose.yaml"))
-          .withExposedService("iotdb-server_1", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_1", new Slf4jLogConsumer(node1Logger))
-          .withLocalCompose(true);
-
-  @Override
-  protected DockerComposeContainer getContainer() {
-    return environment;
-  }
-}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes3/AbstractThreeNodeClusterIT.java
 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes3/AbstractThreeNodeClusterIT.java
deleted file mode 100644
index 2729581ff2a..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes3/AbstractThreeNodeClusterIT.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql.nodes3;
-
-import org.apache.iotdb.db.sql.ClusterIT;
-
-import org.junit.Rule;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.DockerComposeContainer;
-import org.testcontainers.containers.NoProjectNameDockerComposeContainer;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
-
-import java.io.File;
-
-// just add real ITs into AbstractClusterIT.
-// in this case, the data must be on all nodes.
-// we just simulate write data on node A and read data on either node A or B.
-public abstract class AbstractThreeNodeClusterIT extends ClusterIT {
-
-  private static Logger node1Logger = 
LoggerFactory.getLogger("iotdb-server_1");
-  private static Logger node2Logger = 
LoggerFactory.getLogger("iotdb-server_2");
-  private static Logger node3Logger = 
LoggerFactory.getLogger("iotdb-server_3");
-
-  // in TestContainer's document, it is @ClassRule, and the environment is 
`public static`
-  // I am not sure the difference now.
-  @Rule
-  public DockerComposeContainer environment =
-      new NoProjectNameDockerComposeContainer(
-              "3nodes", new 
File("src/test/resources/3nodes/docker-compose.yaml"))
-          .withExposedService("iotdb-server_1", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_1", new Slf4jLogConsumer(node1Logger))
-          .withExposedService("iotdb-server_2", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_2", new Slf4jLogConsumer(node2Logger))
-          .withExposedService("iotdb-server_3", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_3", new Slf4jLogConsumer(node3Logger))
-          .withLocalCompose(true);
-
-  @Override
-  protected DockerComposeContainer getContainer() {
-    return environment;
-  }
-}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes3/ThreeNodeCluster1IT.java
 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes3/ThreeNodeCluster1IT.java
deleted file mode 100644
index 4802100fbd9..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes3/ThreeNodeCluster1IT.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql.nodes3;
-
-// read and the write statements are on the same node.
-public class ThreeNodeCluster1IT extends AbstractThreeNodeClusterIT {}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes3/ThreeNodeCluster2IT.java
 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes3/ThreeNodeCluster2IT.java
deleted file mode 100644
index 69ee3a57bf0..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes3/ThreeNodeCluster2IT.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql.nodes3;
-
-// read and the write statements are on the different nodes.
-public class ThreeNodeCluster2IT extends AbstractThreeNodeClusterIT {
-
-  protected String getWriteRpcIp() {
-    return getContainer().getServiceHost("iotdb-server_2", 6667);
-  }
-
-  protected int getWriteRpcPort() {
-    return getContainer().getServicePort("iotdb-server_2", 6667);
-  }
-}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/AbstractFiveNodeClusterIT.java
 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/AbstractFiveNodeClusterIT.java
deleted file mode 100644
index 4535fdb2990..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/AbstractFiveNodeClusterIT.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql.nodes5;
-
-import org.apache.iotdb.db.sql.ClusterIT;
-
-import org.junit.Rule;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.DockerComposeContainer;
-import org.testcontainers.containers.NoProjectNameDockerComposeContainer;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
-
-import java.io.File;
-
-// just add real ITs into AbstractClusterIT.
-// in this case, the data may be not on the node that a read client connects.
-// So, we have: write data on node A and read data on either node A, B, and C.
-public abstract class AbstractFiveNodeClusterIT extends ClusterIT {
-
-  private static Logger node1Logger = 
LoggerFactory.getLogger("iotdb-server_1");
-  private static Logger node2Logger = 
LoggerFactory.getLogger("iotdb-server_2");
-  private static Logger node3Logger = 
LoggerFactory.getLogger("iotdb-server_3");
-  private static Logger node4Logger = 
LoggerFactory.getLogger("iotdb-server_4");
-  private static Logger node5Logger = 
LoggerFactory.getLogger("iotdb-server_5");
-
-  // in TestContainer's document, it is @ClassRule, and the environment is 
`public static`
-  // I am not sure the difference now.
-  @Rule
-  public DockerComposeContainer environment =
-      new NoProjectNameDockerComposeContainer(
-              "5nodes", new 
File("src/test/resources/5nodes/docker-compose.yaml"))
-          .withExposedService("iotdb-server_1", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_1", new Slf4jLogConsumer(node1Logger))
-          .withExposedService("iotdb-server_2", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_2", new Slf4jLogConsumer(node2Logger))
-          .withExposedService("iotdb-server_3", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_3", new Slf4jLogConsumer(node3Logger))
-          .withExposedService("iotdb-server_4", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_4", new Slf4jLogConsumer(node4Logger))
-          .withExposedService("iotdb-server_5", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-server_5", new Slf4jLogConsumer(node5Logger))
-          .withLocalCompose(true);
-
-  @Override
-  protected DockerComposeContainer getContainer() {
-    return environment;
-  }
-}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/FiveNodeCluster1IT.java
 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/FiveNodeCluster1IT.java
deleted file mode 100644
index 328de27a484..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/FiveNodeCluster1IT.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql.nodes5;
-
-// read and the write statements are on the different nodes.
-public class FiveNodeCluster1IT extends AbstractFiveNodeClusterIT {}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/FiveNodeCluster2IT.java
 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/FiveNodeCluster2IT.java
deleted file mode 100644
index 6a455bccb54..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/FiveNodeCluster2IT.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql.nodes5;
-
-// read and the write statements are on the different nodes, and maybe in the 
same raft group.
-public class FiveNodeCluster2IT extends AbstractFiveNodeClusterIT {
-
-  protected String getWriteRpcIp() {
-    return getContainer().getServiceHost("iotdb-server_2", 6667);
-  }
-
-  protected int getWriteRpcPort() {
-    return getContainer().getServicePort("iotdb-server_2", 6667);
-  }
-}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/FiveNodeCluster4IT.java
 
b/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/FiveNodeCluster4IT.java
deleted file mode 100644
index 2b8491f5ffa..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sql/nodes5/FiveNodeCluster4IT.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.db.sql.nodes5;
-
-// read and the write statements are on the different nodes, and maybe in the 
different raft groups.
-public class FiveNodeCluster4IT extends AbstractFiveNodeClusterIT {
-
-  protected String getWriteRpcIp() {
-    return getContainer().getServiceHost("iotdb-server_4", 6667);
-  }
-
-  protected int getWriteRpcPort() {
-    return getContainer().getServicePort("iotdb-server_4", 6667);
-  }
-}
diff --git a/testcontainer/src/test/java/org/apache/iotdb/db/sync/SyncIT.java 
b/testcontainer/src/test/java/org/apache/iotdb/db/sync/SyncIT.java
deleted file mode 100644
index ebda8f04c9f..00000000000
--- a/testcontainer/src/test/java/org/apache/iotdb/db/sync/SyncIT.java
+++ /dev/null
@@ -1,426 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.db.sync;
-
-import org.apache.iotdb.jdbc.Config;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.DockerComposeContainer;
-import org.testcontainers.containers.NoProjectNameDockerComposeContainer;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
-
-import java.io.File;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.Statement;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-
-public class SyncIT {
-  private static Logger receiverLogger = 
LoggerFactory.getLogger("iotdb-receiver_1");
-  private static Logger senderLogger = 
LoggerFactory.getLogger("iotdb-sender_1");
-  private static int RETRY_TIME = 30;
-
-  protected Statement senderStatement;
-  protected Connection senderConnection;
-  protected Statement receiverStatement;
-  protected Connection receiverConnection;
-
-  // in TestContainer's document, it is @ClassRule, and the environment is 
`public static`
-  // I am not sure the difference now.
-  @Rule
-  public DockerComposeContainer environment =
-      new NoProjectNameDockerComposeContainer(
-              "sync", new File("src/test/resources/sync/docker-compose.yaml"))
-          .withExposedService("iotdb-sender_1", 6667, Wait.forListeningPort())
-          .withLogConsumer("iotdb-sender_1", new 
Slf4jLogConsumer(senderLogger))
-          .withExposedService("iotdb-receiver_1", 6667, 
Wait.forListeningPort())
-          .withLogConsumer("iotdb-receiver_1", new 
Slf4jLogConsumer(receiverLogger))
-          .withLocalCompose(true);
-
-  protected int getSenderRpcPort() {
-    return environment.getServicePort("iotdb-sender_1", 6667);
-  }
-
-  protected String getSenderIp() {
-    return environment.getServiceHost("iotdb-sender_1", 6667);
-  }
-
-  protected int getReceiverRpcPort() {
-    return environment.getServicePort("iotdb-receiver_1", 6667);
-  }
-
-  protected String getReceiverIp() {
-    return environment.getServiceHost("iotdb-receiver_1", 6667);
-  }
-
-  @Before
-  public void init() throws Exception {
-    Class.forName(Config.JDBC_DRIVER_NAME);
-    senderConnection =
-        DriverManager.getConnection(
-            "jdbc:iotdb://" + getSenderIp() + ":" + getSenderRpcPort(), 
"root", "root");
-    senderStatement = senderConnection.createStatement();
-    receiverConnection =
-        DriverManager.getConnection(
-            "jdbc:iotdb://" + getReceiverIp() + ":" + getReceiverRpcPort(), 
"root", "root");
-    receiverStatement = receiverConnection.createStatement();
-  }
-
-  @After
-  public void clean() throws Exception {
-    senderStatement.close();
-    senderConnection.close();
-    receiverStatement.close();
-    receiverConnection.close();
-  }
-
-  private void prepareSchema() throws Exception {
-    senderStatement.execute("CREATE DATABASE root.sg1");
-    senderStatement.execute("CREATE DATABASE root.sg2");
-    senderStatement.execute("create timeseries root.sg1.d1.s1 with 
datatype=int32, encoding=PLAIN");
-    senderStatement.execute("create timeseries root.sg1.d1.s2 with 
datatype=float, encoding=RLE");
-    senderStatement.execute("create timeseries root.sg1.d1.s3 with 
datatype=TEXT, encoding=PLAIN");
-    senderStatement.execute("create timeseries root.sg1.d2.s4 with 
datatype=int64, encoding=PLAIN");
-    senderStatement.execute("create timeseries root.sg2.d1.s0 with 
datatype=int32, encoding=PLAIN");
-    senderStatement.execute(
-        "create timeseries root.sg2.d2.s1 with datatype=boolean, 
encoding=PLAIN");
-  }
-
-  /* add one seq tsfile in sg1 */
-  private void prepareIns1() throws Exception {
-    senderStatement.execute(
-        "insert into root.sg1.d1(timestamp, s1, s2, s3) values(1, 1, 16.0, 
'a')");
-    senderStatement.execute(
-        "insert into root.sg1.d1(timestamp, s1, s2, s3) values(2, 2, 25.16, 
'b')");
-    senderStatement.execute(
-        "insert into root.sg1.d1(timestamp, s1, s2, s3) values(3, 3, 65.25, 
'c')");
-    senderStatement.execute(
-        "insert into root.sg1.d1(timestamp, s1, s2, s3) values(16, 25, 100.0, 
'd')");
-    senderStatement.execute("insert into root.sg1.d2(timestamp, s4) values(1, 
1)");
-    senderStatement.execute("flush");
-  }
-
-  /* add one seq tsfile in sg1 */
-  private void prepareIns2() throws Exception {
-    senderStatement.execute(
-        "insert into root.sg1.d1(timestamp, s1, s2, s3) values(100, 65, 16.25, 
'e')");
-    senderStatement.execute(
-        "insert into root.sg1.d1(timestamp, s1, s2, s3) values(65, 100, 25.0, 
'f')");
-    senderStatement.execute("insert into root.sg1.d2(timestamp, s4) 
values(200, 100)");
-    senderStatement.execute("flush");
-  }
-
-  /* add one seq tsfile in sg1, one unseq tsfile in sg1, one seq tsfile in sg2 
*/
-  private void prepareIns3() throws Exception {
-    senderStatement.execute("insert into root.sg2.d1(timestamp, s0) 
values(100, 100)");
-    senderStatement.execute("insert into root.sg2.d1(timestamp, s0) values(65, 
65)");
-    senderStatement.execute("insert into root.sg2.d2(timestamp, s1) values(1, 
true)");
-    senderStatement.execute(
-        "insert into root.sg1.d1(timestamp, s1, s2, s3) values(25, 16, 65.16, 
'g')");
-    senderStatement.execute(
-        "insert into root.sg1.d1(timestamp, s1, s2, s3) values(200, 100, 
16.65, 'h')");
-    senderStatement.execute("flush");
-  }
-
-  private void prepareDel1() throws Exception { // after ins1, add 2 deletions
-    senderStatement.execute("delete from root.sg1.d1.s1 where time == 3");
-    senderStatement.execute("delete from root.sg1.d1.s2 where time >= 1 and 
time <= 2");
-  }
-
-  private void prepareDel2() throws Exception { // after ins2, add 3 deletions
-    senderStatement.execute("delete from root.sg1.d1.s3 where time <= 65");
-  }
-
-  private void prepareDel3() throws Exception { // after ins3, add 5 
deletions, 2 schemas{
-    senderStatement.execute("delete from root.sg1.d1.* where time <= 2");
-    senderStatement.execute("delete timeseries root.sg1.d2.*");
-    senderStatement.execute("delete database root.sg2");
-  }
-
-  private void preparePipe() throws Exception {
-    receiverStatement.execute("start pipeserver");
-    senderStatement.execute(
-        "create pipesink my_iotdb as 
iotdb(ip='sync_iotdb-receiver_1',port=6670)");
-    senderStatement.execute("create pipe p to my_iotdb");
-  }
-
-  private void startPipe() throws Exception {
-    senderStatement.execute("start pipe p");
-  }
-
-  private void stopPipe() throws Exception {
-    senderStatement.execute("stop pipe p");
-  }
-
-  private void dropPipe() throws Exception {
-    senderStatement.execute("drop pipe p");
-  }
-
-  private void checkResult() throws Exception {
-    String[] columnNames =
-        new String[] {
-          "root.sg1.d1.s3",
-          "root.sg1.d1.s1",
-          "root.sg1.d1.s2",
-          "root.sg1.d2.s4",
-          "root.sg2.d1.s0",
-          "root.sg2.d2.s1"
-        };
-    String[] results =
-        new String[] {
-          "1,a,1,16.0,1,null,true",
-          "2,b,2,25.16,null,null,null",
-          "3,c,3,65.25,null,null,null",
-          "16,d,25,100.0,null,null,null",
-          "25,g,16,65.16,null,null,null",
-          "65,f,100,25.0,null,65,null",
-          "100,e,65,16.25,null,100,null",
-          "200,h,100,16.65,100,null,null"
-        };
-    checkResult(receiverStatement, "select ** from root", columnNames, 
results, true);
-  }
-
-  private void checkResultWithDeletion() throws Exception {
-    String[] columnNames =
-        new String[] {
-          "root.sg1.d1.s3", "root.sg1.d1.s1", "root.sg1.d1.s2",
-        };
-    String[] results =
-        new String[] {
-          "3,null,null,65.25",
-          "16,null,25,100.0",
-          "25,null,16,65.16",
-          "65,null,100,25.0",
-          "100,e,65,16.25",
-          "200,h,100,16.65"
-        };
-    checkResult(receiverStatement, "select ** from root", columnNames, 
results, true);
-  }
-
-  @Test
-  public void testCreatePipe() throws Exception {
-    preparePipe();
-    checkResult(
-        receiverStatement,
-        "show pipe",
-        new String[] {"name", "role", "status"},
-        new String[] {"p,receiver,STOP"},
-        false);
-    dropPipe();
-    checkResult(
-        senderStatement,
-        "show pipe",
-        new String[] {"name", "role", "remote", "status"},
-        new String[] {"p,sender,my_iotdb,DROP"},
-        false);
-  }
-
-  @Test
-  public void testHistoryInsert() {
-    try {
-      prepareSchema();
-      prepareIns1();
-      prepareIns2();
-      prepareIns3();
-      preparePipe();
-      startPipe();
-      checkResult();
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.fail(e.getMessage());
-    }
-  }
-
-  @Test
-  public void testHistoryAndRealTimeInsert() {
-    try {
-      prepareSchema();
-      prepareIns1();
-      prepareIns2();
-      preparePipe();
-      startPipe();
-      prepareIns3();
-      checkResult();
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.fail(e.getMessage());
-    }
-  }
-
-  @Test
-  public void testStopAndStartInsert() {
-    try {
-      prepareSchema();
-      prepareIns1();
-      preparePipe();
-      startPipe();
-      prepareIns2();
-      stopPipe();
-      prepareIns3();
-      startPipe();
-      checkResult();
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.fail(e.getMessage());
-    }
-  }
-
-  @Test
-  public void testRealTimeAndStopInsert() {
-    try {
-      preparePipe(); // realtime
-      startPipe();
-      prepareSchema();
-      prepareIns1();
-      stopPipe();
-      prepareIns2();
-      startPipe();
-      prepareIns3();
-      checkResult();
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.fail(e.getMessage());
-    }
-  }
-
-  @Test
-  public void testHistoryDel() {
-    try {
-      prepareSchema(); // history
-      prepareIns1();
-      prepareIns2();
-      prepareIns3();
-      prepareDel1();
-      prepareDel2();
-      prepareDel3();
-      preparePipe(); // realtime
-      startPipe();
-      checkResultWithDeletion();
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.fail(e.getMessage());
-    }
-  }
-
-  @Test
-  public void testRealtimeDel() {
-    try {
-      prepareSchema(); // history
-      prepareIns1();
-      preparePipe(); // realtime
-      startPipe();
-      prepareIns2();
-      prepareDel1();
-      stopPipe();
-      prepareIns3();
-      startPipe();
-      prepareDel2();
-      prepareDel3();
-      checkResultWithDeletion();
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.fail();
-    }
-  }
-
-  /**
-   * Execute sql in IoTDB and compare resultSet with expected result. This 
method only check columns
-   * that is explicitly declared in columnNames. This method will compare 
expected result with
-   * actual result RETRY_TIME times. Interval of each run is 1000ms.
-   *
-   * @param statement Statement of IoTDB.
-   * @param sql SQL to be executed.
-   * @param columnNames Columns to be compared with.
-   * @param retArray Expected result set. Order of columns is as same as 
columnNames.
-   * @param hasTimeColumn If result set contains time column (e.g. timeserires 
query), set
-   *     hasTimeColumn = true.
-   */
-  private static void checkResult(
-      Statement statement,
-      String sql,
-      String[] columnNames,
-      String[] retArray,
-      boolean hasTimeColumn)
-      throws Exception {
-    loop:
-    for (int loop = 0; loop < RETRY_TIME; loop++) {
-      try {
-        Thread.sleep(1000);
-        boolean hasResultSet = statement.execute(sql);
-        if (!assertOrCompareEqual(true, hasResultSet, loop)) {
-          continue;
-        }
-        ResultSet resultSet = statement.getResultSet();
-        ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
-        Map<String, Integer> map = new HashMap<>();
-        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
-          map.put(resultSetMetaData.getColumnName(i), i);
-        }
-        int cnt = 0;
-        while (resultSet.next()) {
-          StringBuilder builder = new StringBuilder();
-          if (hasTimeColumn) {
-            builder.append(resultSet.getString(1)).append(",");
-          }
-          for (String columnName : columnNames) {
-            int index = map.get(columnName);
-            builder.append(resultSet.getString(index)).append(",");
-          }
-          if (builder.length() > 0) {
-            builder.deleteCharAt(builder.length() - 1);
-          }
-          if (!assertOrCompareEqual(retArray[cnt], builder.toString(), loop)) {
-            continue loop;
-          }
-          cnt++;
-        }
-        if (!assertOrCompareEqual(retArray.length, cnt, loop)) {
-          continue;
-        }
-        return;
-      } catch (Exception e) {
-        if (loop == RETRY_TIME - 1) {
-          throw e;
-        }
-      }
-    }
-    Assert.fail();
-  }
-
-  private static boolean assertOrCompareEqual(Object expected, Object actual, 
int loop) {
-    if (loop == RETRY_TIME - 1) {
-      assertEquals(expected, actual);
-      return true;
-    } else {
-      return expected.equals(actual);
-    }
-  }
-}
diff --git 
a/testcontainer/src/test/java/org/apache/iotdb/db/sync/SyncWeakNetworkIT.java 
b/testcontainer/src/test/java/org/apache/iotdb/db/sync/SyncWeakNetworkIT.java
deleted file mode 100644
index 09effada3ae..00000000000
--- 
a/testcontainer/src/test/java/org/apache/iotdb/db/sync/SyncWeakNetworkIT.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.db.sync;
-
-import org.junit.Assert;
-import org.testcontainers.containers.Container;
-import org.testcontainers.containers.ContainerState;
-
-/** Simulate network delay and loss. */
-public class SyncWeakNetworkIT extends SyncIT {
-  @Override
-  public void init() throws Exception {
-    super.init();
-    // set delay is 200±50ms that conform to a normal distribution;
-    // network packet with 10% loss rate, 10% duplicate rate, 10% reorder rate 
and 10% corrupt rate;
-    Container.ExecResult res =
-        ((ContainerState) 
environment.getContainerByServiceName("iotdb-sender_1").get())
-            .execInContainer(
-                "sh",
-                "-c",
-                "tc qdisc add dev eth0 root netem delay 200ms 50ms 25% 
distribution normal loss random 10% duplicate 10% reorder 10% corrupt 10%");
-    Assert.assertEquals(0, res.getExitCode());
-  }
-}
diff --git 
a/testcontainer/src/test/java/org/testcontainers/containers/NoProjectNameDockerComposeContainer.java
 
b/testcontainer/src/test/java/org/testcontainers/containers/NoProjectNameDockerComposeContainer.java
deleted file mode 100644
index 0d1fa76a833..00000000000
--- 
a/testcontainer/src/test/java/org/testcontainers/containers/NoProjectNameDockerComposeContainer.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.testcontainers.containers;
-
-import java.io.File;
-import java.lang.reflect.Field;
-
-public class NoProjectNameDockerComposeContainer extends 
DockerComposeContainer {
-
-  public NoProjectNameDockerComposeContainer(String identifier, File... 
composeFiles) {
-    super(identifier, composeFiles);
-    Field project = null;
-    try {
-      project = DockerComposeContainer.class.getDeclaredField("project");
-      project.setAccessible(true);
-      project.set(this, "");
-    } catch (NoSuchFieldException | IllegalAccessException e) {
-      e.printStackTrace();
-    }
-  }
-}
diff --git a/testcontainer/src/test/resources/iotdb-datanode.properties 
b/testcontainer/src/test/resources/iotdb-datanode.properties
deleted file mode 100644
index ed9f096aab5..00000000000
--- a/testcontainer/src/test/resources/iotdb-datanode.properties
+++ /dev/null
@@ -1,24 +0,0 @@
-#Licensed to the Apache Software Foundation (ASF) under one
-#or more contributor license agreements.  See the NOTICE file
-#distributed with this work for additional information
-#regarding copyright ownership.  The ASF licenses this file
-#to you under the Apache License, Version 2.0 (the
-#"License"); you may not use this file except in compliance
-#with the License.  You may obtain a copy of the License at
-#
-#http://www.apache.org/licenses/LICENSE-2.0
-#
-#Unless required by applicable law or agreed to in writing,
-#software distributed under the License is distributed on an
-#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#KIND, either express or implied.  See the License for the
-#specific language governing permissions and limitations
-#under the License.
-
-
-base_dir=target/tmp
-data_dirs=target/data
-wal_dirs=target/wal
-index_root_dir=target/index
-udf_lib_dir=target/ext
-tracing_dir=target/data/tracing
\ No newline at end of file
diff --git a/testcontainer/src/test/resources/logback-container.xml 
b/testcontainer/src/test/resources/logback-container.xml
deleted file mode 100644
index e897fe24cb4..00000000000
--- a/testcontainer/src/test/resources/logback-container.xml
+++ /dev/null
@@ -1,39 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<configuration debug="false">
-    <property name="LOG_PATH" value="target/logs"/>
-    <!-- prevent logback from outputting its own status at the start of every 
log -->
-    <statusListener class="ch.qos.logback.core.status.NopStatusListener"/>
-    <appender class="ch.qos.logback.core.ConsoleAppender" name="stdout">
-        <Target>System.out</Target>
-        <encoder>
-            <pattern>%-5p [%d] [%thread] %C{25}:%L - %m %n</pattern>
-            <charset>utf-8</charset>
-        </encoder>
-        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-            <level>DEBUG</level>
-        </filter>
-    </appender>
-    <root level="INFO">
-        <appender-ref ref="stdout"/>
-    </root>
-</configuration>
diff --git a/testcontainer/src/test/resources/logback.xml 
b/testcontainer/src/test/resources/logback.xml
deleted file mode 100644
index 098f463a66d..00000000000
--- a/testcontainer/src/test/resources/logback.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<configuration debug="false">
-    <property name="LOG_PATH" value="target/logs"/>
-    <!-- prevent logback from outputting its own status at the start of every 
log -->
-    <statusListener class="ch.qos.logback.core.status.NopStatusListener"/>
-    <appender class="ch.qos.logback.core.ConsoleAppender" name="stdout">
-        <Target>System.out</Target>
-        <encoder>
-            <pattern>%-5p [%d] [%thread] %C{25}:%L - %m %n</pattern>
-            <charset>utf-8</charset>
-        </encoder>
-        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-            <level>DEBUG</level>
-        </filter>
-    </appender>
-    <appender class="ch.qos.logback.core.ConsoleAppender" name="stdout">
-        <Target>System.out</Target>
-        <encoder>
-            <pattern>[%thread] %m %n</pattern>
-            <charset>utf-8</charset>
-        </encoder>
-        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-            <level>DEBUG</level>
-        </filter>
-    </appender>
-    <logger name="org.testcontainers.containers.output.Slf4jLogConsumer" 
level="INFO">
-        <appender-ref ref="Container"/>
-    </logger>
-    <root level="INFO">
-        <appender-ref ref="stdout"/>
-    </root>
-</configuration>
diff --git a/testcontainer/src/test/resources/sync/docker-compose.yaml 
b/testcontainer/src/test/resources/sync/docker-compose.yaml
deleted file mode 100644
index fa0cedc8a4b..00000000000
--- a/testcontainer/src/test/resources/sync/docker-compose.yaml
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-version: '3.8'
-
-services:
-  iotdb-sender:
-    image: apache/iotdb:sync-maven-development
-    networks:
-      - iotdb
-    cap_add:
-      - NET_ADMIN
-    healthcheck:
-      test: [ "CMD", "bash", "-c", "cat < /dev/null > /dev/tcp/127.0.0.1/6667" 
]
-      interval: 5s
-      timeout: 60s
-      retries: 120
-    volumes:
-    - ../logback-container.xml:/iotdb/conf/logback.xml
-  iotdb-receiver:
-    image: apache/iotdb:sync-maven-development
-    networks:
-      - iotdb
-    cap_add:
-      - NET_ADMIN
-    healthcheck:
-      test: [ "CMD", "bash", "-c", "cat < /dev/null > /dev/tcp/127.0.0.1/6667" 
]
-      interval: 5s
-      timeout: 60s
-      retries: 120
-    volumes:
-      - ../logback-container.xml:/iotdb/conf/logback.xml
-
-
-networks:
-  iotdb:
-    driver: bridge
\ No newline at end of file
diff --git a/testcontainer/src/tool/README.md b/testcontainer/src/tool/README.md
deleted file mode 100644
index 3b943af7ff8..00000000000
--- a/testcontainer/src/tool/README.md
+++ /dev/null
@@ -1,33 +0,0 @@
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-
-## Motivation
-The current E2E framework writes all process logs into a single test log. This 
approach makes it difficult to find the root cause when there are failed tests 
with unstable recurrence. So we need a tool that can find the failed tests in 
this log and separate the logs from the different nodes.
-
-## Usage
-1. Download log archive from CI.
-
-2. Parse log.
-```
-python3 parser.py [filename]
-```
-
-3. View the separated logs in the current directory.
\ No newline at end of file
diff --git a/testcontainer/src/tool/parser.py b/testcontainer/src/tool/parser.py
deleted file mode 100644
index 5cee0a85a05..00000000000
--- a/testcontainer/src/tool/parser.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-import os
-import sys
-import re
-
# Matches thread names like "docker-java-stream-<digits>" (presumably emitted
# by the docker-java client — confirm against the CI log format); the capture
# group extracts the numeric id used to separate per-node logs.
pattern = re.compile(r"docker\-java\-stream\-+(\d+)")
-
-
def getAllLogs(filename):
    """Read *filename* and return one chunk per compose invocation.

    The combined CI log contains an "up -d" marker before each test run;
    everything preceding the first marker is preamble and is dropped.
    """
    with open(filename, "r") as log_file:
        chunks = log_file.read().split("up -d")
    return chunks[1:]
-
-
def writeAllLogs(filename, content):
    """Write every entry of *content* to *filename*, one entry per line."""
    with open(filename, "w") as out:
        out.writelines(row + "\n" for row in content)
-
-
def getNodes(log):
    """Map each docker-java stream id found in *log* to an empty list.

    Each key later accumulates the log lines belonging to that node
    (filled in by ``parse``).
    """
    # The dict comprehension de-duplicates ids and gives each id its OWN
    # list — dict.fromkeys(ids, []) would share one list between nodes.
    # (Replaces the original's `nodes.__contains__(id)` dunder call and
    # its shadowing of the builtin `id`.)
    return {node_id: [] for node_id in pattern.findall(log)}
-
-
def checkAndMkdir(i):
    """Ensure a directory named after test index *i* exists in the cwd."""
    # exist_ok avoids the check-then-create race of the original
    # os.path.exists() + os.mkdir() pair and is a no-op when already present.
    os.makedirs("./{}".format(i), exist_ok=True)
-
-
def parse(nodes, rows):
    """Distribute log *rows* into the per-node lists of *nodes* (in place).

    A row is appended to every node whose stream id appears in it; rows
    containing "[ERROR]" are additionally copied to ALL nodes so each
    per-node log shows the failure context.  A row that both mentions an
    id and contains "[ERROR]" is appended twice to that node — preserved
    from the original behaviour.
    """
    for row in rows:
        # `in` replaces the original's non-idiomatic __contains__ calls.
        for node_id, lines in nodes.items():
            if node_id in row:
                lines.append(row)
        if "[ERROR]" in row:
            for lines in nodes.values():
                lines.append(row)
-
-
def output(nodes, i):
    """Dump every node's collected lines to ``./<i>/<i>_<id>.txt``."""
    for node_id, lines in nodes.items():
        writeAllLogs("./{}/{}_{}.txt".format(i, i, node_id), lines)
-
-
if __name__ == "__main__":
    # argv[1]: the combined CI log downloaded from the build archive.
    logs = getAllLogs(sys.argv[1])
    count = 0
    for index, chunk in enumerate(logs):
        # Only chunks belonging to a failed test run are split out.
        if "FAILURE!" not in chunk:
            continue
        nodes = getNodes(chunk)
        parse(nodes, chunk.split("\n"))
        checkAndMkdir(index)
        output(nodes, index)
        count += 1
    print("find {} failed tests".format(count))

Reply via email to