This is an automated email from the ASF dual-hosted git repository.

panyuepeng pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampark.git


The following commit(s) were added to refs/heads/dev by this push:
     new 049fc6780 [Improve][CI] Improve CI check and delete unused 
streampark-tests module (#3753)
049fc6780 is described below

commit 049fc67805d64fa99efe1ed5c6eb77907156f3f5
Author: xiangzihao <[email protected]>
AuthorDate: Fri Jun 14 15:20:20 2024 +0800

    [Improve][CI] Improve CI check and delete unused streampark-tests module 
(#3753)
---
 .asf.yaml                                          |   7 +
 .github/workflows/{maven.yml => backend.yml}       |  26 ++-
 .github/workflows/docs.yml                         |   5 +-
 .github/workflows/frontend.yml                     |  14 ++
 .github/workflows/{maven.yml => unit-test.yml}     |  31 +++-
 pom.xml                                            |   1 -
 .../streampark-console-service/pom.xml             |   7 -
 .../service/ApplicationManageServiceITest.java     | 159 ------------------
 streampark-tests/pom.xml                           |  40 -----
 streampark-tests/streampark-testcontainer/pom.xml  | 105 ------------
 .../testcontainer/flink/FlinkComponent.java        |  38 -----
 .../testcontainer/flink/FlinkContainer.java        |  68 --------
 .../flink/FlinkStandaloneSessionCluster.java       | 184 ---------------------
 .../testcontainer/hadoop/HadoopContainer.java      |  86 ----------
 .../flink/FlinkStandaloneSessionClusterITest.java  |  57 -------
 .../testcontainer/hadoop/HadoopContainerTest.java  |  63 -------
 16 files changed, 65 insertions(+), 826 deletions(-)

diff --git a/.asf.yaml b/.asf.yaml
index 59811a44a..24b12d572 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -33,7 +33,14 @@ github:
   protected_branches:
     dev:
       required_status_checks:
+        # strict means "Require branches to be up to date before merging".
         strict: true
+        contexts:
+          - Backend - Result
+          - E2E - Result
+          - Docs - Result
+          - Frontend - Result
+          - Unit-Test - Result
       required_pull_request_reviews:
         dismiss_stale_reviews: true
         required_approving_review_count: 1
diff --git a/.github/workflows/maven.yml b/.github/workflows/backend.yml
similarity index 81%
copy from .github/workflows/maven.yml
copy to .github/workflows/backend.yml
index 30be37e82..2ade9ba8c 100644
--- a/.github/workflows/maven.yml
+++ b/.github/workflows/backend.yml
@@ -15,7 +15,7 @@
 # limitations under the License.
 #
 
-name: Maven Package Test
+name: Backend
 
 on:
   push:
@@ -34,8 +34,10 @@ on:
       - 'deploy/**'
       - 'script/**'
       - 'streampark-console/streampark-console-webapp/**'
+
+
 concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
+  group: backend-${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
 jobs:
@@ -62,7 +64,7 @@ jobs:
       - name: Check codestyle
         run: ./mvnw -B -q -nsu checkstyle:check spotless:check
   build:
-    name: "maven-compile (java-${{ matrix.java }})"
+    name: "Backend-Build (java-${{ matrix.java }})"
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
@@ -77,8 +79,18 @@ jobs:
           java-version: ${{ matrix.java }}
           distribution: "adopt"
           cache: "maven"
-      - name: Build with Maven
+      - name: Backend Build with Maven
         run: ./mvnw -B clean install -Pshaded -DskipTests
-      - name: Test with Maven
-        run: ./mvnw -B test
-
+  result:
+    name: Backend - Result
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    needs: [ build, license-header, code-style ]
+    if: always()
+    steps:
+      - name: Status
+        run: |
+          if [[ ${{ contains(needs.*.result, 'success') }} != 'true' ]]; then
+            echo "Backend Build Failed!"
+            exit -1
+          fi
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 8b0956e93..29caba107 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -33,7 +33,7 @@ name: Docs
 
 
 concurrency:
-  group: doc-${{ github.event.pull_request.number || github.ref }}
+  group: docs-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true
 
 
@@ -51,13 +51,14 @@ jobs:
             markdown-link-check -c .dlc.json -q "$file"
           done
   result:
-    name: Docs
+    name: Docs - Result
     runs-on: ubuntu-latest
     timeout-minutes: 30
     needs:
       - dead-link
     if: always()
     steps:
+      - uses: actions/checkout@v4
       - name: Status
         run: |
           if [[ ${{ contains(needs.*.result, 'failure') }} == 'true' || ${{ 
contains(needs.*.result, 'cancelled') }} == 'true' ]]; then
diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml
index 41d702d0b..1dbc52f38 100644
--- a/.github/workflows/frontend.yml
+++ b/.github/workflows/frontend.yml
@@ -58,3 +58,17 @@ jobs:
           pnpm install
           pnpm run lint:lint-staged
           pnpm run build
+  result:
+    name: Frontend - Result
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    needs: [ build ]
+    if: always()
+    steps:
+      - uses: actions/checkout@v4
+      - name: Status
+        run: |
+          if [[ ${{ contains(needs.*.result, 'success') }} != 'true' ]]; then
+            echo "Frontend Build Failed!"
+            exit -1
+          fi
diff --git a/.github/workflows/maven.yml b/.github/workflows/unit-test.yml
similarity index 78%
rename from .github/workflows/maven.yml
rename to .github/workflows/unit-test.yml
index 30be37e82..c3269e648 100644
--- a/.github/workflows/maven.yml
+++ b/.github/workflows/unit-test.yml
@@ -15,7 +15,7 @@
 # limitations under the License.
 #
 
-name: Maven Package Test
+name: Unit-Test
 
 on:
   push:
@@ -34,8 +34,10 @@ on:
       - 'deploy/**'
       - 'script/**'
       - 'streampark-console/streampark-console-webapp/**'
+
+
 concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
+  group: unit-test-${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
 jobs:
@@ -61,8 +63,8 @@ jobs:
           submodules: true
       - name: Check codestyle
         run: ./mvnw -B -q -nsu checkstyle:check spotless:check
-  build:
-    name: "maven-compile (java-${{ matrix.java }})"
+  unit-test:
+    name: "Unit-Test (java-${{ matrix.java }})"
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
@@ -77,8 +79,19 @@ jobs:
           java-version: ${{ matrix.java }}
           distribution: "adopt"
           cache: "maven"
-      - name: Build with Maven
-        run: ./mvnw -B clean install -Pshaded -DskipTests
-      - name: Test with Maven
-        run: ./mvnw -B test
-
+      - name: Unit-Test with Maven
+        run: ./mvnw clean -B test
+  result:
+    name: Unit-Test - Result
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    needs: [ unit-test, license-header, code-style ]
+    if: always()
+    steps:
+      - uses: actions/checkout@v4
+      - name: Status
+        run: |
+          if [[ ${{ contains(needs.*.result, 'success') }} != 'true' ]]; then
+            echo "Unit-Test Failed!"
+            exit -1
+          fi
diff --git a/pom.xml b/pom.xml
index 329e77110..185c4b74b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -81,7 +81,6 @@
         <module>streampark-flink</module>
         <module>streampark-spark</module>
         <module>streampark-console</module>
-        <module>streampark-tests</module>
     </modules>
 
     <properties>
diff --git a/streampark-console/streampark-console-service/pom.xml 
b/streampark-console/streampark-console-service/pom.xml
index 50ade576b..442560e61 100644
--- a/streampark-console/streampark-console-service/pom.xml
+++ b/streampark-console/streampark-console-service/pom.xml
@@ -440,13 +440,6 @@
             <scope>test</scope>
         </dependency>
 
-        <dependency>
-            <groupId>org.apache.streampark</groupId>
-            <artifactId>streampark-testcontainer</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-
         <!--Test dependencies end.-->
 
         <!--log4j -->
diff --git 
a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java
 
b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java
deleted file mode 100644
index 10ca766f4..000000000
--- 
a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.streampark.console.core.service;
-
-import org.apache.streampark.common.enums.FlinkExecutionMode;
-import org.apache.streampark.common.util.DeflaterUtils;
-import org.apache.streampark.console.SpringIntegrationTestBase;
-import org.apache.streampark.console.core.entity.Application;
-import org.apache.streampark.console.core.entity.FlinkCluster;
-import org.apache.streampark.console.core.entity.FlinkEnv;
-import org.apache.streampark.console.core.entity.FlinkSql;
-import org.apache.streampark.console.core.enums.FlinkAppStateEnum;
-import org.apache.streampark.console.core.enums.ReleaseStateEnum;
-import 
org.apache.streampark.console.core.service.application.ApplicationActionService;
-import 
org.apache.streampark.console.core.service.application.ApplicationManageService;
-import org.apache.streampark.console.core.service.impl.FlinkClusterServiceImpl;
-import org.apache.streampark.console.core.watcher.FlinkAppHttpWatcher;
-import org.apache.streampark.testcontainer.flink.FlinkStandaloneSessionCluster;
-
-import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.Timeout;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import java.util.Base64;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.TimeUnit;
-
-import static 
org.apache.streampark.console.core.watcher.FlinkAppHttpWatcher.WATCHING_INTERVAL;
-import static org.assertj.core.api.Assertions.assertThat;
-
-/**
- * Integration test for {@link
- * 
org.apache.streampark.console.core.service.application.ApplicationManageService}.
- */
-@Disabled("Disabled due to unstable performance.")
-class ApplicationManageServiceITest extends SpringIntegrationTestBase {
-
-  static FlinkStandaloneSessionCluster cluster =
-      
FlinkStandaloneSessionCluster.builder().slotsNumPerTm(4).slf4jLogConsumer(null).build();
-
-  @Autowired private ApplicationManageService applicationManageService;
-
-  @Autowired private ApplicationActionService applicationActionService;
-
-  @Autowired private FlinkClusterService clusterService;
-
-  @Autowired private FlinkEnvService envService;
-
-  @Autowired private AppBuildPipeService appBuildPipeService;
-
-  @Autowired private FlinkSqlService sqlService;
-
-  @Autowired private FlinkAppHttpWatcher flinkAppHttpWatcher;
-
-  @BeforeAll
-  static void setup() {
-    cluster.start();
-  }
-
-  @AfterAll
-  static void teardown() {
-    cluster.stop();
-  }
-
-  @AfterEach
-  void clear() {
-    applicationManageService.getBaseMapper().delete(new QueryWrapper<>());
-    clusterService.getBaseMapper().delete(new QueryWrapper<>());
-    envService.getBaseMapper().delete(new QueryWrapper<>());
-    appBuildPipeService.getBaseMapper().delete(new QueryWrapper<>());
-    sqlService.getBaseMapper().delete(new QueryWrapper<>());
-  }
-
-  @Test
-  @Timeout(value = 180)
-  void testStartAppOnRemoteSessionMode() throws Exception {
-    FlinkEnv flinkEnv = new FlinkEnv();
-    flinkEnv.setFlinkHome(defaultFlinkHome);
-    flinkEnv.setFlinkName(DEFAULT_FLINK_VERSION);
-    flinkEnv.setId(1L);
-    envService.create(flinkEnv);
-    FlinkCluster flinkCluster = new FlinkCluster();
-    flinkCluster.setId(1L);
-    flinkCluster.setAddress(cluster.getFlinkJobManagerUrl());
-    flinkCluster.setExecutionMode(FlinkExecutionMode.REMOTE.getMode());
-    flinkCluster.setClusterName("docker-Cluster-1.17.1");
-    flinkCluster.setVersionId(1L);
-    flinkCluster.setUserId(100000L);
-    ((FlinkClusterServiceImpl) clusterService).internalCreate(flinkCluster);
-    Application appParam = new Application();
-    appParam.setId(100000L);
-    appParam.setTeamId(100000L);
-    Application application = 
applicationManageService.getApp(appParam.getId());
-    application.setFlinkClusterId(1L);
-    application.setSqlId(100000L);
-    application.setVersionId(1L);
-    application.setExecutionMode(FlinkExecutionMode.REMOTE.getMode());
-
-    // Avoid exceptional error.
-    application.setFlinkSql(
-        new 
String(Base64.getDecoder().decode(application.getFlinkSql().getBytes())));
-    FlinkSql flinkSql = sqlService.getEffective(application.getId(), false);
-    flinkSql.setSql(DeflaterUtils.zipString(flinkSql.getSql()));
-    sqlService.getBaseMapper().updateById(flinkSql);
-
-    // Continue operations link.
-    applicationManageService.update(application);
-    appBuildPipeService.buildApplication(100000L, false);
-
-    CompletableFuture<Boolean> buildCompletableFuture =
-        CompletableFuture.supplyAsync(
-            () -> {
-              while (true) {
-                Application app = applicationManageService.getById(100000L);
-                if (app != null && app.getReleaseState() == 
ReleaseStateEnum.DONE) {
-                  break;
-                }
-              }
-              return true;
-            });
-    buildCompletableFuture.get();
-
-    applicationActionService.start(applicationManageService.getById(100000L), 
false);
-    CompletableFuture<Boolean> completableFuture =
-        CompletableFuture.supplyAsync(
-            () -> {
-              while (true) {
-                if 
(flinkAppHttpWatcher.tryQueryFlinkAppState(application.getId())
-                    == FlinkAppStateEnum.RUNNING) {
-                  break;
-                }
-              }
-              return true;
-            });
-
-    assertThat(completableFuture.get(WATCHING_INTERVAL.toMillis() * 24, 
TimeUnit.MILLISECONDS))
-        .isEqualTo(true);
-  }
-}
diff --git a/streampark-tests/pom.xml b/streampark-tests/pom.xml
deleted file mode 100644
index 97f75dea0..000000000
--- a/streampark-tests/pom.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.streampark</groupId>
-        <artifactId>streampark</artifactId>
-        <version>2.2.0-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>streampark-tests</artifactId>
-    <packaging>pom</packaging>
-
-    <name>StreamPark : Tests</name>
-    <url>http://maven.apache.org</url>
-    <modules>
-        <module>streampark-testcontainer</module>
-    </modules>
-
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    </properties>
-
-</project>
diff --git a/streampark-tests/streampark-testcontainer/pom.xml 
b/streampark-tests/streampark-testcontainer/pom.xml
deleted file mode 100644
index 78a3c7838..000000000
--- a/streampark-tests/streampark-testcontainer/pom.xml
+++ /dev/null
@@ -1,105 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.streampark</groupId>
-        <artifactId>streampark-tests</artifactId>
-        <version>2.2.0-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>streampark-testcontainer</artifactId>
-    <packaging>jar</packaging>
-
-    <name>StreamPark : Test Container</name>
-    <url>http://maven.apache.org</url>
-
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <!--<kotlin-reflect.version>1.6.21</kotlin-reflect.version>-->
-        <findbugs.version>3.0.2</findbugs.version>
-        <testcontainer.version>1.16.2</testcontainer.version>
-    </properties>
-
-    <dependencyManagement>
-        <dependencies>
-            <dependency>
-                <groupId>org.testcontainers</groupId>
-                <artifactId>testcontainers-bom</artifactId>
-                <version>${testcontainer.version}</version>
-                <type>pom</type>
-                <scope>import</scope>
-            </dependency>
-        </dependencies>
-    </dependencyManagement>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.testcontainers</groupId>
-            <artifactId>testcontainers</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.httpcomponents.client5</groupId>
-            <artifactId>httpclient5</artifactId>
-            <version>${httpclient5.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <!--<dependency>
-            <groupId>org.jetbrains.kotlin</groupId>
-            <artifactId>kotlin-reflect</artifactId>
-            <version>${kotlin-reflect.version}</version>
-        </dependency>-->
-        <dependency>
-            <groupId>org.assertj</groupId>
-            <artifactId>assertj-core</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.junit.jupiter</groupId>
-            <artifactId>junit-jupiter</artifactId>
-            <version>${jupiter.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.streampark</groupId>
-            <artifactId>streampark-common_2.12</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.google.code.findbugs</groupId>
-            <artifactId>jsr305</artifactId>
-            <version>${findbugs.version}</version>
-        </dependency>
-    </dependencies>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-jar-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <goals>
-                            <goal>test-jar</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-</project>
diff --git 
a/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/flink/FlinkComponent.java
 
b/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/flink/FlinkComponent.java
deleted file mode 100644
index ebb907690..000000000
--- 
a/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/flink/FlinkComponent.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.streampark.testcontainer.flink;
-
-import javax.annotation.Nonnull;
-
-/** The enum is used to represent the type of the flink component. */
-enum FlinkComponent {
-  JOBMANAGER("jobmanager"),
-
-  TASKMANAGER("taskmanager");
-
-  private final String name;
-
-  FlinkComponent(@Nonnull String name) {
-    this.name = name;
-  }
-
-  @Nonnull
-  public String getName() {
-    return name;
-  }
-}
diff --git 
a/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/flink/FlinkContainer.java
 
b/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/flink/FlinkContainer.java
deleted file mode 100644
index 074b5f04e..000000000
--- 
a/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/flink/FlinkContainer.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.streampark.testcontainer.flink;
-
-import org.testcontainers.containers.GenericContainer;
-import org.testcontainers.containers.Network;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.utility.DockerImageName;
-
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-
-import java.util.Optional;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import static 
org.apache.streampark.testcontainer.flink.FlinkComponent.JOBMANAGER;
-import static 
org.apache.streampark.testcontainer.flink.FlinkComponent.TASKMANAGER;
-
-/**
- * The Flink container class. It would be created as a flink jobmanager 
container or a taskmanaager
- * container. Note: It's an internal class to construct a flink session 
cluster.
- */
-class FlinkContainer extends GenericContainer<FlinkContainer> {
-
-  private static final String FLINK_PROPS_KEY = "FLINK_PROPERTIES";
-  private static final AtomicInteger TM_INDEX_SUFFIX = new AtomicInteger(0);
-
-  private final @Nonnull FlinkComponent component;
-
-  FlinkContainer(
-      @Nonnull DockerImageName dockerImageName,
-      @Nonnull FlinkComponent component,
-      @Nonnull Network network,
-      @Nonnull String yamlPropContent,
-      @Nullable Slf4jLogConsumer slf4jLogConsumer) {
-    super(dockerImageName);
-    this.component = component;
-    this.withCommand("/docker-entrypoint.sh", component.getName());
-    this.withCreateContainerCmdModifier(
-        createContainerCmd -> 
createContainerCmd.withName(getFlinkContainerName()));
-    this.withNetwork(network);
-    this.withEnv(FLINK_PROPS_KEY, yamlPropContent);
-    Optional.ofNullable(slf4jLogConsumer).ifPresent(this::withLogConsumer);
-  }
-
-  @Nonnull
-  protected String getFlinkContainerName() {
-    if (component == JOBMANAGER) {
-      return JOBMANAGER.getName();
-    }
-    return String.format("%s_%s", TASKMANAGER.getName(), 
TM_INDEX_SUFFIX.incrementAndGet());
-  }
-}
diff --git 
a/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/flink/FlinkStandaloneSessionCluster.java
 
b/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/flink/FlinkStandaloneSessionCluster.java
deleted file mode 100644
index 326ff1737..000000000
--- 
a/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/flink/FlinkStandaloneSessionCluster.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.streampark.testcontainer.flink;
-
-import org.apache.streampark.common.util.AssertUtils;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.Network;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
-import org.testcontainers.lifecycle.Startable;
-import org.testcontainers.utility.DockerImageName;
-
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.List;
-
-import static 
org.apache.streampark.testcontainer.flink.FlinkComponent.JOBMANAGER;
-import static 
org.apache.streampark.testcontainer.flink.FlinkComponent.TASKMANAGER;
-
-/**
- * Class to start a couple of flink 1-jobmanager & n-taskmanagers. The 
priority of
- * flinkYamlConfContent is the highest. But: The 'jobmanager.rpc.address' is 
always 'jobmanager'.
- * The 'rest.port' always is 8081.
- */
-public class FlinkStandaloneSessionCluster implements Startable {
-
-  public static final Logger LOG = 
LoggerFactory.getLogger(FlinkStandaloneSessionCluster.class);
-
-  private static final int BLOB_SERVER_PORT = 6123;
-  private static final int WEB_PORT = 8081;
-  private static final Network NETWORK = Network.newNetwork();
-  private static final String JM_RPC_ADDR_KEY = "jobmanager.rpc.address";
-  private static final String TM_SLOT_NUM_KEY = 
"taskmanager.numberOfTaskSlots";
-  private static final String SLOT_CONF_FORMAT = String.format("%s: %%s", 
TM_SLOT_NUM_KEY);
-
-  private String yamlConfContent = String.format("%s: %s", JM_RPC_ADDR_KEY, 
JOBMANAGER.getName());
-  private final FlinkContainer jobManagerContainer;
-  private final List<FlinkContainer> taskManagerContainers = new ArrayList<>();
-
-  private FlinkStandaloneSessionCluster(
-      DockerImageName dockerImageName,
-      int taskManagerNum,
-      int slotsNumPerTm,
-      @Nullable String yamlConfContent,
-      Slf4jLogConsumer slf4jLogConsumer) {
-
-    renderJmRpcConfIfNeeded(yamlConfContent);
-
-    renderSlotNumIfNeeded(slotsNumPerTm);
-
-    // Set for JM.
-    this.jobManagerContainer =
-        new FlinkContainer(
-            dockerImageName, JOBMANAGER, NETWORK, this.yamlConfContent, 
slf4jLogConsumer);
-    this.jobManagerContainer.addExposedPort(BLOB_SERVER_PORT);
-    this.jobManagerContainer.addExposedPort(WEB_PORT);
-
-    this.jobManagerContainer.setWaitStrategy(
-        Wait.forHttp("/config")
-            .forStatusCode(200)
-            .forPort(WEB_PORT)
-            .withStartupTimeout(Duration.ofMinutes(8)));
-
-    // Set for TMs.
-    for (int i = 0; i < taskManagerNum; i++) {
-      FlinkContainer taskManager =
-          new FlinkContainer(
-              dockerImageName, TASKMANAGER, NETWORK, this.yamlConfContent, 
slf4jLogConsumer);
-      this.taskManagerContainers.add(taskManager);
-    }
-  }
-
-  @Nonnull
-  public String getFlinkJobManagerUrl() {
-    return String.format(
-        "http://%s:%s";, jobManagerContainer.getHost(), 
jobManagerContainer.getMappedPort(WEB_PORT));
-  }
-
-  @Override
-  public void start() {
-    AssertUtils.notNull(jobManagerContainer);
-    jobManagerContainer.start();
-    AssertUtils.notNull(taskManagerContainers);
-    for (FlinkContainer taskManagerContainer : taskManagerContainers) {
-      taskManagerContainer.start();
-    }
-  }
-
-  @Override
-  public void stop() {
-    AssertUtils.notNull(taskManagerContainers);
-    for (FlinkContainer taskManagerContainer : taskManagerContainers) {
-      taskManagerContainer.stop();
-    }
-    AssertUtils.notNull(jobManagerContainer);
-    jobManagerContainer.stop();
-  }
-
-  private void renderSlotNumIfNeeded(int slotsNumPerTm) {
-    if (!this.yamlConfContent.contains(TM_SLOT_NUM_KEY)) {
-      this.yamlConfContent =
-          String.format(
-              "%s%n%s%n", this.yamlConfContent, 
String.format(SLOT_CONF_FORMAT, slotsNumPerTm));
-    }
-  }
-
-  private void renderJmRpcConfIfNeeded(@Nullable String yamlConfStr) {
-    this.yamlConfContent =
-        yamlConfStr == null
-            ? this.yamlConfContent
-            : (yamlConfStr.contains(JM_RPC_ADDR_KEY)
-                ? yamlConfStr
-                : String.format("%s%n%s%n", this.yamlConfContent, 
yamlConfStr));
-  }
-
-  /** A tool class to create a flink standalone session cluster quickly. */
-  public static class Builder {
-
-    private DockerImageName dockerImageName =
-        DockerImageName.parse("apache/flink:1.17.1-scala_2.12");
-    private int taskManagerNum = 1;
-    private int slotsNumPerTm = 8;
-    private @Nullable String yamlConfContent;
-    private Slf4jLogConsumer slf4jLogConsumer = new Slf4jLogConsumer(LOG, 
false);
-
-    private Builder() {}
-
-    public Builder dockerImageName(DockerImageName dockerImageName) {
-      this.dockerImageName = dockerImageName;
-      return this;
-    }
-
-    public Builder taskManagerNum(int taskManagerNum) {
-      AssertUtils.required(taskManagerNum >= 0, "taskManagerNum must be 
greater than -1.");
-      this.taskManagerNum = taskManagerNum;
-      return this;
-    }
-
-    public Builder slotsNumPerTm(int slotsNumPerTm) {
-      AssertUtils.required(slotsNumPerTm > 0, "slotsNumPerTm must be greater 
than 0.");
-      this.slotsNumPerTm = slotsNumPerTm;
-      return this;
-    }
-
-    public Builder yamlConfContent(@Nullable String yamlConfContent) {
-      this.yamlConfContent = yamlConfContent;
-      return this;
-    }
-
-    public Builder slf4jLogConsumer(Slf4jLogConsumer slf4jLogConsumer) {
-      this.slf4jLogConsumer = slf4jLogConsumer;
-      return this;
-    }
-
-    public FlinkStandaloneSessionCluster build() {
-      return new FlinkStandaloneSessionCluster(
-          dockerImageName, taskManagerNum, slotsNumPerTm, yamlConfContent, 
slf4jLogConsumer);
-    }
-  }
-
-  public static Builder builder() {
-    return new Builder();
-  }
-}
diff --git 
a/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/hadoop/HadoopContainer.java
 
b/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/hadoop/HadoopContainer.java
deleted file mode 100644
index 0e92fae83..000000000
--- 
a/streampark-tests/streampark-testcontainer/src/main/java/org/apache/streampark/testcontainer/hadoop/HadoopContainer.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.streampark.testcontainer.hadoop;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.GenericContainer;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
-import org.testcontainers.containers.wait.strategy.WaitAllStrategy;
-import org.testcontainers.utility.DockerImageName;
-
-import javax.annotation.Nonnull;
-
-import java.time.Duration;
-import java.util.HashMap;
-import java.util.Map;
-
-/** Hadoop Container for integration test. Note: It's experimental now. */
-public class HadoopContainer extends GenericContainer<HadoopContainer> {
-
-  public static final Logger LOG = 
LoggerFactory.getLogger(HadoopContainer.class);
-
-  // Hadoop version is 2.7.0
-  private static final DockerImageName DOCKER_IMAGE_NAME =
-      DockerImageName.parse("sequenceiq/hadoop-docker:latest");
-
-  public static final Map<Integer, Integer> MAPPED_PORTS =
-      new HashMap<Integer, Integer>() {
-        {
-          put(50070, 50070);
-          put(8088, 8088);
-          put(9000, 9000);
-        }
-      };
-
-  public HadoopContainer() {
-    this(DOCKER_IMAGE_NAME);
-  }
-
-  public HadoopContainer(@Nonnull DockerImageName dockerImageName) {
-    super(dockerImageName);
-    MAPPED_PORTS.forEach(this::addFixedExposedPort);
-    this.setPrivilegedMode(true);
-    this.withCreateContainerCmdModifier(
-        createContainerCmd -> 
createContainerCmd.withName("one-container-hadoop-cluster"));
-    WaitAllStrategy waitAllStrategy =
-        new WaitAllStrategy()
-            .withStrategy(
-                Wait.forHttp("/ws/v1/cluster/info")
-                    .forPort(8088)
-                    .forStatusCode(200)
-                    .withStartupTimeout(Duration.ofMinutes(8)))
-            .withStrategy(
-                Wait.forHttp("")
-                    .forPort(50070)
-                    .forStatusCode(200)
-                    .withStartupTimeout(Duration.ofMinutes(8)))
-            
.withStrategy(Wait.defaultWaitStrategy().withStartupTimeout(Duration.ofMinutes(8)))
-            .withStartupTimeout(Duration.ofMinutes(8));
-    this.waitingFor(waitAllStrategy);
-    this.withLogConsumer(new Slf4jLogConsumer(LOG));
-    this.withCommand("/etc/bootstrap.sh", "-d");
-  }
-
-  @Override
-  public void start() {
-    super.start();
-    this.waitUntilContainerStarted();
-  }
-}
diff --git 
a/streampark-tests/streampark-testcontainer/src/test/java/org/apache/streampark/testcontainer/flink/FlinkStandaloneSessionClusterITest.java
 
b/streampark-tests/streampark-testcontainer/src/test/java/org/apache/streampark/testcontainer/flink/FlinkStandaloneSessionClusterITest.java
deleted file mode 100644
index f84ddf66c..000000000
--- 
a/streampark-tests/streampark-testcontainer/src/test/java/org/apache/streampark/testcontainer/flink/FlinkStandaloneSessionClusterITest.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.streampark.testcontainer.flink;
-
-import org.apache.hc.client5.http.classic.methods.HttpGet;
-import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
-import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse;
-import org.apache.hc.client5.http.impl.classic.HttpClients;
-
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.io.IOException;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-/** Test for flink standalone session cluster env available. */
-class FlinkStandaloneSessionClusterITest {
-
-  private final FlinkStandaloneSessionCluster flinkStandaloneSessionCluster =
-      FlinkStandaloneSessionCluster.builder().build();
-
-  @BeforeEach
-  void up() {
-    flinkStandaloneSessionCluster.start();
-  }
-
-  @AfterEach
-  void down() {
-    flinkStandaloneSessionCluster.stop();
-  }
-
-  @Test
-  void testRestApiAvailable() throws IOException {
-    String url =
-        String.format("%s/%s", 
flinkStandaloneSessionCluster.getFlinkJobManagerUrl(), "config");
-    CloseableHttpClient httpClient = HttpClients.createDefault();
-    CloseableHttpResponse response = httpClient.execute(new HttpGet(url));
-    assertThat(response.getCode()).isEqualTo(200);
-  }
-}
diff --git 
a/streampark-tests/streampark-testcontainer/src/test/java/org/apache/streampark/testcontainer/hadoop/HadoopContainerTest.java
 
b/streampark-tests/streampark-testcontainer/src/test/java/org/apache/streampark/testcontainer/hadoop/HadoopContainerTest.java
deleted file mode 100644
index 961d76c43..000000000
--- 
a/streampark-tests/streampark-testcontainer/src/test/java/org/apache/streampark/testcontainer/hadoop/HadoopContainerTest.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.streampark.testcontainer.hadoop;
-
-import org.apache.hc.client5.http.classic.methods.HttpGet;
-import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
-import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse;
-import org.apache.hc.client5.http.impl.classic.HttpClients;
-
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.Timeout;
-
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-/** Test for {@link HadoopContainer}. */
-class HadoopContainerTest {
-  static final HadoopContainer HADOOP_CONTAINER = new HadoopContainer();
-
-  @BeforeAll
-  static void setup() {
-    HADOOP_CONTAINER.start();
-  }
-
-  @AfterAll
-  static void teardown() {
-    HADOOP_CONTAINER.stop();
-  }
-
-  @Test
-  @Timeout(value = 8, unit = TimeUnit.MINUTES)
-  void testOverview() throws IOException {
-
-    String url = String.format("http://%s:%s/ws/v1/cluster/info", 
HADOOP_CONTAINER.getHost(), 8088);
-    CloseableHttpClient httpClient = HttpClients.createDefault();
-    CloseableHttpResponse response = httpClient.execute(new HttpGet(url));
-    assertThat(response.getCode()).isEqualTo(200);
-
-    url = String.format("http://%s:%s", HADOOP_CONTAINER.getHost(), 50070);
-    httpClient = HttpClients.createDefault();
-    response = httpClient.execute(new HttpGet(url));
-    assertThat(response.getCode()).isEqualTo(200);
-  }
-}


Reply via email to