This is an automated email from the ASF dual-hosted git repository.

yihua pushed a commit to branch release-0.14.1-spark35-scala213
in repository https://gitbox.apache.org/repos/asf/hudi.git

commit 2a5fd3cf999de19c4e15518e0657d4f4deb2e00a
Author: Lin Liu <[email protected]>
AuthorDate: Sat Feb 10 11:33:03 2024 -0800

    [HUDI-6902] Containerize the Azure CI (#10512)
    
    * [HUDI-6902] Containerize the Azure tests
    
    * remove warning message
---
 .github/workflows/bot.yml                          |  50 ++++--
 Dockerfile                                         |  31 ++++
 azure-pipelines-20230430.yml                       | 194 ++++++++++-----------
 .../hudi-metaserver/hudi-metaserver-server/pom.xml |  49 +++---
 pom.xml                                            |   1 +
 5 files changed, 189 insertions(+), 136 deletions(-)

diff --git a/.github/workflows/bot.yml b/.github/workflows/bot.yml
index aae83c3ebc0..6d9b7210f79 100644
--- a/.github/workflows/bot.yml
+++ b/.github/workflows/bot.yml
@@ -20,6 +20,11 @@ on:
     branches:
       - master
       - 'release-*'
+
+concurrency:
+  group: ${{ github.ref }}
+  cancel-in-progress: ${{ !contains(github.ref, 'master') }}
+
 env:
   MVN_ARGS: -e -ntp -B -V -Dgpg.skip -Djacoco.skip -Pwarn-log 
-Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn 
-Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn 
-Dmaven.wagon.httpconnectionManager.ttlSeconds=25 
-Dmaven.wagon.http.retryHandler.count=5
   SPARK_COMMON_MODULES: 
hudi-spark-datasource/hudi-spark,hudi-spark-datasource/hudi-spark-common
@@ -35,6 +40,7 @@ jobs:
           java-version: '8'
           distribution: 'adopt'
           architecture: x64
+          cache: maven
       - name: Check Binary Files
         run: ./scripts/release/validate_source_binary_files.sh
       - name: Check Copyright
@@ -86,12 +92,13 @@ jobs:
           java-version: '8'
           distribution: 'adopt'
           architecture: x64
+          cache: maven
       - name: Build Project
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
         run:
-          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" 
-DskipTests=true $MVN_ARGS -am -pl 
"hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES"
+          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" 
-DskipTests=true $MVN_ARGS -am -pl 
"hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES"
       - name: Quickstart Test
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
@@ -105,7 +112,7 @@ jobs:
           SPARK_MODULES: ${{ matrix.sparkModules }}
         if: ${{ !endsWith(env.SPARK_PROFILE, '3.2') }} # skip test spark 3.2 
as it's covered by Azure CI
         run:
-          mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl 
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl 
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
       - name: FT - Spark
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
@@ -143,7 +150,7 @@ jobs:
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           FLINK_PROFILE: ${{ matrix.flinkProfile }}
         run:
-          ./mvnw clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" 
-D"FLINK_PROFILE" -DskipTests=true -Phudi-platform-service $MVN_ARGS -am -pl 
hudi-hadoop-mr,hudi-client/hudi-java-client
+          ./mvnw clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" 
-D"FLINK_PROFILE" -DskipTests=true -Phudi-platform-service 
-Pthrift-gen-source-with-script $MVN_ARGS -am -pl 
hudi-hadoop-mr,hudi-client/hudi-java-client
       - name: UT - hudi-hadoop-mr and hudi-client/hudi-java-client
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
@@ -175,6 +182,7 @@ jobs:
           java-version: '8'
           distribution: 'adopt'
           architecture: x64
+          cache: maven
       - name: Build Project
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
@@ -187,6 +195,7 @@ jobs:
           java-version: '17'
           distribution: 'adopt'
           architecture: x64
+          cache: maven
       - name: Quickstart Test
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
@@ -228,12 +237,13 @@ jobs:
           java-version: '8'
           distribution: 'adopt'
           architecture: x64
+          cache: maven
       - name: Build Project
         env:
           SCALA_PROFILE: 'scala-2.12'
           FLINK_PROFILE: ${{ matrix.flinkProfile }}
         run:
-          mvn clean install -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl 
hudi-examples/hudi-examples-flink -am -Davro.version=1.10.0 -DskipTests=true 
$MVN_ARGS
+          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl 
hudi-examples/hudi-examples-flink -am -Davro.version=1.10.0 -DskipTests=true 
$MVN_ARGS
       - name: Quickstart Test
         env:
           SCALA_PROFILE: 'scala-2.12'
@@ -246,7 +256,7 @@ jobs:
           FLINK_PROFILE: ${{ matrix.flinkProfile }}
         if: ${{ endsWith(env.FLINK_PROFILE, '1.17') }}
         run: |
-          mvn clean install -Pintegration-tests -D"$SCALA_PROFILE" 
-D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am 
-Davro.version=1.10.0 -DskipTests=true $MVN_ARGS
+          mvn clean install -T 2 -Pintegration-tests -D"$SCALA_PROFILE" 
-D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am 
-Davro.version=1.10.0 -DskipTests=true $MVN_ARGS
           mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" 
-pl hudi-flink-datasource/hudi-flink $MVN_ARGS
 
   docker-java17-test:
@@ -269,6 +279,7 @@ jobs:
           java-version: '8'
           distribution: 'adopt'
           architecture: x64
+          cache: maven
       - name: UT/FT - Docker Test - OpenJDK 17
         env:
           FLINK_PROFILE: ${{ matrix.flinkProfile }}
@@ -295,12 +306,9 @@ jobs:
             sparkProfile: 'spark3.3'
             sparkRuntime: 'spark3.3.2'
           - flinkProfile: 'flink1.16'
-            sparkProfile: 'spark3.3'
-            sparkRuntime: 'spark3.3.2'
-          - flinkProfile: 'flink1.15'
             sparkProfile: 'spark3.3'
             sparkRuntime: 'spark3.3.1'
-          - flinkProfile: 'flink1.14'
+          - flinkProfile: 'flink1.15'
             sparkProfile: 'spark3.2'
             sparkRuntime: 'spark3.2.3'
           - flinkProfile: 'flink1.13'
@@ -320,16 +328,17 @@ jobs:
           java-version: '8'
           distribution: 'adopt'
           architecture: x64
+          cache: maven
       - name: Build Project
         env:
           FLINK_PROFILE: ${{ matrix.flinkProfile }}
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SCALA_PROFILE: 'scala-2.12'
         run: |
-          mvn clean package -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" 
-DdeployArtifacts=true -DskipTests=true $MVN_ARGS
+          mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" 
-DdeployArtifacts=true -DskipTests=true $MVN_ARGS -Phudi-platform-service 
-Pthrift-gen-source-with-script
           # TODO remove the sudo below. It's a needed workaround as detailed 
in HUDI-5708.
           sudo chown -R "$USER:$(id -g -n)" 
hudi-platform-service/hudi-metaserver/target/generated-sources
-          mvn clean package -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" 
-DdeployArtifacts=true -DskipTests=true $MVN_ARGS -pl 
packaging/hudi-flink-bundle -am -Davro.version=1.10.0
+          mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" 
-DdeployArtifacts=true -DskipTests=true $MVN_ARGS -pl 
packaging/hudi-flink-bundle -am -Davro.version=1.10.0 -Phudi-platform-service 
-Pthrift-gen-source-with-script
       - name: IT - Bundle Validation - OpenJDK 8
         env:
           FLINK_PROFILE: ${{ matrix.flinkProfile }}
@@ -368,13 +377,22 @@ jobs:
     strategy:
       matrix:
         include:
-          - flinkProfile: 'flink1.16'
+          - flinkProfile: 'flink1.18'
             sparkProfile: 'spark3'
+            sparkRuntime: 'spark3.5.0'
+          - flinkProfile: 'flink1.18'
+            sparkProfile: 'spark3.5'
+            sparkRuntime: 'spark3.5.0'
+          - flinkProfile: 'flink1.18'
+            sparkProfile: 'spark3.4'
+            sparkRuntime: 'spark3.4.0'
+          - flinkProfile: 'flink1.17'
+            sparkProfile: 'spark3.3'
             sparkRuntime: 'spark3.3.2'
-          - flinkProfile: 'flink1.15'
+          - flinkProfile: 'flink1.16'
             sparkProfile: 'spark3.3'
             sparkRuntime: 'spark3.3.1'
-          - flinkProfile: 'flink1.14'
+          - flinkProfile: 'flink1.15'
             sparkProfile: 'spark3.2'
             sparkRuntime: 'spark3.2.3'
           - flinkProfile: 'flink1.13'
@@ -394,6 +412,7 @@ jobs:
           java-version: '8'
           distribution: 'adopt'
           architecture: x64
+          cache: maven
       - name: IT - Bundle Validation - OpenJDK 8
         env:
           FLINK_PROFILE: ${{ matrix.flinkProfile }}
@@ -433,12 +452,13 @@ jobs:
           java-version: '8'
           distribution: 'adopt'
           architecture: x64
+          cache: maven
       - name: Build Project
         env:
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SCALA_PROFILE: '-Dscala-2.11 -Dscala.binary.version=2.11'
         run:
-          mvn clean install $SCALA_PROFILE -D"$SPARK_PROFILE" 
-Pintegration-tests -DskipTests=true $MVN_ARGS
+          mvn clean install -T 2 $SCALA_PROFILE -D"$SPARK_PROFILE" 
-Pintegration-tests -DskipTests=true $MVN_ARGS
       - name: 'UT integ-test'
         env:
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000000..f8d03877143
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,31 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Use a home made image as the base, which includes:
+# ubuntu:latest
+# git
+# thrift
+# maven
+# java8
+# Use an official Ubuntu base image
+FROM apachehudi/hudi-ci-bundle-validation-base:azure_ci_test_base_new
+
+CMD ["java", "-version"]
+
+# Set the working directory to /hudi
+WORKDIR /hudi
+
+# Copy git repo into the working directory
+COPY . /hudi
\ No newline at end of file
diff --git a/azure-pipelines-20230430.yml b/azure-pipelines-20230430.yml
index 8fcded07443..c3e6b5c49ec 100644
--- a/azure-pipelines-20230430.yml
+++ b/azure-pipelines-20230430.yml
@@ -41,6 +41,7 @@ parameters:
     type: object
     default:
       - 'hudi-client/hudi-spark-client'
+      - 'hudi-spark-datasource/hudi-spark'
   - name: job3UTModules
     type: object
     default:
@@ -92,11 +93,12 @@ parameters:
       - '!hudi-flink-datasource/hudi-flink1.15.x'
       - '!hudi-flink-datasource/hudi-flink1.16.x'
       - '!hudi-flink-datasource/hudi-flink1.17.x'
+      - '!hudi-spark-datasource/hudi-spark'
 
 variables:
   BUILD_PROFILES: '-Dscala-2.12 -Dspark3.2 -Dflink1.17'
   PLUGIN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true -ntp 
-B -V -Pwarn-log 
-Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn 
-Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn'
-  MVN_OPTS_INSTALL: '-Phudi-platform-service -DskipTests $(BUILD_PROFILES) 
$(PLUGIN_OPTS) -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 
-Dmaven.wagon.http.retryHandler.count=5'
+  MVN_OPTS_INSTALL: '-DskipTests $(BUILD_PROFILES) $(PLUGIN_OPTS) 
-Dmaven.wagon.httpconnectionManager.ttlSeconds=25 
-Dmaven.wagon.http.retryHandler.count=5'
   MVN_OPTS_TEST: '-fae -Pwarn-log $(BUILD_PROFILES) $(PLUGIN_OPTS)'
   JOB1_MODULES: ${{ join(',',parameters.job1Modules) }}
   JOB2_MODULES: ${{ join(',',parameters.job2Modules) }}
@@ -106,128 +108,120 @@ variables:
 
 stages:
   - stage: test
+    variables:
+      - name: DOCKER_BUILDKIT
+        value: 1
     jobs:
       - job: UT_FT_1
         displayName: UT FT common & flink & UT client/spark-client
         timeoutInMinutes: '150'
         steps:
-          - task: Maven@4
-            displayName: maven install
+          - task: Docker@2
+            displayName: "login to docker"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'clean install'
-              options: $(MVN_OPTS_INSTALL)
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-          - task: Maven@4
-            displayName: UT common flink client/spark-client
+              command: "login"
+              containerRegistry: "apachehudi-docker-hub"
+          - task: Docker@2
+            displayName: "load repo into image"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'test'
-              options: $(MVN_OPTS_TEST) -Punit-tests -pl 
$(JOB1_MODULES),hudi-client/hudi-spark-client
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-              mavenOptions: '-Xmx4g'
-          - task: Maven@4
-            displayName: FT common flink
+              containerRegistry: 'apachehudi-docker-hub'
+              repository: 'apachehudi/hudi-ci-bundle-validation-base'
+              command: 'build'
+              Dockerfile: '**/Dockerfile'
+              ImageName: $(Build.BuildId)
+          - task: Docker@2
+            displayName: "UT FT common flink client/spark-client"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'test'
-              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB1_MODULES)
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-              mavenOptions: '-Xmx4g'
-          - script: |
-              grep "testcase" */target/surefire-reports/*.xml 
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr 
| head -n 100
-            displayName: Top 100 long-running testcases
+              containerRegistry: 'apachehudi-docker-hub'
+              repository: 'apachehudi/hudi-ci-bundle-validation-base'
+              command: 'run'
+              arguments: >
+                -i 
docker.io/apachehudi/hudi-ci-bundle-validation-base:$(Build.BuildId)
+                /bin/bash -c "mvn clean install $(MVN_OPTS_INSTALL)
+                && mvn test $(MVN_OPTS_TEST) -Punit-tests -pl 
$(JOB1_MODULES),hudi-client/hudi-spark-client
+                && mvn test $(MVN_OPTS_TEST) -Pfunctional-tests -pl 
$(JOB1_MODULES)
+                && grep \"testcase\" */target/surefire-reports/*.xml 
*/*/target/surefire-reports/*.xml | awk -F'\"' ' { print $6,$4,$2 } ' | sort 
-nr | head -n 100"
       - job: UT_FT_2
-        displayName: FT client/spark-client
+        displayName: FT client/spark-client & hudi-spark-datasource/hudi-spark
         timeoutInMinutes: '150'
         steps:
-          - task: Maven@4
-            displayName: maven install
+          - task: Docker@2
+            displayName: "login to docker"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'clean install'
-              options: $(MVN_OPTS_INSTALL)
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-          - task: Maven@4
-            displayName: FT client/spark-client
+              command: "login"
+              containerRegistry: "apachehudi-docker-hub"
+          - task: Docker@2
+            displayName: "load repo into image"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'test'
-              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB2_MODULES)
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-              mavenOptions: '-Xmx4g'
-          - script: |
-              grep "testcase" */target/surefire-reports/*.xml 
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr 
| head -n 100
-            displayName: Top 100 long-running testcases
+              containerRegistry: 'apachehudi-docker-hub'
+              repository: 'apachehudi/hudi-ci-bundle-validation-base'
+              command: 'build'
+              Dockerfile: '**/Dockerfile'
+              ImageName: $(Build.BuildId)
+          - task: Docker@2
+            displayName: "FT client/spark-client & 
hudi-spark-datasource/hudi-spark"
+            inputs:
+              containerRegistry: 'apachehudi-docker-hub'
+              repository: 'apachehudi/hudi-ci-bundle-validation-base'
+              command: 'run'
+              arguments: >
+                -i 
docker.io/apachehudi/hudi-ci-bundle-validation-base:$(Build.BuildId)
+                /bin/bash -c "mvn clean install $(MVN_OPTS_INSTALL)
+                && mvn test  $(MVN_OPTS_TEST) -Pfunctional-tests -pl 
$(JOB2_MODULES)
+                && grep \"testcase\" */target/surefire-reports/*.xml 
*/*/target/surefire-reports/*.xml | awk -F'\"' ' { print $6,$4,$2 } ' | sort 
-nr | head -n 100"
       - job: UT_FT_3
         displayName: UT spark-datasource
         timeoutInMinutes: '240'
         steps:
-          - task: Maven@4
-            displayName: maven install
+          - task: Docker@2
+            displayName: "login to docker"
+            inputs:
+              command: "login"
+              containerRegistry: "apachehudi-docker-hub"
+          - task: Docker@2
+            displayName: "load repo into image"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'clean install'
-              options: $(MVN_OPTS_INSTALL)
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-          - task: Maven@4
-            displayName: UT spark-datasource
+              containerRegistry: 'apachehudi-docker-hub'
+              repository: 'apachehudi/hudi-ci-bundle-validation-base'
+              command: 'build'
+              Dockerfile: '**/Dockerfile'
+              ImageName: $(Build.BuildId)
+          - task: Docker@2
+            displayName: "UT spark-datasource"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'test'
-              options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB3_MODULES)
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-              mavenOptions: '-Xmx4g'
-          - script: |
-              grep "testcase" */target/surefire-reports/*.xml 
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr 
| head -n 100
-            displayName: Top 100 long-running testcases
+              containerRegistry: 'apachehudi-docker-hub'
+              repository: 'apachehudi/hudi-ci-bundle-validation-base'
+              command: 'run'
+              arguments: >
+                -i 
docker.io/apachehudi/hudi-ci-bundle-validation-base:$(Build.BuildId)
+                /bin/bash -c "mvn clean install $(MVN_OPTS_INSTALL) && mvn 
test  $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB3_MODULES)
+                && grep \"testcase\" */target/surefire-reports/*.xml 
*/*/target/surefire-reports/*.xml | awk -F'\"' ' { print $6,$4,$2 } ' | sort 
-nr | head -n 100"
       - job: UT_FT_4
         displayName: UT FT other modules
         timeoutInMinutes: '240'
         steps:
-          - task: Maven@4
-            displayName: maven install
+          - task: Docker@2
+            displayName: "login to docker hub"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'clean install'
-              options: $(MVN_OPTS_INSTALL)
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-          - task: Maven@4
-            displayName: UT other modules
+              command: "login"
+              containerRegistry: "apachehudi-docker-hub"
+          - task: Docker@2
+            displayName: "load repo into image"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'test'
-              options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB4_UT_MODULES)
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-              mavenOptions: '-Xmx4g'
-          - task: Maven@4
-            displayName: FT other modules
+              containerRegistry: 'apachehudi-docker-hub'
+              repository: 'apachehudi/hudi-ci-bundle-validation-base'
+              command: 'build'
+              Dockerfile: '**/Dockerfile'
+              ImageName: $(Build.BuildId)
+          - task: Docker@2
+            displayName: "UT FT other modules"
             inputs:
-              mavenPomFile: 'pom.xml'
-              goals: 'test'
-              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl 
$(JOB4_FT_MODULES)
-              publishJUnitResults: true
-              testResultsFiles: '**/surefire-reports/TEST-*.xml'
-              jdkVersionOption: '1.8'
-              mavenOptions: '-Xmx4g'
-          - script: |
-              grep "testcase" */target/surefire-reports/*.xml 
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr 
| head -n 100
-            displayName: Top 100 long-running testcases
+              containerRegistry: 'apachehudi-docker-hub'
+              repository: 'apachehudi/hudi-ci-bundle-validation-base'
+              command: 'run'
+              arguments: >
+                -i 
docker.io/apachehudi/hudi-ci-bundle-validation-base:$(Build.BuildId)
+                /bin/bash -c "mvn clean install $(MVN_OPTS_INSTALL) 
-Phudi-platform-service -Pthrift-gen-source
+                && mvn test  $(MVN_OPTS_TEST) -Punit-tests -pl 
$(JOB4_UT_MODULES)
+                && mvn test  $(MVN_OPTS_TEST) -Pfunctional-tests -pl 
$(JOB4_FT_MODULES)
+                && grep \"testcase\" */target/surefire-reports/*.xml 
*/*/target/surefire-reports/*.xml | awk -F'\"' ' { print $6,$4,$2 } ' | sort 
-nr | head -n 100"
\ No newline at end of file
diff --git 
a/hudi-platform-service/hudi-metaserver/hudi-metaserver-server/pom.xml 
b/hudi-platform-service/hudi-metaserver/hudi-metaserver-server/pom.xml
index d593eae75ea..39bfc244176 100644
--- a/hudi-platform-service/hudi-metaserver/hudi-metaserver-server/pom.xml
+++ b/hudi-platform-service/hudi-metaserver/hudi-metaserver-server/pom.xml
@@ -92,6 +92,34 @@
                 </plugins>
             </build>
         </profile>
+        <profile>
+            <id>thrift-gen-source-with-script</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.codehaus.mojo</groupId>
+                        <artifactId>exec-maven-plugin</artifactId>
+                        <version>1.6.0</version>
+                        <executions>
+                            <execution>
+                                <id>thrift-install-and-generate-source</id>
+                                <phase>generate-sources</phase>
+                                <goals>
+                                    <goal>exec</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                        <configuration>
+                            
<executable>${project.parent.basedir}/src/main/thrift/bin/thrift_binary.sh</executable>
+                            <arguments>
+                                <argument>${thrift.install.env}</argument>
+                            </arguments>
+                            <skip>false</skip>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
         <profile>
             <id>m1-mac</id>
             <properties>
@@ -108,27 +136,6 @@
 
     <build>
         <plugins>
-            <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>exec-maven-plugin</artifactId>
-                <version>1.6.0</version>
-                <executions>
-                    <execution>
-                        <id>thrift-install-and-generate-source</id>
-                        <phase>generate-sources</phase>
-                        <goals>
-                            <goal>exec</goal>
-                        </goals>
-                    </execution>
-                </executions>
-                <configuration>
-                    
<executable>${project.parent.basedir}/src/main/thrift/bin/thrift_binary.sh</executable>
-                    <arguments>
-                        <argument>${thrift.install.env}</argument>
-                    </arguments>
-                    <skip>false</skip>
-                </configuration>
-            </plugin>
             <plugin>
                 <groupId>org.jacoco</groupId>
                 <artifactId>jacoco-maven-plugin</artifactId>
diff --git a/pom.xml b/pom.xml
index f6c57bdba7c..9dd577d8a64 100644
--- a/pom.xml
+++ b/pom.xml
@@ -227,6 +227,7 @@
     <springboot.version>2.7.3</springboot.version>
     <spring.shell.version>2.1.1</spring.shell.version>
     <snappy.version>1.1.8.3</snappy.version>
+    <thrift.executable>/usr/local/bin/thrift</thrift.executable>
   </properties>
 
   <scm>

Reply via email to