This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 5e6795d0534 [MINOR] Improve CI to parallelize tests more and reduce
completion time (#13031)
5e6795d0534 is described below
commit 5e6795d053475844beefc47e9efa3f7454796dca
Author: Y Ethan Guo <[email protected]>
AuthorDate: Wed Mar 26 14:42:50 2025 -0700
[MINOR] Improve CI to parallelize tests more and reduce completion time
(#13031)
---
.github/workflows/bot.yml | 380 +++++++++++++++++++--
azure-pipelines-20230430.yml | 137 ++++++--
...e.java => HoodieSparkFunctionalTestSuiteA.java} | 3 +-
...e.java => HoodieSparkFunctionalTestSuiteB.java} | 5 +-
.../functional/TestNewHoodieParquetFileFormat.java | 2 +-
.../hudi/functional/ColumnStatIndexTestBase.scala | 14 +-
.../hudi/functional/TestColumnStatsIndex.scala | 5 +-
.../hudi/functional/TestPartitionStatsIndex.scala | 2 +-
pom.xml | 61 +++-
9 files changed, 540 insertions(+), 69 deletions(-)
diff --git a/.github/workflows/bot.yml b/.github/workflows/bot.yml
index b99632bec2b..d0ae4c8a540 100644
--- a/.github/workflows/bot.yml
+++ b/.github/workflows/bot.yml
@@ -30,6 +30,12 @@ concurrency:
env:
MVN_ARGS: -e -ntp -B -V -Dgpg.skip -Djacoco.skip -Pwarn-log
-Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn
-Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn
-Dmaven.wagon.httpconnectionManager.ttlSeconds=25
-Dmaven.wagon.http.retryHandler.count=5
SPARK_COMMON_MODULES:
hudi-spark-datasource/hudi-spark,hudi-spark-datasource/hudi-spark-common
+ JAVA_UT_FILTER1: -Dtest=!TestCOWDataSource,!TestMORDataSource
+ JAVA_UT_FILTER2: -Dtest=TestCOWDataSource,TestMORDataSource
+ SCALA_TEST_DML_FILTER: -DwildcardSuites=org.apache.spark.sql.hudi.dml
+ SCALA_TEST_OTHERS_FILTER:
-DwildcardSuites=org.apache.hudi,org.apache.spark.hudi,org.apache.spark.sql.avro,org.apache.spark.sql.execution,org.apache.spark.sql.hudi.analysis,org.apache.spark.sql.hudi.command,org.apache.spark.sql.hudi.common,org.apache.spark.sql.hudi.ddl,org.apache.spark.sql.hudi.procedure,org.apache.spark.sql.hudi.feature
+ FLINK_IT_FILTER1: -Dit.test=ITTestHoodieDataSource
+ FLINK_IT_FILTER2: -Dit.test=!ITTestHoodieDataSource
jobs:
validate-source:
@@ -53,7 +59,51 @@ jobs:
- name: RAT check
run: ./scripts/release/validate_source_rat.sh
- test-spark-java-tests:
+ test-spark-java-unit-tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ include:
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.3"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.3.x"
+
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.4"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.4.x"
+
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.5"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
+
+ - scalaProfile: "scala-2.13"
+ sparkProfile: "spark3.5"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 8
+ uses: actions/setup-java@v3
+ with:
+ java-version: '8'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Build Project
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ run:
+ mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DskipTests=true $MVN_ARGS -am -pl
"hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES"
+ - name: Java UT 1 - Common & Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ if: ${{ !endsWith(env.SPARK_PROFILE, '3.5') ||
!endsWith(env.SCALA_PROFILE, '2.12') }} # skip test Spark 3.5 and Scala 2.12 as
it's covered by Azure CI
+ run:
+ mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests $JAVA_UT_FILTER1 -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+
+ test-spark-java-functional-tests:
runs-on: ubuntu-latest
strategy:
matrix:
@@ -94,15 +144,15 @@ jobs:
SPARK_PROFILE: ${{ matrix.sparkProfile }}
run:
mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl
hudi-examples/hudi-examples-spark $MVN_ARGS
- - name: Java UT - Common & Spark
+ - name: Java UT 2 - Common & Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
if: ${{ !endsWith(env.SPARK_PROFILE, '3.5') ||
!endsWith(env.SCALA_PROFILE, '2.12') }} # skip test Spark 3.5 and Scala 2.12 as
it's covered by Azure CI
run:
- mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- - name: Java FT - Spark
+ mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests $JAVA_UT_FILTER2 -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ - name: Java FTA - Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
@@ -110,8 +160,69 @@ jobs:
if: ${{ !endsWith(env.SPARK_PROFILE, '3.5') ||
!endsWith(env.SCALA_PROFILE, '2.12') }} # skip test Spark 3.5 and Scala 2.12 as
it's covered by Azure CI
run:
mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ - name: Java FTB - Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ if: ${{ !endsWith(env.SPARK_PROFILE, '3.5') ||
!endsWith(env.SCALA_PROFILE, '2.12') }} # skip test Spark 3.5 and Scala 2.12 as
it's covered by Azure CI
+ run:
+ mvn test -Pfunctional-tests-b -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+
+
+ test-spark-scala-dml-tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ include:
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.3"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.3.x"
+
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.4"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.4.x"
+
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.5"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
+
+ - scalaProfile: "scala-2.13"
+ sparkProfile: "spark3.5"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 8
+ uses: actions/setup-java@v3
+ with:
+ java-version: '8'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Build Project
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ run:
+ mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DskipTests=true $MVN_ARGS -am -pl
"hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES"
+ - name: Scala UT - Common & Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ if: ${{ !endsWith(env.SPARK_PROFILE, '3.5') ||
!endsWith(env.SCALA_PROFILE, '2.12') }} # skip test Spark 3.5 and Scala 2.12 as
it's covered by Azure CI
+ run:
+ mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ - name: Scala FT - Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ if: ${{ !endsWith(env.SPARK_PROFILE, '3.5') ||
!endsWith(env.SCALA_PROFILE, '2.12') }} # skip test Spark 3.5 and Scala 2.12 as
it's covered by Azure CI
+ run:
+ mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- test-spark-scala-tests:
+ test-spark-scala-other-tests:
runs-on: ubuntu-latest
strategy:
matrix:
@@ -153,7 +264,7 @@ jobs:
SPARK_MODULES: ${{ matrix.sparkModules }}
if: ${{ !endsWith(env.SPARK_PROFILE, '3.5') ||
!endsWith(env.SCALA_PROFILE, '2.12') }} # skip test Spark 3.5 and Scala 2.12 as
it's covered by Azure CI
run:
- mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- name: Scala FT - Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
@@ -161,7 +272,7 @@ jobs:
SPARK_MODULES: ${{ matrix.sparkModules }}
if: ${{ !endsWith(env.SPARK_PROFILE, '3.5') ||
!endsWith(env.SCALA_PROFILE, '2.12') }} # skip test Spark 3.5 and Scala 2.12 as
it's covered by Azure CI
run:
- mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
test-hudi-hadoop-mr-and-hudi-java-client:
runs-on: ubuntu-latest
@@ -199,7 +310,47 @@ jobs:
run:
./mvnw test -Punit-tests -fae -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-D"$FLINK_PROFILE" -pl hudi-hadoop-mr,hudi-client/hudi-java-client $MVN_ARGS
- test-spark-java17-java-tests:
+ test-spark-java17-java-unit-tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ include:
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.3"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.3.x"
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.4"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.4.x"
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 8
+ uses: actions/setup-java@v3
+ with:
+ java-version: '8'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Build Project
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ run:
+ mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DskipTests=true $MVN_ARGS -am -pl
"hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES"
+ - name: Set up JDK 17
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Java UT 1 - Common & Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ run:
+ mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests $JAVA_UT_FILTER1 -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+
+ test-spark-java17-java-functional-tests:
runs-on: ubuntu-latest
strategy:
matrix:
@@ -237,22 +388,30 @@ jobs:
SPARK_PROFILE: ${{ matrix.sparkProfile }}
run:
mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl
hudi-examples/hudi-examples-spark $MVN_ARGS
- - name: Java UT - Common & Spark
+ - name: Java UT 2 - Common & Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
run:
- mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- - name: Java FT - Spark
+ mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests $JAVA_UT_FILTER2 -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ - name: Java FTA - Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
run:
mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ - name: Java FTB - Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ run:
+ mvn test -Pfunctional-tests-b -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- test-spark-java17-scala-tests:
+
+ test-spark-java17-scala-dml-tests:
runs-on: ubuntu-latest
strategy:
matrix:
@@ -290,16 +449,103 @@ jobs:
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
run:
- mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- name: Scala FT - Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
run:
- mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -Dtest=skipJavaTests -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER
-DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- test-spark-java11-17-java-tests:
+ test-spark-java17-scala-other-tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ include:
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.3"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.3.x"
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.4"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.4.x"
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 8
+ uses: actions/setup-java@v3
+ with:
+ java-version: '8'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Build Project
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ run:
+ mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DskipTests=true $MVN_ARGS -am -pl
"hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES"
+ - name: Set up JDK 17
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Scala UT - Common & Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ run:
+ mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ - name: Scala FT - Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ run:
+ mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER
-DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+
+ test-spark-java11-17-java-unit-tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ include:
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.5"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
+ - scalaProfile: "scala-2.13"
+ sparkProfile: "spark3.5"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 11
+ uses: actions/setup-java@v3
+ with:
+ java-version: '11'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Build Project
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ run:
+ mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DskipTests=true $MVN_ARGS -am -pl
"hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES"
+ - name: Set up JDK 17
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Java UT 1 - Common & Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ run:
+ mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests $JAVA_UT_FILTER1 -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+
+ test-spark-java11-17-java-functional-tests:
runs-on: ubuntu-latest
strategy:
matrix:
@@ -337,22 +583,76 @@ jobs:
SPARK_PROFILE: ${{ matrix.sparkProfile }}
run:
mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl
hudi-examples/hudi-examples-spark $MVN_ARGS
- - name: Java UT - Common & Spark
+ - name: Java UT 2 - Common & Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
run:
- mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- - name: Java FT - Spark
+ mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DwildcardSuites=skipScalaTests $JAVA_UT_FILTER2 -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ - name: Java FTA - Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
run:
mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ - name: Java FTB - Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ run:
+ mvn test -Pfunctional-tests-b -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+
+ test-spark-java11-17-scala-dml-tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ include:
+ - scalaProfile: "scala-2.12"
+ sparkProfile: "spark3.5"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
+ - scalaProfile: "scala-2.13"
+ sparkProfile: "spark3.5"
+ sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 11
+ uses: actions/setup-java@v3
+ with:
+ java-version: '11'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Build Project
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ run:
+ mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-DskipTests=true $MVN_ARGS -am -pl
"hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES"
+ - name: Set up JDK 17
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Scala UT - Common & Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ run:
+ mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ - name: Scala FT - Spark
+ env:
+ SCALA_PROFILE: ${{ matrix.scalaProfile }}
+ SPARK_PROFILE: ${{ matrix.sparkProfile }}
+ SPARK_MODULES: ${{ matrix.sparkModules }}
+ run:
+ mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER
-DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- test-spark-java11-17-scala-tests:
+ test-spark-java11-17-scala-other-tests:
runs-on: ubuntu-latest
strategy:
matrix:
@@ -390,16 +690,16 @@ jobs:
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
run:
- mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE"
-Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl
"hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- name: Scala FT - Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
run:
- mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -Dtest=skipJavaTests -DfailIfNoTests=false -pl
"$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+ mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE"
-D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER
-DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- test-flink:
+ test-flink-1:
runs-on: ubuntu-latest
strategy:
matrix:
@@ -440,7 +740,39 @@ jobs:
FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }}
run:
mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE"
-Davro.version="$FLINK_AVRO_VERSION" -pl hudi-examples/hudi-examples-flink
$MVN_ARGS
- - name: Integration Test
+ - name: Integration Test 1
+ env:
+ SCALA_PROFILE: 'scala-2.12'
+ FLINK_PROFILE: ${{ matrix.flinkProfile }}
+ FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }}
+ if: ${{ endsWith(env.FLINK_PROFILE, '1.20') }}
+ run: |
+ mvn clean install -T 2 -Pintegration-tests -D"$SCALA_PROFILE"
-D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am
-Davro.version="$FLINK_AVRO_VERSION" -DskipTests=true $MVN_ARGS
+ mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE"
-Davro.version="$FLINK_AVRO_VERSION" $FLINK_IT_FILTER1 -pl
hudi-flink-datasource/hudi-flink $MVN_ARGS
+
+ test-flink-2:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ include:
+ - flinkProfile: "flink1.20"
+ flinkAvroVersion: "1.11.3"
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 8
+ uses: actions/setup-java@v3
+ with:
+ java-version: '8'
+ distribution: 'temurin'
+ architecture: x64
+ - name: Build Project
+ env:
+ SCALA_PROFILE: 'scala-2.12'
+ FLINK_PROFILE: ${{ matrix.flinkProfile }}
+ FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }}
+ run:
+ mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl
hudi-examples/hudi-examples-flink -am -Davro.version="$FLINK_AVRO_VERSION"
-DskipTests=true $MVN_ARGS
+ - name: Integration Test 2
env:
SCALA_PROFILE: 'scala-2.12'
FLINK_PROFILE: ${{ matrix.flinkProfile }}
@@ -448,7 +780,7 @@ jobs:
if: ${{ endsWith(env.FLINK_PROFILE, '1.20') }}
run: |
mvn clean install -T 2 -Pintegration-tests -D"$SCALA_PROFILE"
-D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am
-Davro.version="$FLINK_AVRO_VERSION" -DskipTests=true $MVN_ARGS
- mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE"
-Davro.version="$FLINK_AVRO_VERSION" -pl hudi-flink-datasource/hudi-flink
$MVN_ARGS
+ mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE"
-Davro.version="$FLINK_AVRO_VERSION" $FLINK_IT_FILTER2 -pl
hudi-flink-datasource/hudi-flink $MVN_ARGS
docker-java17-test:
runs-on: ubuntu-latest
diff --git a/azure-pipelines-20230430.yml b/azure-pipelines-20230430.yml
index aa75bb9d3dc..d7841440239 100644
--- a/azure-pipelines-20230430.yml
+++ b/azure-pipelines-20230430.yml
@@ -36,9 +36,10 @@ parameters:
- 'hudi-spark-datasource/hudi-spark3.5.x'
- 'hudi-spark-datasource/hudi-spark3-common'
- 'hudi-spark-datasource/hudi-spark-common'
- - name: job8UTModules
+ - name: job10UTModules
type: object
default:
+ - '!hudi-hadoop-common'
- '!hudi-hadoop-mr'
- '!hudi-client/hudi-java-client'
- '!hudi-client/hudi-spark-client'
@@ -54,7 +55,7 @@ parameters:
- '!hudi-spark-datasource/hudi-spark3-common'
- '!hudi-spark-datasource/hudi-spark-common'
- '!hudi-utilities'
- - name: job8FTModules
+ - name: job10FTModules
type: object
default:
- '!hudi-client/hudi-spark-client'
@@ -115,8 +116,8 @@ variables:
MVN_ARG_FUNCTIONAL_PACKAGE_TEST:
"-Dtest=\"$(JAVA_FUNCTIONAL_PACKAGE_TEST_FILTER)\""
MVN_ARG_NON_FUNCTIONAL_PACKAGE_TEST:
"-Dtest=\"!$(JAVA_FUNCTIONAL_PACKAGE_TEST_FILTER)\""
JOB6_SPARK_DDL_OTHERS_WILDCARD_SUITES: ${{
join(',',parameters.job6HudiSparkDdlOthersWildcardSuites) }}
- JOB8_UT_MODULES: ${{ join(',',parameters.job8UTModules) }}
- JOB8_FT_MODULES: ${{ join(',',parameters.job8FTModules) }}
+ JOB10_UT_MODULES: ${{ join(',',parameters.job10UTModules) }}
+ JOB10_FT_MODULES: ${{ join(',',parameters.job10FTModules) }}
JACOCO_AGENT_DESTFILE1_ARG: '-Djacoco.agent.dest.filename=jacoco1.exec'
JACOCO_AGENT_DESTFILE2_ARG: '-Djacoco.agent.dest.filename=jacoco2.exec'
JACOCO_AGENT_DESTFILE3_ARG: '-Djacoco.agent.dest.filename=jacoco3.exec'
@@ -129,7 +130,7 @@ stages:
value: 1
jobs:
- job: UT_FT_1
- displayName: UT FT client/spark-client
+ displayName: UT client/spark-client
timeoutInMinutes: '75'
steps:
- task: Maven@4
@@ -145,17 +146,7 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
- options: $(MVN_OPTS_TEST) -Punit-tests
$(JACOCO_AGENT_DESTFILE1_ARG) -pl hudi-client/hudi-spark-client
- publishJUnitResults: false
- jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g'
- - task: Maven@4
- displayName: FT client/spark-client
- inputs:
- mavenPomFile: 'pom.xml'
- goals: 'test'
- # TODO(HUDI-9143): Investigate why Jacoco execution data file is
corrupt
- options: $(MVN_OPTS_TEST) -Pfunctional-tests
-Djacoco.agent.dest.filename=jacoco2.corrupt -pl hudi-client/hudi-spark-client
+ options: $(MVN_OPTS_TEST) -Punit-tests
$(JACOCO_AGENT_DESTFILE2_ARG) -pl hudi-client/hudi-spark-client
publishJUnitResults: true
testResultsFiles: '**/surefire-reports/TEST-*.xml'
jdkVersionOption: '1.8'
@@ -174,7 +165,7 @@ stages:
grep "testcase" */target/surefire-reports/*.xml
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
| head -n 100
displayName: Top 100 long-running testcases
- job: UT_FT_2
- displayName: FT hudi-spark
+ displayName: FTA hudi-spark
timeoutInMinutes: '75'
steps:
- task: Maven@4
@@ -186,7 +177,7 @@ stages:
publishJUnitResults: false
jdkVersionOption: '1.8'
- task: Maven@4
- displayName: FT hudi-spark-datasource/hudi-spark
+ displayName: FTA hudi-spark-datasource/hudi-spark
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
@@ -358,8 +349,8 @@ stages:
grep "testcase" */target/surefire-reports/*.xml
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
| head -n 100
displayName: Top 100 long-running testcases
- job: UT_FT_7
- displayName: UT FT Hudi Utilities
- timeoutInMinutes: '120'
+ displayName: UT Hudi Streamer & FT utilities
+ timeoutInMinutes: '75'
steps:
- task: Docker@2
displayName: "login to docker hub"
@@ -375,7 +366,7 @@ stages:
Dockerfile: '**/Dockerfile'
ImageName: $(Build.BuildId)
- task: Docker@2
- displayName: "UT FT Hudi Utilities"
+ displayName: "UT Hudi Streamer & FT utilities"
inputs:
containerRegistry: 'apachehudi-docker-hub'
repository: 'apachehudi/hudi-ci-bundle-validation-base'
@@ -384,7 +375,7 @@ stages:
-v $(Build.SourcesDirectory):/hudi
-i
docker.io/apachehudi/hudi-ci-bundle-validation-base:$(Build.BuildId)
/bin/bash -c "mvn clean install $(MVN_OPTS_INSTALL)
-Phudi-platform-service -Pthrift-gen-source -pl hudi-utilities -am
- && mvn test $(MVN_OPTS_TEST) -Punit-tests
$(JACOCO_AGENT_DESTFILE1_ARG) -DfailIfNoTests=false -DargLine="-Xmx4g" -pl
hudi-utilities
+ && mvn test $(MVN_OPTS_TEST) -Punit-tests
$(JACOCO_AGENT_DESTFILE1_ARG) -Dtest="TestHoodieDeltaStreamer*"
-DfailIfNoTests=false -DargLine="-Xmx4g" -pl hudi-utilities
&& mvn test $(MVN_OPTS_TEST) -Pfunctional-tests
$(JACOCO_AGENT_DESTFILE2_ARG) -DfailIfNoTests=false -DargLine="-Xmx4g" -pl
hudi-utilities"
- task: PublishTestResults@2
displayName: 'Publish Test Results'
@@ -407,6 +398,100 @@ stages:
grep "testcase" */target/surefire-reports/*.xml
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
| head -n 100
displayName: Top 100 long-running testcases
- job: UT_FT_8
+ displayName: UT hudi-hadoop-common & Hudi Utilities others
+ timeoutInMinutes: '75'
+ steps:
+ - task: Docker@2
+ displayName: "login to docker hub"
+ inputs:
+ command: "login"
+ containerRegistry: "apachehudi-docker-hub"
+ - task: Docker@2
+ displayName: "load repo into image"
+ inputs:
+ containerRegistry: 'apachehudi-docker-hub'
+ repository: 'apachehudi/hudi-ci-bundle-validation-base'
+ command: 'build'
+ Dockerfile: '**/Dockerfile'
+ ImageName: $(Build.BuildId)
+ - task: Docker@2
+ displayName: "UT hudi-hadoop-common & Hudi Utilities others"
+ inputs:
+ containerRegistry: 'apachehudi-docker-hub'
+ repository: 'apachehudi/hudi-ci-bundle-validation-base'
+ command: 'run'
+ arguments: >
+ -v $(Build.SourcesDirectory):/hudi
+ -i
docker.io/apachehudi/hudi-ci-bundle-validation-base:$(Build.BuildId)
+ /bin/bash -c "mvn clean install $(MVN_OPTS_INSTALL)
-Phudi-platform-service -Pthrift-gen-source -pl hudi-utilities -am
+ && mvn test $(MVN_OPTS_TEST) -Punit-tests
$(JACOCO_AGENT_DESTFILE1_ARG) -DargLine="-Xmx4g" -pl hudi-hadoop-common
+          && mvn test $(MVN_OPTS_TEST) -Punit-tests
$(JACOCO_AGENT_DESTFILE2_ARG) -Dtest="!TestHoodieDeltaStreamer*"
-DfailIfNoTests=false -DargLine="-Xmx4g" -pl hudi-utilities"
+ - task: PublishTestResults@2
+ displayName: 'Publish Test Results'
+ inputs:
+ testResultsFormat: 'JUnit'
+ testResultsFiles: '**/surefire-reports/TEST-*.xml'
+ searchFolder: '$(Build.SourcesDirectory)'
+ failTaskOnFailedTests: true
+ - script: |
+ ./scripts/jacoco/download_jacoco.sh
+ ./scripts/jacoco/merge_jacoco_exec_files.sh
jacoco-lib/lib/jacococli.jar $(Build.SourcesDirectory)
+ displayName: 'Merge JaCoCo Execution Data Files'
+ - task: PublishBuildArtifacts@1
+ displayName: 'Publish Merged JaCoCo Execution Data File'
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/merged-jacoco.exec'
+ ArtifactName: 'merged-jacoco-$(Build.BuildId)-8'
+ publishLocation: 'Container'
+ - script: |
+ grep "testcase" */target/surefire-reports/*.xml
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
| head -n 100
+ displayName: Top 100 long-running testcases
+ - job: UT_FT_9
+ displayName: FT spark 2
+ timeoutInMinutes: '75'
+ steps:
+ - task: Maven@4
+ displayName: maven install
+ inputs:
+ mavenPomFile: 'pom.xml'
+ goals: 'clean install'
+ options: $(MVN_OPTS_INSTALL) -pl
hudi-client/hudi-spark-client,hudi-spark-datasource/hudi-spark -am
+ publishJUnitResults: false
+ jdkVersionOption: '1.8'
+ - task: Maven@4
+ displayName: FTB hudi-spark-datasource/hudi-spark
+ inputs:
+ mavenPomFile: 'pom.xml'
+ goals: 'test'
+ options: $(MVN_OPTS_TEST) -Pfunctional-tests-b
$(JACOCO_AGENT_DESTFILE1_ARG) -pl hudi-spark-datasource/hudi-spark
+ publishJUnitResults: false
+ jdkVersionOption: '1.8'
+ mavenOptions: '-Xmx4g'
+ - task: Maven@4
+ displayName: FT client/spark-client
+ inputs:
+ mavenPomFile: 'pom.xml'
+ goals: 'test'
+ # TODO(HUDI-9143): Investigate why Jacoco execution data file is
corrupt
+ options: $(MVN_OPTS_TEST) -Pfunctional-tests
-Djacoco.agent.dest.filename=jacoco2.corrupt -pl hudi-client/hudi-spark-client
+ publishJUnitResults: true
+ testResultsFiles: '**/surefire-reports/TEST-*.xml'
+ jdkVersionOption: '1.8'
+ mavenOptions: '-Xmx4g'
+ - script: |
+ ./scripts/jacoco/download_jacoco.sh
+ ./scripts/jacoco/merge_jacoco_exec_files.sh
jacoco-lib/lib/jacococli.jar $(Build.SourcesDirectory)
+ displayName: 'Merge JaCoCo Execution Data Files'
+ - task: PublishBuildArtifacts@1
+ displayName: 'Publish Merged JaCoCo Execution Data File'
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/merged-jacoco.exec'
+ ArtifactName: 'merged-jacoco-$(Build.BuildId)-9'
+ publishLocation: 'Container'
+ - script: |
+ grep "testcase" */target/surefire-reports/*.xml
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
| head -n 100
+ displayName: Top 100 long-running testcases
+ - job: UT_FT_10
displayName: UT FT common & other modules
timeoutInMinutes: '75'
steps:
@@ -433,8 +518,8 @@ stages:
-v $(Build.SourcesDirectory):/hudi
-i
docker.io/apachehudi/hudi-ci-bundle-validation-base:$(Build.BuildId)
/bin/bash -c "mvn clean install $(MVN_OPTS_INSTALL)
-Phudi-platform-service -Pthrift-gen-source
- && mvn test $(MVN_OPTS_TEST) -Punit-tests
-DfailIfNoTests=false -DargLine="-Xmx4g" $(JACOCO_AGENT_DESTFILE1_ARG) -pl
$(JOB8_UT_MODULES)
- && mvn test $(MVN_OPTS_TEST) -Pfunctional-tests
-DfailIfNoTests=false -DargLine="-Xmx4g" $(JACOCO_AGENT_DESTFILE2_ARG) -pl
$(JOB8_FT_MODULES)"
+ && mvn test $(MVN_OPTS_TEST) -Punit-tests
-DfailIfNoTests=false -DargLine="-Xmx4g" $(JACOCO_AGENT_DESTFILE1_ARG) -pl
$(JOB10_UT_MODULES)
+ && mvn test $(MVN_OPTS_TEST) -Pfunctional-tests
-DfailIfNoTests=false -DargLine="-Xmx4g" $(JACOCO_AGENT_DESTFILE2_ARG) -pl
$(JOB10_FT_MODULES)"
- task: PublishTestResults@2
displayName: 'Publish Test Results'
inputs:
@@ -450,7 +535,7 @@ stages:
displayName: 'Publish Merged JaCoCo Execution Data File'
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/merged-jacoco.exec'
- ArtifactName: 'merged-jacoco-$(Build.BuildId)-8'
+ ArtifactName: 'merged-jacoco-$(Build.BuildId)-10'
publishLocation: 'Container'
- script: |
grep "testcase" */target/surefire-reports/*.xml
*/*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
| head -n 100
@@ -466,6 +551,8 @@ stages:
- UT_FT_6
- UT_FT_7
- UT_FT_8
+ - UT_FT_9
+ - UT_FT_10
steps:
- task: DownloadBuildArtifacts@0
displayName: 'Download JaCoCo Execution Data Files'
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuite.java
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteA.java
similarity index 95%
copy from
hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuite.java
copy to
hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteA.java
index 5b2f42a158e..84bddd30c6e 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuite.java
+++
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteA.java
@@ -25,6 +25,5 @@ import org.junit.runner.RunWith;
@RunWith(JUnitPlatform.class)
@SelectPackages("org.apache.hudi.functional")
@IncludeTags("functional")
-public class HoodieSparkFunctionalTestSuite {
-
+public class HoodieSparkFunctionalTestSuiteA {
}
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuite.java
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteB.java
similarity index 93%
rename from
hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuite.java
rename to
hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteB.java
index 5b2f42a158e..b515bbf4f41 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuite.java
+++
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteB.java
@@ -24,7 +24,6 @@ import org.junit.runner.RunWith;
@RunWith(JUnitPlatform.class)
@SelectPackages("org.apache.hudi.functional")
-@IncludeTags("functional")
-public class HoodieSparkFunctionalTestSuite {
-
+@IncludeTags("functional-b")
+public class HoodieSparkFunctionalTestSuiteB {
}
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestNewHoodieParquetFileFormat.java
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestNewHoodieParquetFileFormat.java
index bf4480a8d76..0053c3f4a97 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestNewHoodieParquetFileFormat.java
+++
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestNewHoodieParquetFileFormat.java
@@ -40,7 +40,7 @@ import static
org.apache.hudi.common.model.HoodieTableType.COPY_ON_WRITE;
import static org.apache.hudi.common.model.HoodieTableType.MERGE_ON_READ;
import static org.junit.jupiter.api.Assertions.assertEquals;
-@Tag("functional")
+@Tag("functional-b")
public class TestNewHoodieParquetFileFormat extends TestBootstrapReadBase {
private static Stream<Arguments> testArgs() {
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/ColumnStatIndexTestBase.scala
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/ColumnStatIndexTestBase.scala
index c6d8d1ce293..d2dcb796ac3 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/ColumnStatIndexTestBase.scala
+++
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/ColumnStatIndexTestBase.scala
@@ -28,20 +28,16 @@ import org.apache.hudi.common.model.{HoodieBaseFile,
HoodieFileGroup, HoodieLogF
import org.apache.hudi.common.table.{HoodieTableMetaClient,
TableSchemaResolver}
import org.apache.hudi.common.table.view.FileSystemViewManager
import org.apache.hudi.config.HoodieCompactionConfig
-import org.apache.hudi.functional.ColumnStatIndexTestBase.ColumnStatsTestCase
-import org.apache.hudi.functional.ColumnStatIndexTestBase.ColumnStatsTestParams
+import
org.apache.hudi.functional.ColumnStatIndexTestBase.{ColumnStatsTestCase,
ColumnStatsTestParams}
import org.apache.hudi.metadata.HoodieTableMetadataUtil
import
org.apache.hudi.metadata.HoodieTableMetadataUtil.PARTITION_NAME_COLUMN_STATS
import org.apache.hudi.storage.StoragePath
import org.apache.hudi.storage.hadoop.HadoopStorageConfiguration
-import org.apache.hudi.testutils.HoodieSparkClientTestBase
-import org.apache.hudi.testutils.LogFileColStatsTestUtil
+import org.apache.hudi.testutils.{HoodieSparkClientTestBase,
LogFileColStatsTestUtil}
import org.apache.avro.Schema
-import org.apache.spark.sql._
-import org.apache.spark.sql.DataFrame
-import org.apache.spark.sql.functions.lit
-import org.apache.spark.sql.functions.typedLit
+import org.apache.spark.sql.{DataFrame, _}
+import org.apache.spark.sql.functions.{lit, typedLit}
import org.apache.spark.sql.types._
import org.junit.jupiter.api._
import org.junit.jupiter.api.Assertions.{assertEquals, assertTrue}
@@ -58,7 +54,7 @@ import scala.collection.JavaConverters._
import scala.collection.immutable.TreeSet
import scala.util.Random
-@Tag("functional")
+@Tag("functional-b")
class ColumnStatIndexTestBase extends HoodieSparkClientTestBase {
var spark: SparkSession = _
var dfList: Seq[DataFrame] = Seq()
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestColumnStatsIndex.scala
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestColumnStatsIndex.scala
index b7f2506426d..29867ac68a8 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestColumnStatsIndex.scala
+++
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestColumnStatsIndex.scala
@@ -32,8 +32,7 @@ import
org.apache.hudi.common.table.timeline.versioning.v1.InstantFileNameGenera
import org.apache.hudi.common.table.view.FileSystemViewManager
import org.apache.hudi.common.testutils.HoodieTestUtils
import
org.apache.hudi.common.testutils.HoodieTestUtils.INSTANT_FILE_NAME_GENERATOR
-import org.apache.hudi.common.util.ParquetUtils
-import org.apache.hudi.common.util.StringUtils
+import org.apache.hudi.common.util.{ParquetUtils, StringUtils}
import org.apache.hudi.config.{HoodieCleanConfig, HoodieCompactionConfig,
HoodieWriteConfig}
import
org.apache.hudi.functional.ColumnStatIndexTestBase.{ColumnStatsTestCase,
ColumnStatsTestParams, WrapperCreator}
import
org.apache.hudi.metadata.HoodieTableMetadataUtil.PARTITION_NAME_COLUMN_STATS
@@ -62,7 +61,7 @@ import java.util.stream.Collectors
import scala.collection.JavaConverters._
import scala.collection.convert.ImplicitConversions.`collection
AsScalaIterable`
-@Tag("functional")
+@Tag("functional-b")
class TestColumnStatsIndex extends ColumnStatIndexTestBase {
val DEFAULT_COLUMNS_TO_INDEX = Seq(HoodieRecord.COMMIT_TIME_METADATA_FIELD,
HoodieRecord.RECORD_KEY_METADATA_FIELD,
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestPartitionStatsIndex.scala
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestPartitionStatsIndex.scala
index 193a9127a51..676b412efae 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestPartitionStatsIndex.scala
+++
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestPartitionStatsIndex.scala
@@ -59,7 +59,7 @@ import scala.concurrent.duration.Duration
/**
* Test cases on partition stats index with Spark datasource.
*/
-@Tag("functional")
+@Tag("functional-b")
class TestPartitionStatsIndex extends PartitionStatsIndexTestBase {
val sqlTempTable = "hudi_tbl"
diff --git a/pom.xml b/pom.xml
index 0664bd492b0..6fd933fa521 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1989,9 +1989,11 @@
<configuration combine.self="append">
<skip>${skipUTs}</skip>
<forkedProcessExitTimeoutInSeconds>120</forkedProcessExitTimeoutInSeconds>
- <excludedGroups>functional</excludedGroups>
+ <excludedGroups>functional,functional-b</excludedGroups>
<excludes>
<exclude>**/*FunctionalTestSuite.java</exclude>
+ <exclude>**/*FunctionalTestSuiteA.java</exclude>
+ <exclude>**/*FunctionalTestSuiteB.java</exclude>
<exclude>**/IT*.java</exclude>
<exclude>**/testsuite/**/Test*.java</exclude>
</excludes>
@@ -2050,6 +2052,63 @@
<reuseForks>true</reuseForks>
<includes>
<include>**/*FunctionalTestSuite.java</include>
+ <include>**/*FunctionalTestSuiteA.java</include>
+ </includes>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.jacoco</groupId>
+ <artifactId>jacoco-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>prepare-agent</goal>
+ </goals>
+ <configuration>
+
<destFile>${project.build.directory}/jacoco-agent/${jacoco.agent.dest.filename}</destFile>
+ </configuration>
+ </execution>
+ <execution>
+ <id>post-functional-tests</id>
+ <phase>test</phase>
+ <goals>
+ <goal>report</goal>
+ </goals>
+ <configuration>
+
<outputDirectory>${project.reporting.outputDirectory}/jacoco-ft</outputDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ <profile>
+ <id>functional-tests-b</id>
+ <properties>
+ <skipUTs>true</skipUTs>
+ <skipFTs>false</skipFTs>
+ <skipITs>true</skipITs>
+ </properties>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>${maven-surefire-plugin.version}</version>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.maven.surefire</groupId>
+ <artifactId>surefire-junit47</artifactId>
+ <version>${maven-surefire-plugin.version}</version>
+ </dependency>
+ </dependencies>
+ <configuration combine.self="append">
+ <skip>${skipFTs}</skip>
+ <forkCount>1</forkCount>
+ <reuseForks>true</reuseForks>
+ <includes>
+ <include>**/*FunctionalTestSuiteB.java</include>
</includes>
</configuration>
</plugin>