This is an automated email from the ASF dual-hosted git repository.

vhs pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 14a549f45c2c chore(ci): Add codecov coverage from tests running on Spark 4.0 (#18335)
14a549f45c2c is described below

commit 14a549f45c2ca713369177b6e8cc9c9d094c915e
Author: Y Ethan Guo <[email protected]>
AuthorDate: Tue Mar 17 20:58:05 2026 -0700

    chore(ci): Add codecov coverage from tests running on Spark 4.0 (#18335)
---
 .github/workflows/bot.yml | 75 ++++++++++++++++++++++++++++++++++++++++-------
 1 file changed, 65 insertions(+), 10 deletions(-)

diff --git a/.github/workflows/bot.yml b/.github/workflows/bot.yml
index 66d3dfe206eb..418b653ebdbc 100644
--- a/.github/workflows/bot.yml
+++ b/.github/workflows/bot.yml
@@ -634,7 +634,18 @@ jobs:
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SPARK_MODULES: ${{ matrix.sparkModules }}
         run:
-          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER1 -Dsurefire.failIfNoSpecifiedTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER1 -Dsurefire.failIfNoSpecifiedTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS -Djacoco.skip=false
+      - name: Generate merged coverage report
+        if: always()
+        run: ./scripts/jacoco/generate_merged_coverage_report.sh $GITHUB_WORKSPACE
+      - name: Upload coverage to Codecov
+        if: always()
+        uses: codecov/codecov-action@v5
+        with:
+          files: ./jacoco-report.xml
+          disable_search: true
+          flags: spark-java-tests
+          token: ${{ secrets.CODECOV_TOKEN }}
 
   test-spark-java17-java-tests-part2:
     runs-on: ubuntu-latest
@@ -669,21 +680,32 @@ jobs:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
         run:
-          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -Dsurefire.failIfNoSpecifiedTests=false -pl hudi-examples/hudi-examples-spark $MVN_ARGS
+          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -Dsurefire.failIfNoSpecifiedTests=false -pl hudi-examples/hudi-examples-spark $MVN_ARGS -Djacoco.skip=false
       - name: Java UT 2 - Common & Spark
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SPARK_MODULES: ${{ matrix.sparkModules }}
         run:
-          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER2 -Dsurefire.failIfNoSpecifiedTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER2 -Dsurefire.failIfNoSpecifiedTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS -Djacoco.skip=false
       - name: Java FTA - Spark
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SPARK_MODULES: ${{ matrix.sparkModules }}
         run:
-          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS -Djacoco.skip=false
+      - name: Generate merged coverage report
+        if: always()
+        run: ./scripts/jacoco/generate_merged_coverage_report.sh $GITHUB_WORKSPACE
+      - name: Upload coverage to Codecov
+        if: always()
+        uses: codecov/codecov-action@v5
+        with:
+          files: ./jacoco-report.xml
+          disable_search: true
+          flags: spark-java-tests
+          token: ${{ secrets.CODECOV_TOKEN }}
 
   test-spark-java17-java-tests-part3:
     runs-on: ubuntu-latest
@@ -719,14 +741,25 @@ jobs:
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SPARK_MODULES: ${{ matrix.sparkModules }}
         run:
-          mvn test -Pfunctional-tests-b -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Pfunctional-tests-b -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS -Djacoco.skip=false
       - name: Java FTC - Spark
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SPARK_MODULES: ${{ matrix.sparkModules }}
         run:
-          mvn test -Pfunctional-tests-c -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Pfunctional-tests-c -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS -Djacoco.skip=false
+      - name: Generate merged coverage report
+        if: always()
+        run: ./scripts/jacoco/generate_merged_coverage_report.sh $GITHUB_WORKSPACE
+      - name: Upload coverage to Codecov
+        if: always()
+        uses: codecov/codecov-action@v5
+        with:
+          files: ./jacoco-report.xml
+          disable_search: true
+          flags: spark-java-tests
+          token: ${{ secrets.CODECOV_TOKEN }}
 
   test-spark-java17-scala-dml-tests:
     runs-on: ubuntu-latest
@@ -762,14 +795,25 @@ jobs:
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SPARK_MODULES: ${{ matrix.sparkModules }}
         run:
-          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -Dsurefire.failIfNoSpecifiedTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -Dsurefire.failIfNoSpecifiedTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS -Djacoco.skip=false
       - name: Scala FT - Spark
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SPARK_MODULES: ${{ matrix.sparkModules }}
         run:
-          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -Dsurefire.failIfNoSpecifiedTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -Dsurefire.failIfNoSpecifiedTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS -Djacoco.skip=false
+      - name: Generate merged coverage report
+        if: always()
+        run: ./scripts/jacoco/generate_merged_coverage_report.sh $GITHUB_WORKSPACE
+      - name: Upload coverage to Codecov
+        if: always()
+        uses: codecov/codecov-action@v5
+        with:
+          files: ./jacoco-report.xml
+          disable_search: true
+          flags: spark-scala-tests
+          token: ${{ secrets.CODECOV_TOKEN }}
 
   test-spark-java17-scala-other-tests:
     runs-on: ubuntu-latest
@@ -805,14 +849,25 @@ jobs:
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SPARK_MODULES: ${{ matrix.sparkModules }}
         run:
-          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -Dsurefire.failIfNoSpecifiedTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -Dsurefire.failIfNoSpecifiedTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS -Djacoco.skip=false
       - name: Scala FT - Spark
         env:
           SCALA_PROFILE: ${{ matrix.scalaProfile }}
           SPARK_PROFILE: ${{ matrix.sparkProfile }}
           SPARK_MODULES: ${{ matrix.sparkModules }}
         run:
-          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -Dsurefire.failIfNoSpecifiedTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
+          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -Dsurefire.failIfNoSpecifiedTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS -Djacoco.skip=false
+      - name: Generate merged coverage report
+        if: always()
+        run: ./scripts/jacoco/generate_merged_coverage_report.sh $GITHUB_WORKSPACE
+      - name: Upload coverage to Codecov
+        if: always()
+        uses: codecov/codecov-action@v5
+        with:
+          files: ./jacoco-report.xml
+          disable_search: true
+          flags: spark-scala-tests
+          token: ${{ secrets.CODECOV_TOKEN }}
 
   test-flink-1:
     runs-on: ubuntu-latest

Reply via email to