This is an automated email from the ASF dual-hosted git repository.
lihao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/auron.git
The following commit(s) were added to refs/heads/master by this push:
new 41bd32f2 [AURON #2017] [BUILD] Add Spark 4.x support to dev/reformat
script. (#2018)
41bd32f2 is described below
commit 41bd32f2aaa9d8af17f3bde98847a130690baac3
Author: slfan1989 <[email protected]>
AuthorDate: Mon Mar 2 10:23:59 2026 +0800
[AURON #2017] [BUILD] Add Spark 4.x support to dev/reformat script. (#2018)
<!--
- Start the PR title with the related issue ID, e.g. '[AURON #XXXX]
Short summary...'.
-->
### Which issue does this PR close?
Closes #2017
### Rationale for this change
With Spark 4.0 and 4.1 support added to the project, the `dev/reformat`
script needs to be updated to handle formatting and style checks for
these new versions. Spark 4.x requires JDK 17+ and Scala 2.13, while
Spark 3.x uses JDK 8 and Scala 2.12. The script should automatically
switch between these environments.
### What changes are included in this PR?
#### 1. Fix Flink Maven profile
- Before: -Pflink,flink-1.18
- After: -Pflink-1.18
- Reason: Avoid activating non-existent flink profile
#### 2. Add Spark 4.x support
- Add spark-4.0 and spark-4.1 to the version sweep list
- Auto-switch to scala-2.13 profile for Spark 4.x (Spark 4.x requires
Scala 2.13)
- Auto-switch to JDK 17 for Spark 4.x (Spark 4.x requires JDK 17+)
- Auto-switch back to JDK 8 for Spark 3.x versions
#### 3. Update CI workflow (.github/workflows/style.yml)
- Add JDK 17 setup alongside existing JDK 8
- Enable style check to work with both Spark 3.x and Spark 4.x versions
### Are there any user-facing changes?
No.
### How was this patch tested?
Verified automatic JDK switching works for Spark 3.x (JDK 8) and Spark
4.x (JDK 17).
---------
Signed-off-by: slfan1989 <[email protected]>
---
.github/workflows/style.yml | 12 +++++
dev/reformat | 105 +++++++++++++++++++++++++++++++++++++++++---
2 files changed, 112 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
index 6f5ef9a9..cdf577ec 100644
--- a/.github/workflows/style.yml
+++ b/.github/workflows/style.yml
@@ -40,11 +40,23 @@ jobs:
steps:
- uses: actions/checkout@v6
- name: Setup JDK 8
+ id: jdk8
uses: actions/setup-java@v5
with:
distribution: 'adopt-hotspot'
java-version: 8
cache: 'maven'
check-latest: false
+ - name: Setup JDK 17
+ id: jdk17
+ uses: actions/setup-java@v5
+ with:
+ distribution: 'temurin'
+ java-version: 17
+ cache: 'maven'
+ check-latest: false
- run: |
./dev/reformat --check
+ env:
+ JAVA_HOME_8: ${{ steps.jdk8.outputs.java-home }}
+ JAVA_HOME_17: ${{ steps.jdk17.outputs.java-home }}
diff --git a/dev/reformat b/dev/reformat
index 5a463a63..1631cb03 100755
--- a/dev/reformat
+++ b/dev/reformat
@@ -32,7 +32,95 @@ done
MODE=pre
SCALA_PROFILE=scala-2.12
-function run_maven() {
+ORIGINAL_PATH="$PATH"
+
+function java_major() {
+ # Extract Java major version (8, 11, 17, ...) from a given java command.
+ local java_cmd="$1"
+ local version
+ version="$("${java_cmd}" -version 2>&1 | awk -F '"' '/version/ {print $2;
exit}')"
+ if [[ -z "$version" ]]; then
+ return 1
+ fi
+ if [[ "$version" == 1.* ]]; then
+ echo "${version#1.}" | cut -d. -f1
+ else
+ echo "${version%%.*}"
+ fi
+}
+
+function detect_java_home() {
+ # Resolve JAVA_HOME for target major version using common env vars or macOS
helper.
+ local target="$1"
+ local java_home=""
+ local var
+
+ if [[ "$target" == "8" ]]; then
+ for var in JAVA_HOME_8 JAVA_HOME_8_X64 JAVA_HOME_1_8 JAVA_HOME_1_8_X64; do
+ if [[ -n "${!var:-}" ]]; then
+ java_home="${!var}"
+ break
+ fi
+ done
+ elif [[ "$target" == "17" ]]; then
+ for var in JAVA_HOME_17 JAVA_HOME_17_X64; do
+ if [[ -n "${!var:-}" ]]; then
+ java_home="${!var}"
+ break
+ fi
+ done
+ fi
+
+ if [[ -z "$java_home" ]] && command -v /usr/libexec/java_home >/dev/null
2>&1; then
+ if [[ "$target" == "8" ]]; then
+ java_home="$(/usr/libexec/java_home -v 1.8 2>/dev/null || true)"
+ else
+ java_home="$(/usr/libexec/java_home -v 17 2>/dev/null || true)"
+ fi
+ fi
+
+ if [[ -z "$java_home" ]] && [[ -n "${JAVA_HOME:-}" ]]; then
+ local current_major
+ if [[ -x "${JAVA_HOME}/bin/java" ]]; then
+ current_major="$(java_major "${JAVA_HOME}/bin/java" || true)"
+ else
+ current_major=""
+ fi
+ if [[ "$current_major" == "$target" ]]; then
+ java_home="$JAVA_HOME"
+ fi
+ fi
+
+ echo "$java_home"
+}
+
+function switch_jdk() {
+ # Update JAVA_HOME/PATH for the requested major version, fail fast if
missing.
+ local target="$1"
+ local java_home
+
+ java_home="$(detect_java_home "$target")"
+ if [[ -z "$java_home" ]]; then
+ echo "JDK ${target} not found. Set JAVA_HOME_${target} or install JDK
${target}." >&2
+ exit 1
+ fi
+
+ export JAVA_HOME="$java_home"
+ export PATH="$JAVA_HOME/bin:$ORIGINAL_PATH"
+}
+
+function prepare_for_spark() {
+ # Spark 4.x requires Scala 2.13 + JDK 17; Spark 3.x uses Scala 2.12 + JDK 8.
+ local sparkver="$1"
+ if [[ "$sparkver" =~ ^spark-4\. ]]; then
+ SCALA_PROFILE=scala-2.13
+ switch_jdk 17
+ else
+ SCALA_PROFILE=scala-2.12
+ switch_jdk 8
+ fi
+}
+function run_maven_format() {
if [[ "$CHECK" == "true" ]]; then
"${PROJECT_DIR}"/build/mvn spotless:check compile test-compile
scalafix:scalafix -Dscalafix.mode=CHECK -Dscalafix.skipTest=true
-DskipBuildNative -P"${MODE}" -P"${SCALA_PROFILE}" "$@"
else
@@ -40,6 +128,11 @@ function run_maven() {
fi
}
+function run_maven_compile() {
+ # Only compile/test-compile to avoid Spotless/scalafmt conflicts across
Spark versions.
+ "${PROJECT_DIR}"/build/mvn compile test-compile -DskipBuildNative
-P"${MODE}" -P"${SCALA_PROFILE}" "$@"
+}
+
# check or format the rust code
if [[ "$CHECK" == "true" ]]; then
cargo fmt --check
@@ -48,16 +141,18 @@ else
cargo fmt --all -q --
fi
-# Check or format all code, including third-party code, with spark-3.5
+# Check or format all code, including third-party code, with spark-3.5
(formatting authority).
sparkver=spark-3.5
+prepare_for_spark "${sparkver}"
for celebornver in celeborn-0.5 celeborn-0.6
do
- run_maven -P"${sparkver}" -Pceleborn,"${celebornver}" -Puniffle,uniffle-0.10
-Ppaimon,paimon-1.2 -Pflink,flink-1.18 -Piceberg,iceberg-1.9
+ run_maven_format -P"${sparkver}" -Pceleborn,"${celebornver}"
-Puniffle,uniffle-0.10 -Ppaimon,paimon-1.2 -Pflink-1.18 -Piceberg,iceberg-1.9
done
-sparkvers=(spark-3.0 spark-3.1 spark-3.2 spark-3.3 spark-3.4)
+sparkvers=(spark-3.0 spark-3.1 spark-3.2 spark-3.3 spark-3.4 spark-4.0
spark-4.1)
for sparkver in "${sparkvers[@]}"
do
- run_maven -P"${sparkver}"
+ prepare_for_spark "${sparkver}"
+ run_maven_compile -P"${sparkver}"
done