This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch update-release-infra-branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git

commit 0e99202ed7b1be477f14ded121ef85b3b3097629
Author: Wenchen Fan <[email protected]>
AuthorDate: Wed Jan 21 17:45:55 2026 +0800

    [SPARK-XXXXX] Auto-detect Java version in release scripts
    
    ### What changes were proposed in this pull request?
    
    Updated the `init_java` function in `release-util.sh` to auto-detect and
    set the appropriate Java version based on the Spark version being built:
    - Spark < 4.0: Uses Java 8
    - Spark >= 4.0: Uses Java 17
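    
    As an illustration only (not part of the patch text), the selection
    reduces to a bash string comparison on `$RELEASE_VERSION`. Note that `<`
    inside `[[ ]]` compares lexicographically, which is sufficient while
    Spark major versions remain single-digit:
    
        if [[ "$RELEASE_VERSION" < "4.0" ]]; then
          echo "Selecting Java 8 for Spark $RELEASE_VERSION"
        else
          echo "Selecting Java 17 for Spark $RELEASE_VERSION"
        fi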
    
    ### Why are the changes needed?
    
    This allows a single Docker image (with multiple Java versions installed)
    to build all Spark versions. Combined with updates to the master branch's
    Dockerfile to install both Java 8 and 17, this eliminates the need to
    maintain separate Dockerfiles for each branch.
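    
    A minimal sketch of what such a Dockerfile's install step might run
    (Debian/Ubuntu package names are assumed here and may differ from the
    actual Dockerfile; the exported variables and paths match the ones
    probed by `init_java` in the diff below):
    
        # Install both JDKs side by side so one image can build any branch.
        apt-get update
        apt-get install -y openjdk-8-jdk openjdk-17-jdk
        # Override hooks consumed by init_java.
        export JAVA8_HOME=/usr/lib/jvm/java-8-openjdk-amd64
        export JAVA17_HOME=/usr/lib/jvm/java-17-openjdk-amd64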
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Manual testing by building releases with the updated scripts.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Yes.
---
 dev/create-release/release-util.sh | 41 +++++++++++++++++++++++++++++++++++++-
 1 file changed, 40 insertions(+), 1 deletion(-)

diff --git a/dev/create-release/release-util.sh b/dev/create-release/release-util.sh
index acdecadeee33..ba9420419d7a 100755
--- a/dev/create-release/release-util.sh
+++ b/dev/create-release/release-util.sh
@@ -229,13 +229,52 @@ function is_dry_run {
   [[ $DRY_RUN = 1 ]]
 }
 
-# Initializes JAVA_VERSION to the version of the JVM in use.
+# Initializes JAVA_HOME and JAVA_VERSION based on Spark version.
+# For Spark 4.0+, use Java 17. For earlier versions, use Java 8.
+# This allows a single Docker image (with multiple Java versions) to build all Spark versions.
 function init_java {
+  # If JAVA_HOME is already set externally (e.g., via -j flag), use it
+  if [ -n "$JAVA_HOME" ] && [ -d "$JAVA_HOME" ]; then
+    echo "Using provided JAVA_HOME: $JAVA_HOME"
+  else
+    # Auto-detect appropriate Java version based on RELEASE_VERSION
+    # Spark 4.0+ requires Java 17, earlier versions use Java 8
+    if [[ -z "$RELEASE_VERSION" ]]; then
+      error "RELEASE_VERSION is not set. Cannot determine Java version."
+    fi
+
+    if [[ "$RELEASE_VERSION" < "4.0" ]]; then
+      echo "Detected Spark version $RELEASE_VERSION - using Java 8"
+      if [ -n "$JAVA8_HOME" ] && [ -d "$JAVA8_HOME" ]; then
+        export JAVA_HOME="$JAVA8_HOME"
+      elif [ -d "/usr/lib/jvm/java-8-openjdk-amd64" ]; then
+        export JAVA_HOME="/usr/lib/jvm/java-8-openjdk-amd64"
+      elif [ -d "/usr/lib/jvm/java-8-openjdk-arm64" ]; then
+        export JAVA_HOME="/usr/lib/jvm/java-8-openjdk-arm64"
+      else
+        error "Java 8 is required for Spark $RELEASE_VERSION but not found."
+      fi
+    else
+      echo "Detected Spark version $RELEASE_VERSION - using Java 17"
+      if [ -n "$JAVA17_HOME" ] && [ -d "$JAVA17_HOME" ]; then
+        export JAVA_HOME="$JAVA17_HOME"
+      elif [ -d "/usr/lib/jvm/java-17-openjdk-amd64" ]; then
+        export JAVA_HOME="/usr/lib/jvm/java-17-openjdk-amd64"
+      elif [ -d "/usr/lib/jvm/java-17-openjdk-arm64" ]; then
+        export JAVA_HOME="/usr/lib/jvm/java-17-openjdk-arm64"
+      else
+        error "Java 17 is required for Spark $RELEASE_VERSION but not found."
+      fi
+    fi
+  fi
+
   if [ -z "$JAVA_HOME" ]; then
     error "JAVA_HOME is not set."
   fi
+
   JAVA_VERSION=$("${JAVA_HOME}"/bin/javac -version 2>&1 | cut -d " " -f 2)
   export JAVA_VERSION
+  echo "Using Java version: $JAVA_VERSION (JAVA_HOME=$JAVA_HOME)"
 }
 
 # Initializes MVN_EXTRA_OPTS and SBT_OPTS depending on the JAVA_VERSION in use. Requires init_java.
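
A usage sketch of the updated function (version numbers and paths below are
illustrative, not taken from an actual release run):

    # Spark 3.x release: init_java resolves Java 8 via JAVA8_HOME.
    RELEASE_VERSION=3.5.4 JAVA8_HOME=/opt/jdk8 init_java

    # Spark 4.x release: init_java resolves Java 17 via JAVA17_HOME.
    RELEASE_VERSION=4.0.0 JAVA17_HOME=/opt/jdk17 init_java

    # An externally provided JAVA_HOME (e.g., via the -j flag) wins outright.
    JAVA_HOME=/opt/custom-jdk init_java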


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
