This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 64d1cc96fa9c [SPARK-46492][BUILD] Simplify the Java version check in 
`SparkBuild.scala`
64d1cc96fa9c is described below

commit 64d1cc96fa9c4bb3c437aca876d7755e2f4de223
Author: yangjie01 <[email protected]>
AuthorDate: Sat Dec 23 15:11:36 2023 -0800

    [SPARK-46492][BUILD] Simplify the Java version check in `SparkBuild.scala`
    
    ### What changes were proposed in this pull request?
    This PR aims to simplify the Java version check in `SparkBuild.scala`, since 
Apache Spark 4.0 requires Java 17 as the minimum supported version
    
    ### Why are the changes needed?
    Clean up Java version checks that are no longer needed.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #44465 from LuciferYang/java-version-check.
    
    Lead-authored-by: yangjie01 <[email protected]>
    Co-authored-by: YangJie <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 project/SparkBuild.scala | 18 +++++-------------
 1 file changed, 5 insertions(+), 13 deletions(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 75d3a2cf18df..db546dcdd5bb 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -294,21 +294,16 @@ object SparkBuild extends PomBuild {
 
     javaOptions ++= {
       val versionParts = System.getProperty("java.version").split("[+.\\-]+", 
3)
-      var major = versionParts(0).toInt
+      val major = versionParts(0).toInt
       if (major >= 21) {
         Seq("--add-modules=jdk.incubator.vector", "-Dforeign.restricted=warn")
-      } else if (major >= 16) {
-        Seq("--add-modules=jdk.incubator.vector,jdk.incubator.foreign", 
"-Dforeign.restricted=warn")
       } else {
-        Seq.empty
+        Seq("--add-modules=jdk.incubator.vector,jdk.incubator.foreign", 
"-Dforeign.restricted=warn")
       }
     },
 
     (Compile / doc / javacOptions) ++= {
-      val versionParts = System.getProperty("java.version").split("[+.\\-]+", 
3)
-      var major = versionParts(0).toInt
-      if (major == 1) major = versionParts(1).toInt
-      if (major >= 8) Seq("-Xdoclint:all", "-Xdoclint:-missing") else Seq.empty
+      Seq("-Xdoclint:all", "-Xdoclint:-missing")
     },
 
     javaVersion := 
SbtPomKeys.effectivePom.value.getProperties.get("java.version").asInstanceOf[String],
@@ -1412,10 +1407,6 @@ object Unidoc {
     },
 
     (JavaUnidoc / unidoc / javacOptions) := {
-      val versionParts = System.getProperty("java.version").split("[+.\\-]+", 
3)
-      var major = versionParts(0).toInt
-      if (major == 1) major = versionParts(1).toInt
-
       Seq(
         "-windowtitle", "Spark " + version.value.replaceAll("-SNAPSHOT", "") + 
" JavaDoc",
         "-public",
@@ -1427,7 +1418,8 @@ object Unidoc {
         "-tag", "constructor:X",
         "-tag", "todo:X",
         "-tag", "groupname:X",
-      ) ++ { if (major >= 9) Seq("--ignore-source-errors", "-notree") else 
Seq.empty }
+        "--ignore-source-errors", "-notree"
+      )
     },
 
     // Use GitHub repository for Scaladoc source links


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to