This is an automated email from the ASF dual-hosted git repository.
ggal pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-livy.git
The following commit(s) were added to refs/heads/master by this push:
new f3e23ed0b [LIVY-1017] Support Java 17, set up JDK 17 based tests
f3e23ed0b is described below
commit f3e23ed0b34da9d44ec06c724d5d03c7cf15aebe
Author: György Gál <[email protected]>
AuthorDate: Wed Oct 22 10:29:20 2025 -0400
[LIVY-1017] Support Java 17, set up JDK 17 based tests
## What changes were proposed in this pull request?
Enable Livy to run with Java 17, and set up unit test and integration test runs
with JDK 17.
## How was this patch tested?
Unit and integration tests.
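For background, JDK 17's module system denies reflective access to java.base
internals that Spark and Livy touch; that is what the --add-opens/--add-exports
flags in this patch work around. A minimal standalone illustration (the class
and field below are only an example, not code from this patch):

    import java.lang.reflect.Field

    // On JDK 17 setAccessible throws java.lang.reflect.InaccessibleObjectException
    // unless the JVM is started with --add-opens=java.base/java.lang=ALL-UNNAMED;
    // on JDK 8 it succeeds unconditionally.
    object AddOpensDemo {
      def main(args: Array[String]): Unit = {
        val f: Field = classOf[String].getDeclaredField("value")
        f.setAccessible(true) // the reflective call gated by the module system
        println("java.lang is open to the unnamed module")
      }
    }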
---
.github/workflows/integration-tests.yaml | 8 ++++++++
.github/workflows/unit-tests.yaml | 10 +++++++++-
bin/livy-server | 15 ++++++++++++++
dev/docker/livy-dev-base/Dockerfile | 1 +
dev/spark/bin/spark-submit | 23 +++++++++++++++++++++-
.../apache/livy/test/framework/MiniCluster.scala | 13 +++++++++---
pom.xml | 22 ++++++++++++++++++---
7 files changed, 84 insertions(+), 8 deletions(-)
diff --git a/.github/workflows/integration-tests.yaml b/.github/workflows/integration-tests.yaml
index 1dafec6ec..754f26f58 100644
--- a/.github/workflows/integration-tests.yaml
+++ b/.github/workflows/integration-tests.yaml
@@ -31,6 +31,11 @@ jobs:
maven_profile:
- "-Pscala-2.11 -Pspark2 -DskipRTests"
- "-Pscala-2.12 -Pspark3"
+ jdk_path:
+ - "/usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java"
+ include:
+ - maven_profile: "-Pscala-2.12 -Pspark3"
+ jdk_path: "/usr/lib/jvm/java-17-openjdk-amd64/bin/java"
steps:
-
name: Checkout
@@ -50,6 +55,9 @@ jobs:
if: ${{ contains(matrix.maven_profile, 'spark3') }}
# This can be removed once support for Python 2 and Spark 2 is removed and the default python executable is python3
run: pyenv global 3 && echo "PYSPARK_PYTHON=$(which python3)" >> "$GITHUB_ENV"
+ -
+ name: Set JDK version
+ run: update-alternatives --set java ${{ matrix.jdk_path }}
-
name: Build with Maven
run: mvn -Pthriftserver ${{ matrix.maven_profile }} -DskipTests -Dmaven.javadoc.skip=true -B -V -e verify
diff --git a/.github/workflows/unit-tests.yaml b/.github/workflows/unit-tests.yaml
index afddbf5d8..f946fd97c 100644
--- a/.github/workflows/unit-tests.yaml
+++ b/.github/workflows/unit-tests.yaml
@@ -30,6 +30,11 @@ jobs:
- "-Pscala-2.11 -Pspark2"
- "-Pscala-2.12 -Pspark2"
- "-Pscala-2.12 -Pspark3"
+ jdk_path:
+ - "/usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java"
+ include:
+ - maven_profile: "-Pscala-2.12 -Pspark3"
+ jdk_path: "/usr/lib/jvm/java-17-openjdk-amd64/bin/java"
steps:
-
name: Checkout
@@ -44,7 +49,10 @@ jobs:
key: ${{ runner.os }}-maven-${{ hashFiles('pom.xml', '*/pom.xml', 'thriftserver/*/pom.xml', 'core/*/pom.xml', 'repl/*/pom.xml', 'scala-api/*/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- -
+ -
+ name: Set JDK version
+ run: update-alternatives --set java ${{ matrix.jdk_path }}
+ -
name: Build with Maven
run: mvn -Pthriftserver ${{ matrix.maven_profile }} -DskipITs -Dmaven.javadoc.skip=true -B -V -e verify
-
diff --git a/bin/livy-server b/bin/livy-server
index a0e2fb7e2..b463423dc 100755
--- a/bin/livy-server
+++ b/bin/livy-server
@@ -102,6 +102,21 @@ start_livy_server() {
LIVY_CLASSPATH="$LIVY_CLASSPATH:$YARN_CONF_DIR"
fi
+ # Required for Java 17 support
+ LIVY_SERVER_JAVA_OPTS+=" -XX:+IgnoreUnrecognizedVMOptions"
+ LIVY_SERVER_JAVA_OPTS+=" --add-opens=java.base/java.lang.invoke=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-opens=java.base/java.lang=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-opens=java.base/java.nio=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-opens=java.base/sun.nio.ch=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-opens=java.base/java.net=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-opens=java.base/java.io=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-exports=java.base/sun.net.dns=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-exports=java.base/sun.net.util=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-exports=java.base/sun.security.x509=ALL-UNNAMED"
+ LIVY_SERVER_JAVA_OPTS+=" --add-exports=java.base/sun.security.util=ALL-UNNAMED"
+
command="$RUNNER $LIVY_SERVER_JAVA_OPTS -cp $LIVY_CLASSPATH:$CLASSPATH
org.apache.livy.server.LivyServer"
if [ $1 = "old" ]; then
diff --git a/dev/docker/livy-dev-base/Dockerfile b/dev/docker/livy-dev-base/Dockerfile
index 25da08725..07711a5f5 100644
--- a/dev/docker/livy-dev-base/Dockerfile
+++ b/dev/docker/livy-dev-base/Dockerfile
@@ -35,6 +35,7 @@ RUN apt-get install -qq \
git \
libkrb5-dev \
openjdk-8-jdk-headless \
+ openjdk-17-jdk-headless \
r-base \
maven \
software-properties-common \
diff --git a/dev/spark/bin/spark-submit b/dev/spark/bin/spark-submit
index 7d78f032b..f3883cc37 100755
--- a/dev/spark/bin/spark-submit
+++ b/dev/spark/bin/spark-submit
@@ -82,5 +82,26 @@ if [ -n "$HADOOP_CONF_DIR" ]; then
DRIVER_CP="$HADOOP_CONF_DIR:$DRIVER_CP"
fi
-echo "Running Spark: " $JAVA_HOME/bin/java $DRIVER_OPTS
org.apache.spark.deploy.SparkSubmit "$@" >&2
+# Taken from Spark's extraJavaTestArgs / JavaModuleOptions
+# Since we are skipping SparkContext.supplementJavaModuleOptions(), we need to add these
+DRIVER_OPTS+=" -XX:+IgnoreUnrecognizedVMOptions"
+DRIVER_OPTS+=" --add-opens=java.base/java.lang=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/java.lang.invoke=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/java.lang.reflect=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/java.io=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/java.net=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/java.nio=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/java.util=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/java.util.concurrent=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/sun.nio.ch=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/sun.nio.cs=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/sun.security.action=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
+DRIVER_OPTS+=" -Djdk.reflect.useDirectMethodHandle=false"
+DRIVER_OPTS+=" --add-exports=java.base/sun.net.dns=ALL-UNNAMED"
+DRIVER_OPTS+=" --add-exports=java.base/sun.net.util=ALL-UNNAMED"
+
+echo "Running Spark: " $JAVA_HOME/bin/java $DRIVER_OPTS -cp "$DRIVER_CP"
org.apache.spark.deploy.SparkSubmit "$@" >&2
exec $JAVA_HOME/bin/java $DRIVER_OPTS -cp "$DRIVER_CP" org.apache.spark.deploy.SparkSubmit "$@"
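The DRIVER_OPTS block above is a hand-maintained copy of Spark's own module
options. A sketch under one assumption (a Spark 3.3+ spark-launcher jar on the
classpath, which exposes JavaModuleOptions.defaultModuleOptions()): printing
Spark's list lets you re-check this script against future Spark upgrades.

    // Prints Spark's canonical module options for comparison with the
    // hand-maintained DRIVER_OPTS list in this script.
    object ModuleOptsDump {
      def main(args: Array[String]): Unit =
        println(org.apache.spark.launcher.JavaModuleOptions.defaultModuleOptions())
    }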
diff --git a/integration-test/src/main/scala/org/apache/livy/test/framework/MiniCluster.scala b/integration-test/src/main/scala/org/apache/livy/test/framework/MiniCluster.scala
index e5b40871e..f1d51671a 100644
--- a/integration-test/src/main/scala/org/apache/livy/test/framework/MiniCluster.scala
+++ b/integration-test/src/main/scala/org/apache/livy/test/framework/MiniCluster.scala
@@ -220,6 +220,11 @@ class MiniCluster(config: Map[String, String]) extends Cluster with MiniClusterU
filtered.mkString(File.pathSeparator)
}
+ private def extraJavaTestArgs: Seq[String] = {
+ Option(System.getProperty("extraJavaTestArgs"))
+ .map(_.split("\\s+").toSeq).getOrElse(Nil)
+ }
+
override def deploy(): Unit = {
if (_tempDir.exists()) {
FileUtils.deleteQuietly(_tempDir)
@@ -240,8 +245,10 @@ class MiniCluster(config: Map[String, String]) extends Cluster with MiniClusterU
_configDir = mkdir("hadoop-conf")
saveProperties(config, new File(configDir, "cluster.conf"))
- hdfs = Some(start(MiniHdfsMain.getClass, new File(configDir, "core-site.xml")))
- yarn = Some(start(MiniYarnMain.getClass, new File(configDir, "yarn-site.xml")))
+ hdfs = Some(start(MiniHdfsMain.getClass, new File(configDir, "core-site.xml"),
+ extraJavaTestArgs))
+ yarn = Some(start(MiniYarnMain.getClass, new File(configDir, "yarn-site.xml"),
+ extraJavaTestArgs))
runLivy()
_hdfsScrathDir = fs.makeQualified(new Path("/"))
@@ -261,7 +268,7 @@ class MiniCluster(config: Map[String, String]) extends Cluster with MiniClusterU
.map { args =>
Seq(args, s"-Djacoco.args=$args")
}.getOrElse(Nil)
- val localLivy = start(MiniLivyMain.getClass, confFile, extraJavaArgs = jacocoArgs)
+ val localLivy = start(MiniLivyMain.getClass, confFile, jacocoArgs ++ extraJavaTestArgs)
val props = loadProperties(confFile)
_livyEndpoint = config.getOrElse("livyEndpoint", props("livy.server.server-url"))
diff --git a/pom.xml b/pom.xml
index a88e449fa..e8588f9a6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -106,6 +106,21 @@
<scalatest.version>3.0.8</scalatest.version>
<scalatra.version>2.6.5</scalatra.version>
<java.version>1.8</java.version>
+ <extraJavaTestArgs>
+ -XX:+IgnoreUnrecognizedVMOptions
+ --add-opens=java.base/java.lang.invoke=ALL-UNNAMED
+ --add-opens=java.base/java.lang=ALL-UNNAMED
+ --add-opens=java.base/java.nio=ALL-UNNAMED
+ --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED
+ --add-opens=java.base/sun.nio.ch=ALL-UNNAMED
+ --add-opens=java.base/java.net=ALL-UNNAMED
+ --add-opens=java.base/java.io=ALL-UNNAMED
+ --add-opens=java.base/sun.util.calendar=ALL-UNNAMED
+ --add-exports=java.base/sun.net.dns=ALL-UNNAMED
+ --add-exports=java.base/sun.net.util=ALL-UNNAMED
+ --add-exports=java.base/sun.security.x509=ALL-UNNAMED
+ --add-exports=java.base/sun.security.util=ALL-UNNAMED
+ </extraJavaTestArgs>
<test.redirectToFile>true</test.redirectToFile>
<execution.root>${user.dir}</execution.root>
<spark.home>${execution.root}/dev/spark</spark.home>
@@ -837,7 +852,7 @@
</systemProperties>
<redirectTestOutputToFile>${test.redirectToFile}</redirectTestOutputToFile>
<useFile>${test.redirectToFile}</useFile>
- <argLine>${argLine} -Xmx2g</argLine>
+ <argLine>${argLine} -Xmx2g ${extraJavaTestArgs}</argLine>
<failIfNoTests>false</failIfNoTests>
</configuration>
</plugin>
@@ -845,7 +860,7 @@
<plugin>
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
- <version>2.0.0</version>
+ <version>2.2.0</version>
<configuration>
<environmentVariables>
<LIVY_TEST>true</LIVY_TEST>
@@ -864,12 +879,13 @@
<skipRTests>${skipRTests}</skipRTests>
<skipPySpark2Tests>${skipPySpark2Tests}</skipPySpark2Tests>
<skipPySpark3Tests>${skipPySpark3Tests}</skipPySpark3Tests>
+ <extraJavaTestArgs>${extraJavaTestArgs}</extraJavaTestArgs>
</systemProperties>
<stdout>D</stdout>
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<junitxml>.</junitxml>
<filereports>WDF TestSuite.txt</filereports>
- <argLine>${argLine} -Xmx2g</argLine>
+ <argLine>${argLine} -Xmx2g ${extraJavaTestArgs}</argLine>
</configuration>
<executions>
<execution>