This is an automated email from the ASF dual-hosted git repository.
yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 4fc940e8dd1a [SPARK-55831][FOLLOWUP] Fix Maven CI by replacing with dummy SPARK_HOME in test
4fc940e8dd1a is described below
commit 4fc940e8dd1af0fe611cfad98131b8400f92086b
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Mar 9 11:27:42 2026 +0800
[SPARK-55831][FOLLOWUP] Fix Maven CI by replacing with dummy SPARK_HOME in test
### What changes were proposed in this pull request?
Create a dummy SPARK_HOME for test, so that the YARN test won't depend on
`assembly` package.
### Why are the changes needed?
```
- YARN AM JavaOptions *** FAILED ***
java.lang.IllegalStateException: Library directory
'/home/runner/work/spark/spark/assembly/target/scala-2.13/jars' does not exist;
make sure Spark is built.
at
org.apache.spark.launcher.CommandBuilderUtils.checkState(CommandBuilderUtils.java:230)
at
org.apache.spark.launcher.CommandBuilderUtils.findJarsDir(CommandBuilderUtils.java:320)
at
org.apache.spark.launcher.YarnCommandBuilderUtils$.findJarsDir(YarnCommandBuilderUtils.scala:36)
at
org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:711)
at
org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:529)
at
org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:1042)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native
Method)
at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:569)
...
```
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Pass both Maven and SBT CI.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #54651 from pan3793/SPARK-55831-followup.
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
---
.../org/apache/spark/deploy/yarn/ClientSuite.scala | 113 +++++++++++----------
1 file changed, 59 insertions(+), 54 deletions(-)
diff --git
a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
index 918cb790bdc9..29e5cecb3179 100644
---
a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
+++
b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.deploy.yarn
import java.io.{File, FileInputStream, FileNotFoundException, FileOutputStream}
import java.net.URI
-import java.nio.file.Paths
+import java.nio.file.{Files, Paths}
import java.util.Properties
import java.util.concurrent.ConcurrentHashMap
@@ -760,59 +760,64 @@ class ClientSuite extends SparkFunSuite
test("YARN AM JavaOptions") {
Seq("client", "cluster").foreach { deployMode =>
- withTempDir { stagingDir =>
- val sparkConf = new SparkConfWithEnv(
- Map("SPARK_HOME" -> System.getProperty("spark.test.home")))
- .set(SUBMIT_DEPLOY_MODE, deployMode)
- .set(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS, "-Dx=1 -Dy=2")
- .set(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dz=3")
- .set(AM_DEFAULT_JAVA_OPTIONS, "-Da=1 -Db=2")
- .set(AM_JAVA_OPTIONS, "-Dc=3")
-
- val client = createClient(sparkConf)
- val appIdField = classOf[Client]
- .getDeclaredField("org$apache$spark$deploy$yarn$Client$$appId")
- appIdField.setAccessible(true)
- // A dummy ApplicationId impl, only `toString` method will be called
- // in Client.createContainerLaunchContext
- appIdField.set(client, new ApplicationId {
- override def getId: Int = 1
- override def setId(i: Int): Unit = {}
- override def getClusterTimestamp: Long = 1770077136288L
- override def setClusterTimestamp(l: Long): Unit = {}
- override def build(): Unit = {}
- override def toString: String = "application_1770077136288_0001"
- })
- val stagingDirPathField = classOf[Client]
-
.getDeclaredField("org$apache$spark$deploy$yarn$Client$$stagingDirPath")
- stagingDirPathField.setAccessible(true)
- stagingDirPathField.set(client, new Path(stagingDir.getAbsolutePath))
- val _createContainerLaunchContext =
-
PrivateMethod[ContainerLaunchContext](Symbol("createContainerLaunchContext"))
- val containerLaunchContext = client invokePrivate
_createContainerLaunchContext()
-
- val commands = containerLaunchContext.getCommands.asScala
- deployMode match {
- case "client" =>
- // In client mode, spark.yarn.am.defaultJavaOptions and
spark.yarn.am.extraJavaOptions
- // should be set in AM container command JAVA_OPTIONS
- commands should contain("'-Da=1'")
- commands should contain("'-Db=2'")
- commands should contain("'-Dc=3'")
- commands should not contain "'-Dx=1'"
- commands should not contain "'-Dy=2'"
- commands should not contain "'-Dz=3'"
- case "cluster" =>
- // In cluster mode, spark.driver.defaultJavaOptions and
spark.driver.extraJavaOptions
- // should be set in AM container command JAVA_OPTIONS
- commands should not contain "'-Da=1'"
- commands should not contain "'-Db=2'"
- commands should not contain "'-Dc=3'"
- commands should contain ("'-Dx=1'")
- commands should contain ("'-Dy=2'")
- commands should contain ("'-Dz=3'")
- case m =>
- fail(s"Unexpected deploy mode: $m")
+ withTempDir { sparkHome =>
+ // Create jars dir and RELEASE file to avoid IllegalStateException.
+ Files.createDirectory(Paths.get(sparkHome.getPath, "jars"))
+ Files.createFile(Paths.get(sparkHome.getPath, "RELEASE"))
+
+ withTempDir { stagingDir =>
+ val sparkConf = new SparkConfWithEnv(Map("SPARK_HOME" ->
sparkHome.getAbsolutePath))
+ .set(SUBMIT_DEPLOY_MODE, deployMode)
+ .set(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS, "-Dx=1 -Dy=2")
+ .set(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dz=3")
+ .set(AM_DEFAULT_JAVA_OPTIONS, "-Da=1 -Db=2")
+ .set(AM_JAVA_OPTIONS, "-Dc=3")
+
+ val client = createClient(sparkConf)
+ val appIdField = classOf[Client]
+ .getDeclaredField("org$apache$spark$deploy$yarn$Client$$appId")
+ appIdField.setAccessible(true)
+ // A dummy ApplicationId impl, only `toString` method will be called
+ // in Client.createContainerLaunchContext
+ appIdField.set(client, new ApplicationId {
+ override def getId: Int = 1
+ override def setId(i: Int): Unit = {}
+ override def getClusterTimestamp: Long = 1770077136288L
+ override def setClusterTimestamp(l: Long): Unit = {}
+ override def build(): Unit = {}
+ override def toString: String = "application_1770077136288_0001"
+ })
+ val stagingDirPathField = classOf[Client]
+
.getDeclaredField("org$apache$spark$deploy$yarn$Client$$stagingDirPath")
+ stagingDirPathField.setAccessible(true)
+ stagingDirPathField.set(client, new Path(stagingDir.getAbsolutePath))
+ val _createContainerLaunchContext =
+
PrivateMethod[ContainerLaunchContext](Symbol("createContainerLaunchContext"))
+ val containerLaunchContext = client invokePrivate
_createContainerLaunchContext()
+
+ val commands = containerLaunchContext.getCommands.asScala
+ deployMode match {
+ case "client" =>
+ // In client mode, spark.yarn.am.defaultJavaOptions and
spark.yarn.am.extraJavaOptions
+ // should be set in AM container command JAVA_OPTIONS
+ commands should contain("'-Da=1'")
+ commands should contain("'-Db=2'")
+ commands should contain("'-Dc=3'")
+ commands should not contain "'-Dx=1'"
+ commands should not contain "'-Dy=2'"
+ commands should not contain "'-Dz=3'"
+ case "cluster" =>
+ // In cluster mode, spark.driver.defaultJavaOptions and
spark.driver.extraJavaOptions
+ // should be set in AM container command JAVA_OPTIONS
+ commands should not contain "'-Da=1'"
+ commands should not contain "'-Db=2'"
+ commands should not contain "'-Dc=3'"
+ commands should contain ("'-Dx=1'")
+ commands should contain ("'-Dy=2'")
+ commands should contain ("'-Dz=3'")
+ case m =>
+ fail(s"Unexpected deploy mode: $m")
+ }
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]