Github user mateiz commented on a diff in the pull request:
https://github.com/apache/spark/pull/664#discussion_r12315419
--- Diff: core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala ---
@@ -130,50 +145,51 @@ object SparkSubmit {
childArgs += ("--class", appArgs.mainClass)
}
+ // Make sure YARN is included in our build if we're trying to use it
if (clusterManager == YARN) {
- // The choice of class is arbitrary, could use any spark-yarn class
if (!Utils.classIsLoadable("org.apache.spark.deploy.yarn.Client") &&
!Utils.isTesting) {
-        val msg = "Could not load YARN classes. This copy of Spark may not have been compiled " +
-          "with YARN support."
-        throw new Exception(msg)
+        printErrorAndExit("Could not load YARN classes. " +
+          "This copy of Spark may not have been compiled with YARN support.")
}
}
// Special flag to avoid deprecation warnings at the client
sysProps("SPARK_SUBMIT") = "true"
+    // A list of rules to map each argument to system properties or command-line options in
+    // each deploy mode; we iterate through these below
val options = List[OptionAssigner](
-      new OptionAssigner(appArgs.master, ALL_CLUSTER_MGRS, false, sysProp = "spark.master"),
-      new OptionAssigner(appArgs.driverExtraClassPath, STANDALONE | YARN, true,
+      OptionAssigner(appArgs.master, ALL_CLUSTER_MGRS, false, sysProp = "spark.master"),
+      OptionAssigner(appArgs.driverExtraClassPath, STANDALONE | YARN, true,
         sysProp = "spark.driver.extraClassPath"),
-      new OptionAssigner(appArgs.driverExtraJavaOptions, STANDALONE | YARN, true,
+      OptionAssigner(appArgs.driverExtraJavaOptions, STANDALONE | YARN, true,
         sysProp = "spark.driver.extraJavaOptions"),
-      new OptionAssigner(appArgs.driverExtraLibraryPath, STANDALONE | YARN, true,
+      OptionAssigner(appArgs.driverExtraLibraryPath, STANDALONE | YARN, true,
         sysProp = "spark.driver.extraLibraryPath"),
-      new OptionAssigner(appArgs.driverMemory, YARN, true, clOption = "--driver-memory"),
-      new OptionAssigner(appArgs.name, YARN, true, clOption = "--name"),
-      new OptionAssigner(appArgs.queue, YARN, true, clOption = "--queue"),
-      new OptionAssigner(appArgs.queue, YARN, false, sysProp = "spark.yarn.queue"),
-      new OptionAssigner(appArgs.numExecutors, YARN, true, clOption = "--num-executors"),
-      new OptionAssigner(appArgs.numExecutors, YARN, false, sysProp = "spark.executor.instances"),
-      new OptionAssigner(appArgs.executorMemory, YARN, true, clOption = "--executor-memory"),
-      new OptionAssigner(appArgs.executorMemory, STANDALONE | MESOS | YARN, false,
+      OptionAssigner(appArgs.driverMemory, YARN, true, clOption = "--driver-memory"),
+      OptionAssigner(appArgs.name, YARN, true, clOption = "--name"),
+      OptionAssigner(appArgs.queue, YARN, true, clOption = "--queue"),
+      OptionAssigner(appArgs.queue, YARN, false, sysProp = "spark.yarn.queue"),
+      OptionAssigner(appArgs.numExecutors, YARN, true, clOption = "--num-executors"),
+      OptionAssigner(appArgs.numExecutors, YARN, false, sysProp = "spark.executor.instances"),
+      OptionAssigner(appArgs.executorMemory, YARN, true, clOption = "--executor-memory"),
+      OptionAssigner(appArgs.executorMemory, STANDALONE | MESOS | YARN, false,
         sysProp = "spark.executor.memory"),
-      new OptionAssigner(appArgs.driverMemory, STANDALONE, true, clOption = "--memory"),
-      new OptionAssigner(appArgs.driverCores, STANDALONE, true, clOption = "--cores"),
-      new OptionAssigner(appArgs.executorCores, YARN, true, clOption = "--executor-cores"),
-      new OptionAssigner(appArgs.executorCores, YARN, false, sysProp = "spark.executor.cores"),
-      new OptionAssigner(appArgs.totalExecutorCores, STANDALONE | MESOS, false,
+      OptionAssigner(appArgs.driverMemory, STANDALONE, true, clOption = "--memory"),
+      OptionAssigner(appArgs.driverCores, STANDALONE, true, clOption = "--cores"),
+      OptionAssigner(appArgs.executorCores, YARN, true, clOption = "--executor-cores"),
+      OptionAssigner(appArgs.executorCores, YARN, false, sysProp = "spark.executor.cores"),
+      OptionAssigner(appArgs.totalExecutorCores, STANDALONE | MESOS, false,
         sysProp = "spark.cores.max"),
-      new OptionAssigner(appArgs.files, YARN, false, sysProp = "spark.yarn.dist.files"),
-      new OptionAssigner(appArgs.files, YARN, true, clOption = "--files"),
-      new OptionAssigner(appArgs.archives, YARN, false, sysProp = "spark.yarn.dist.archives"),
-      new OptionAssigner(appArgs.archives, YARN, true, clOption = "--archives"),
-      new OptionAssigner(appArgs.jars, YARN, true, clOption = "--addJars"),
-      new OptionAssigner(appArgs.files, LOCAL | STANDALONE | MESOS, true, sysProp = "spark.files"),
-      new OptionAssigner(appArgs.jars, LOCAL | STANDALONE | MESOS, false, sysProp = "spark.jars"),
-      new OptionAssigner(appArgs.name, LOCAL | STANDALONE | MESOS, false,
-        sysProp = "spark.app.name")
+      OptionAssigner(appArgs.files, YARN, false, sysProp = "spark.yarn.dist.files"),
+      OptionAssigner(appArgs.files, YARN, true, clOption = "--files"),
+      OptionAssigner(appArgs.archives, YARN, false, sysProp = "spark.yarn.dist.archives"),
+      OptionAssigner(appArgs.archives, YARN, true, clOption = "--archives"),
+      OptionAssigner(appArgs.jars, YARN, true, clOption = "--addJars"),
+      OptionAssigner(appArgs.files, LOCAL | STANDALONE | MESOS, false, sysProp = "spark.files"),
--- End diff --
I added this line to set spark.files even when running in client mode --
not sure why it wasn't set before, but it's the correct thing to do.
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---