Updated Branches: refs/heads/master 2a05403a7 -> 59f475c79
Workers should use working directory as spark home if it's not specified


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/00a3f7ee
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/00a3f7ee
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/00a3f7ee

Branch: refs/heads/master
Commit: 00a3f7eec584fa52266ff0fe3e85bbe7603183a6
Parents: 3fcc68b
Author: Patrick Wendell <pwend...@gmail.com>
Authored: Wed Jan 15 10:58:02 2014 -0800
Committer: Patrick Wendell <pwend...@gmail.com>
Committed: Wed Jan 15 11:05:36 2014 -0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/00a3f7ee/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 5182dcb..312560d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -209,8 +209,11 @@ private[spark] class Worker(
         logWarning("Invalid Master (" + masterUrl + ") attempted to launch executor.")
       } else {
         logInfo("Asked to launch executor %s/%d for %s".format(appId, execId, appDesc.name))
+        // TODO (pwendell): We shuld make sparkHome an Option[String] in
+        // ApplicationDescription to be more explicit about this.
+        val effectiveSparkHome = Option(execSparkHome_).getOrElse(sparkHome.getAbsolutePath)
         val manager = new ExecutorRunner(appId, execId, appDesc, cores_, memory_,
-          self, workerId, host, new File(execSparkHome_), workDir, akkaUrl, ExecutorState.RUNNING)
+          self, workerId, host, new File(effectiveSparkHome), workDir, akkaUrl, ExecutorState.RUNNING)
         executors(appId + "/" + execId) = manager
         manager.start()
         coresUsed += cores_
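
The change boils down to a null-safe fallback: if the application did not send an executor spark home, the worker uses its own sparkHome (per the commit message, the working directory when nothing else is specified). Below is a minimal standalone Scala sketch of that pattern; SparkHomeFallback and resolveSparkHome are illustrative names only, not part of Worker.scala.

import java.io.File

// Minimal sketch of the fallback used in the patch above: prefer the spark
// home sent with the application, otherwise fall back to the worker's own
// spark home (its working directory when none was configured).
object SparkHomeFallback {
  def resolveSparkHome(execSparkHome: String, workerSparkHome: File): File = {
    // Option(null) is None, so a missing per-application spark home
    // falls through getOrElse to the worker's default.
    val effective = Option(execSparkHome).getOrElse(workerSparkHome.getAbsolutePath)
    new File(effective)
  }

  def main(args: Array[String]): Unit = {
    val workerHome = new File(".")                        // worker's working directory
    println(resolveSparkHome(null, workerHome))           // falls back to the worker home
    println(resolveSparkHome("/opt/spark", workerHome))   // uses the application's value
  }
}

As the TODO in the diff notes, making sparkHome an Option[String] in ApplicationDescription would express "not specified" explicitly instead of relying on a null check.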