Repository: spark
Updated Branches:
  refs/heads/branch-1.5 dc058f2ff -> 8fb6696cd


[SPARK-11555] spark on yarn spark-class --num-workers doesn't work

I tested the various ways of specifying the number of executors with both
spark-submit and spark-class, in both client and cluster mode where applicable:

--num-workers, --num-executors, spark.executor.instances,
SPARK_EXECUTOR_INSTANCES, and the default with nothing supplied
(see the precedence sketch below).
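
With dynamic allocation off, the resulting order of precedence is:
spark.executor.instances, then SPARK_EXECUTOR_INSTANCES, then the
command-line flag, then the built-in default. Below is a minimal standalone
sketch of that resolution order (illustrative names only, not the actual
Spark internals; DefaultNumExecutors mirrors YARN's DEFAULT_NUMBER_EXECUTORS):

object ExecutorCountPrecedence {
  val DefaultNumExecutors = 2

  def resolve(
      confInstances: Option[Int],     // spark.executor.instances
      envInstances: Option[Int],      // SPARK_EXECUTOR_INSTANCES
      cliNumExecutors: Option[Int]    // --num-executors / --num-workers
      ): Int = {
    // The command-line flag replaces the built-in default...
    val fromCli = cliNumExecutors.getOrElse(DefaultNumExecutors)
    // ...the environment variable overrides the flag...
    val fromEnv = envInstances.getOrElse(fromCli)
    // ...and the conf entry (system property) wins over everything.
    confInstances.getOrElse(fromEnv)
  }

  def main(args: Array[String]): Unit = {
    assert(resolve(None, None, None) == 2)            // default
    assert(resolve(None, None, Some(10)) == 10)       // --num-executors 10
    assert(resolve(None, Some(5), Some(10)) == 5)     // env var beats the flag
    assert(resolve(Some(4), Some(5), Some(10)) == 4)  // conf beats the env var
    println("all precedence cases pass")
  }
}

Note that spark-submit translates --num-executors into
spark.executor.instances, so the flag-as-fallback path only matters when
invoking spark-class directly.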

Author: Thomas Graves <tgra...@staydecay.corp.gq1.yahoo.com>

Closes #9523 from tgravescs/SPARK-11555.

(cherry picked from commit f6680cdc5d2912dea9768ef5c3e2cc101b06daf8)
Signed-off-by: Marcelo Vanzin <van...@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8fb6696c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8fb6696c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8fb6696c

Branch: refs/heads/branch-1.5
Commit: 8fb6696cd6e066957e34e1123df5691d8e4682d2
Parents: dc058f2
Author: Thomas Graves <tgra...@staydecay.corp.gq1.yahoo.com>
Authored: Fri Nov 6 15:24:33 2015 -0800
Committer: Marcelo Vanzin <van...@cloudera.com>
Committed: Fri Nov 6 15:24:58 2015 -0800

----------------------------------------------------------------------
 .../scala/org/apache/spark/deploy/yarn/ClientArguments.scala  | 2 +-
 .../org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala    | 7 +++++--
 2 files changed, 6 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/8fb6696c/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
index 1165061..a9f4374 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
@@ -81,7 +81,7 @@ private[spark] class ClientArguments(args: Array[String], sparkConf: SparkConf)
       .orNull
     // If dynamic allocation is enabled, start at the configured initial number of executors.
     // Default to minExecutors if no initialExecutors is set.
-    numExecutors = YarnSparkHadoopUtil.getInitialTargetExecutorNumber(sparkConf)
+    numExecutors = YarnSparkHadoopUtil.getInitialTargetExecutorNumber(sparkConf, numExecutors)
     principal = Option(principal)
       .orElse(sparkConf.getOption("spark.yarn.principal"))
       .orNull

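The one-line change above is the heart of the fix: before it, the value
parsed from --num-workers/--num-executors was discarded, because the helper
recomputed the count from the conf alone. A standalone illustration of the
old versus new shape (hypothetical names, not the Spark source):

object DroppedFlagSketch {
  val Default = 2

  // Old shape: only the conf is consulted, so a parsed CLI value is lost.
  def oldResolve(confValue: Option[Int]): Int =
    confValue.getOrElse(Default)

  // New shape: the CLI-parsed value is threaded through as the fallback.
  def newResolve(confValue: Option[Int], parsedNumExecutors: Int): Int =
    confValue.getOrElse(parsedNumExecutors)

  def main(args: Array[String]): Unit = {
    val parsed = 10                    // e.g. spark-class ... --num-executors 10
    println(oldResolve(None))          // 2: the flag was silently ignored
    println(newResolve(None, parsed))  // 10: the flag is honored
  }
}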
http://git-wip-us.apache.org/repos/asf/spark/blob/8fb6696c/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
index 991c909..73023d7 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
@@ -355,8 +355,11 @@ object YarnSparkHadoopUtil {
   /**
    * Getting the initial target number of executors depends on whether dynamic allocation is
    * enabled.
+   * If not using dynamic allocation, it gets the number of executors requested by the user.
    */
-  def getInitialTargetExecutorNumber(conf: SparkConf): Int = {
+  def getInitialTargetExecutorNumber(
+      conf: SparkConf,
+      numExecutors: Int = DEFAULT_NUMBER_EXECUTORS): Int = {
     if (Utils.isDynamicAllocationEnabled(conf)) {
       val minNumExecutors = conf.getInt("spark.dynamicAllocation.minExecutors", 0)
       val initialNumExecutors =
@@ -369,7 +372,7 @@ object YarnSparkHadoopUtil {
       initialNumExecutors
     } else {
       val targetNumExecutors =
-        sys.env.get("SPARK_EXECUTOR_INSTANCES").map(_.toInt).getOrElse(DEFAULT_NUMBER_EXECUTORS)
+        sys.env.get("SPARK_EXECUTOR_INSTANCES").map(_.toInt).getOrElse(numExecutors)
       // System property can override environment variable.
       conf.getInt("spark.executor.instances", targetNumExecutors)
     }

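For anyone verifying the backport, here is a quick sanity check along the
lines of the testing described above. It assumes a branch-1.5 build
containing this commit on the classpath, dynamic allocation disabled, and
SPARK_EXECUTOR_INSTANCES unset in the shell; the check object itself is
hypothetical:

package org.apache.spark.deploy.yarn  // same package, in case of limited visibility

import org.apache.spark.SparkConf

object InitialExecutorsCheck {
  def main(args: Array[String]): Unit = {
    // Nothing configured: the value threaded through from ClientArguments wins.
    val bare = new SparkConf(loadDefaults = false)
    assert(YarnSparkHadoopUtil.getInitialTargetExecutorNumber(bare, 10) == 10)

    // spark.executor.instances still overrides the passed-in value.
    val withConf = new SparkConf(loadDefaults = false)
      .set("spark.executor.instances", "4")
    assert(YarnSparkHadoopUtil.getInitialTargetExecutorNumber(withConf, 10) == 4)

    println("precedence checks passed")
  }
}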
