Repository: spark
Updated Branches:
  refs/heads/branch-1.0 9754d1b12 -> 5f48721ec


SPARK-1588.  Restore SPARK_YARN_USER_ENV and SPARK_JAVA_OPTS for YARN.

Author: Sandy Ryza <sa...@cloudera.com>

Closes #586 from sryza/sandy-spark-1588 and squashes the following commits:

35eb38e [Sandy Ryza] Scalify
b361684 [Sandy Ryza] SPARK-1588.  Restore SPARK_YARN_USER_ENV and SPARK_JAVA_OPTS for YARN.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5f48721e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5f48721e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5f48721e

Branch: refs/heads/branch-1.0
Commit: 5f48721ec29284b0b9527b2650cf8889c46ec0a0
Parents: 9754d1b
Author: Sandy Ryza <sa...@cloudera.com>
Authored: Tue Apr 29 12:54:02 2014 -0700
Committer: Patrick Wendell <pwend...@gmail.com>
Committed: Tue Apr 29 12:54:56 2014 -0700

----------------------------------------------------------------------
 .../org/apache/spark/deploy/yarn/ClientBase.scala   | 16 +++++++++++++---
 .../spark/deploy/yarn/ExecutorRunnableUtil.scala    |  4 ++--
 2 files changed, 15 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5f48721e/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
----------------------------------------------------------------------
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index f2be821..27a518c 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -263,9 +263,13 @@ trait ClientBase extends Logging {
     distCacheMgr.setDistFilesEnv(env)
     distCacheMgr.setDistArchivesEnv(env)
 
-    // Allow users to specify some environment variables.
-    YarnSparkHadoopUtil.setEnvFromInputString(env, System.getenv("SPARK_YARN_USER_ENV"),
-      File.pathSeparator)
+    sys.env.get("SPARK_YARN_USER_ENV").foreach { userEnvs =>
+      // Allow users to specify some environment variables.
+      YarnSparkHadoopUtil.setEnvFromInputString(env, userEnvs, File.pathSeparator)
+
+      // Pass SPARK_YARN_USER_ENV itself to the AM so it can use it to set up executor environments.
+      env("SPARK_YARN_USER_ENV") = userEnvs
+    }
 
     env
   }
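
For context, SPARK_YARN_USER_ENV is documented as a comma-separated list of
KEY=VALUE pairs. Below is a simplified, self-contained sketch of the kind of
parsing YarnSparkHadoopUtil.setEnvFromInputString performs on that string;
the real helper is not shown in this diff, and the append-on-duplicate
behavior here is an assumption for illustration only.

    import java.io.File
    import scala.collection.mutable.HashMap

    // Sketch only: parse "KEY1=VAL1,KEY2=VAL2" into the env map. If a key
    // is already present, append with the separator (plausible for
    // classpath-style variables; assumed, not taken from the patch).
    def setEnvFromInputStringSketch(
        env: HashMap[String, String],
        inputString: String,
        separator: String): Unit = {
      if (inputString != null && inputString.nonEmpty) {
        inputString.split(",").foreach { pair =>
          val parts = pair.split("=", 2)
          if (parts.length == 2) {
            val (key, value) = (parts(0).trim, parts(1))
            env(key) = env.get(key).map(_ + separator + value).getOrElse(value)
          }
        }
      }
    }

    // Usage mirroring the client-side call in the hunk above:
    val env = HashMap[String, String]()
    setEnvFromInputStringSketch(env, "JAVA_HOME=/jdk64,FOO=bar", File.pathSeparator)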
@@ -322,6 +326,12 @@ trait ClientBase extends Logging {
       JAVA_OPTS += "-XX:CMSIncrementalDutyCycle=10"
     }
 
+    // SPARK_JAVA_OPTS is deprecated, but for backwards compatibility:
+    sys.env.get("SPARK_JAVA_OPTS").foreach { opts =>
+      sparkConf.set("spark.executor.extraJavaOptions", opts)
+      sparkConf.set("spark.driver.extraJavaOptions", opts)
+    }
+
     // TODO: it might be nicer to pass these as an internal environment variable rather than
     // as Java options, due to complications with string parsing of nested quotes.
     if (args.amClass == classOf[ExecutorLauncher].getName) {
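
Note that, as written, a non-empty SPARK_JAVA_OPTS unconditionally overwrites
any spark.executor.extraJavaOptions or spark.driver.extraJavaOptions already
set on the SparkConf. A user migrating off the deprecated variable would set
the 1.0 properties directly; a minimal sketch, with an illustrative option
value:

    import org.apache.spark.SparkConf

    // Equivalent modern configuration: set the per-role options that
    // SPARK_JAVA_OPTS now feeds into ("-verbose:gc" is illustrative).
    val conf = new SparkConf()
      .set("spark.executor.extraJavaOptions", "-verbose:gc")
      .set("spark.driver.extraJavaOptions", "-verbose:gc")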

http://git-wip-us.apache.org/repos/asf/spark/blob/5f48721e/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
----------------------------------------------------------------------
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
index 7d07f6f..96f8aa9 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
@@ -71,8 +71,8 @@ trait ExecutorRunnableUtil extends Logging {
     /*
         else {
          // If no java_opts specified, default to using -XX:+CMSIncrementalMode
-          // It might be possible that other modes/config is being done in SPARK_JAVA_OPTS, so we dont
-          // want to mess with it.
+          // It might be possible that other modes/config is being done in spark.executor.extraJavaOptions,
+          // so we dont want to mess with it.
          // In our expts, using (default) throughput collector has severe perf ramnifications in
          // multi-tennent machines
          // The options are based on
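
The ClientBase hunk above forwards SPARK_YARN_USER_ENV to the AM "so it can
use it to set up executor environments." A plausible sketch of that AM-side
step, assuming the AM reuses the same helper on the container environment
(the code that actually does this is not part of this diff):

    import java.io.File
    import scala.collection.mutable.HashMap
    import org.apache.spark.deploy.yarn.YarnSparkHadoopUtil

    // Assumed sketch: read the forwarded variable from the AM's own
    // environment and apply it when building each executor's launch env.
    val executorEnv = HashMap[String, String]()
    sys.env.get("SPARK_YARN_USER_ENV").foreach { userEnvs =>
      YarnSparkHadoopUtil.setEnvFromInputString(executorEnv, userEnvs, File.pathSeparator)
    }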
