Repository: spark
Updated Branches:
  refs/heads/master 2afdaa980 -> 5bcb9a7ff
[SPARK-18417][YARN] Define 'spark.yarn.am.port' in yarn config object

## What changes were proposed in this pull request?

This PR defines 'spark.yarn.am.port' in the YARN config.scala, just like the other YARN configurations, which makes the code easier to maintain.

## How was this patch tested?

Build passed, and some YARN unit tests were run.

Author: Weiqing Yang <[email protected]>

Closes #15858 from weiqingy/yarn.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5bcb9a7f
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5bcb9a7f
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5bcb9a7f

Branch: refs/heads/master
Commit: 5bcb9a7ff4bdd7dac75481a951cd7da2133a2e2d
Parents: 2afdaa9
Author: Weiqing Yang <[email protected]>
Authored: Tue Nov 15 13:14:17 2016 -0800
Committer: Marcelo Vanzin <[email protected]>
Committed: Tue Nov 15 13:14:17 2016 -0800

----------------------------------------------------------------------
 .../scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala | 2 +-
 yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala  | 5 +++++
 2 files changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5bcb9a7f/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 918cc2d..0378ef4 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -429,7 +429,7 @@ private[spark] class ApplicationMaster(
   }
 
   private def runExecutorLauncher(securityMgr: SecurityManager): Unit = {
-    val port = sparkConf.getInt("spark.yarn.am.port", 0)
+    val port = sparkConf.get(AM_PORT)
     rpcEnv = RpcEnv.create("sparkYarnAM", Utils.localHostName, port, sparkConf, securityMgr,
       clientMode = true)
     val driverRef = waitForSparkDriver()

http://git-wip-us.apache.org/repos/asf/spark/blob/5bcb9a7f/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
index ca8c890..666cb45 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
@@ -40,6 +40,11 @@ package object config {
     .timeConf(TimeUnit.MILLISECONDS)
     .createOptional
 
+  private[spark] val AM_PORT =
+    ConfigBuilder("spark.yarn.am.port")
+      .intConf
+      .createWithDefault(0)
+
   private[spark] val EXECUTOR_ATTEMPT_FAILURE_VALIDITY_INTERVAL_MS =
     ConfigBuilder("spark.yarn.executor.failuresValidityInterval")
       .doc("Interval after which Executor failures will be considered independent and not " +
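For context, the diff relies on Spark's internal ConfigBuilder/ConfigEntry pattern, which is private[spark] and not usable outside Spark's own source tree. The following is a minimal, self-contained Scala sketch of the same typed-config-entry idea; the names MiniConfigBuilder, MiniIntBuilder, MiniConfigEntry, and readFrom are invented for this illustration and are not Spark APIs.

```scala
// Hypothetical sketch of the typed config-entry pattern used by config.scala.
// These types are NOT Spark's internal API; they only illustrate the idea of
// defining a key's type and default in one place and reading it through a
// typed handle instead of repeating the string key and default at call sites.

// A config entry ties a key to a type, a default value, and a parser.
final case class MiniConfigEntry[T](key: String, default: T, parse: String => T) {
  /** Read the entry from a plain key/value map, falling back to the default. */
  def readFrom(settings: Map[String, String]): T =
    settings.get(key).map(parse).getOrElse(default)
}

// The builder mirrors the fluent style of ConfigBuilder("key").intConf.createWithDefault(0).
final class MiniConfigBuilder(key: String) {
  def intConf: MiniIntBuilder = new MiniIntBuilder(key)
}

final class MiniIntBuilder(key: String) {
  def createWithDefault(default: Int): MiniConfigEntry[Int] =
    MiniConfigEntry(key, default, _.toInt)
}

object Example {
  // Mirrors the AM_PORT definition added by this commit, using the sketch types.
  val AM_PORT: MiniConfigEntry[Int] =
    new MiniConfigBuilder("spark.yarn.am.port").intConf.createWithDefault(0)

  def main(args: Array[String]): Unit = {
    val conf = Map("spark.yarn.am.port" -> "8030")
    // Callers read a typed value through the entry, as in sparkConf.get(AM_PORT).
    println(AM_PORT.readFrom(conf))                        // 8030
    println(AM_PORT.readFrom(Map.empty[String, String]))   // 0 (the default)
  }
}
```

Centralizing the key, type, and default in one entry is what makes the call site in ApplicationMaster.scala shrink to sparkConf.get(AM_PORT).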
