Repository: spark
Updated Branches:
  refs/heads/master 00461bb91 -> 4a91806a4
[SPARK-13413] Remove SparkContext.metricsSystem

## What changes were proposed in this pull request?

This patch removes SparkContext.metricsSystem. SparkContext.metricsSystem returns MetricsSystem, which is a private class. I think it was added by accident.

In addition, I also removed an unused private[spark] schedulerBackend setter method.

## How was this patch tested?

N/A.

Author: Reynold Xin <[email protected]>

This patch had conflicts when merged, resolved by
Committer: Josh Rosen <[email protected]>

Closes #11282 from rxin/SPARK-13413.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4a91806a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4a91806a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4a91806a

Branch: refs/heads/master
Commit: 4a91806a45a48432c3ea4c2aaa553177952673e9
Parents: 00461bb
Author: Reynold Xin <[email protected]>
Authored: Mon Feb 22 14:01:35 2016 -0800
Committer: Josh Rosen <[email protected]>
Committed: Mon Feb 22 14:01:35 2016 -0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkContext.scala | 9 ++-------
 project/MimaExcludes.scala                               | 6 +++++-
 2 files changed, 7 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/4a91806a/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index c001df3..cd7eed3 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -297,9 +297,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   val sparkUser = Utils.getCurrentUserName()
 
   private[spark] def schedulerBackend: SchedulerBackend = _schedulerBackend
-  private[spark] def schedulerBackend_=(sb: SchedulerBackend): Unit = {
-    _schedulerBackend = sb
-  }
 
   private[spark] def taskScheduler: TaskScheduler = _taskScheduler
   private[spark] def taskScheduler_=(ts: TaskScheduler): Unit = {
@@ -322,8 +319,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   def applicationId: String = _applicationId
   def applicationAttemptId: Option[String] = _applicationAttemptId
 
-  def metricsSystem: MetricsSystem = if (_env != null) _env.metricsSystem else null
-
   private[spark] def eventLogger: Option[EventLoggingListener] = _eventLogger
 
   private[spark] def executorAllocationManager: Option[ExecutorAllocationManager] =
@@ -514,9 +509,9 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
 
     // The metrics system for Driver need to be set spark.app.id to app ID.
     // So it should start after we get app ID from the task scheduler and set spark.app.id.
-    metricsSystem.start()
+    _env.metricsSystem.start()
     // Attach the driver metrics servlet handler to the web ui after the metrics system is started.
-    metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler)))
+    _env.metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler)))
 
     _eventLogger =
       if (isEventLogEnabled) {


http://git-wip-us.apache.org/repos/asf/spark/blob/4a91806a/project/MimaExcludes.scala
----------------------------------------------------------------------
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 97a1e8b..746223f 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -261,9 +261,13 @@ object MimaExcludes {
         ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.graphx.Graph.mapReduceTriplets"),
         ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.graphx.Graph.mapReduceTriplets$default$3"),
         ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.graphx.impl.GraphImpl.mapReduceTriplets")
-      ) ++Seq(
+      ) ++ Seq(
        // SPARK-13426 Remove the support of SIMR
        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkMasterRegex.SIMR_REGEX")
+      ) ++ Seq(
+        // SPARK-13413 Remove SparkContext.metricsSystem/schedulerBackend_ setter
+        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.metricsSystem"),
+        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.schedulerBackend_=")
       )
     case v if v.startsWith("1.6") =>
       Seq(
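
A note on the second MiMa exclusion above: Scala compiles an assignment such as `sc.schedulerBackend = sb` into a call to a method whose name ends in `_=`, which is why the removed setter is listed as `org.apache.spark.SparkContext.schedulerBackend_=`. Below is a minimal, self-contained sketch of that convention; the `Holder`/`backend` names are illustrative only and are not part of Spark.

```scala
// Illustration of Scala's setter-naming convention. Defining `backend_=`
// next to a getter `backend` lets callers write `obj.backend = value`,
// which the compiler desugars to `obj.backend_=(value)`. The removed
// private[spark] SparkContext.schedulerBackend_= followed this pattern.
class Holder {
  private var _backend: String = "none"

  def backend: String = _backend        // getter
  def backend_=(b: String): Unit = {    // setter; referenced by MiMa as backend_=
    _backend = b
  }
}

object SetterSketch {
  def main(args: Array[String]): Unit = {
    val h = new Holder
    h.backend = "local"                 // desugars to h.backend_=("local")
    println(h.backend)                  // prints: local
  }
}
```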

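For completeness, here is a hedged sketch (not part of the patch) of how the driver's MetricsSystem is reached once SparkContext.metricsSystem is gone: through SparkEnv, mirroring the `_env.metricsSystem` calls in the diff above. Because MetricsSystem is a private[spark] class, such code only compiles inside the org.apache.spark package; the object and method names below are hypothetical.

```scala
package org.apache.spark

import org.apache.spark.metrics.MetricsSystem

// Hypothetical Spark-internal helper (illustration only): after SPARK-13413,
// the driver's MetricsSystem is obtained via SparkEnv rather than the removed
// SparkContext.metricsSystem accessor.
private[spark] object DriverMetricsSketch {
  // Returns the MetricsSystem of the currently running SparkEnv.
  def driverMetricsSystem(): MetricsSystem = SparkEnv.get.metricsSystem
}
```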