This is an automated email from the ASF dual-hosted git repository.
mridulm80 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new b7d99e3 [SPARK-36615][CORE] Register shutdown hook earlier when start
SC
b7d99e3 is described below
commit b7d99e3eea5f9c0b3d11ec578d6aa0720c256eeb
Author: Angerszhuuuu <[email protected]>
AuthorDate: Tue Sep 21 13:23:14 2021 -0500
[SPARK-36615][CORE] Register shutdown hook earlier when start SC
### What changes were proposed in this pull request?
Users often press Ctrl+C to stop a starting SparkContext while it is registering
with YARN in client mode and resources are tight.
At that point the SparkContext has not yet registered its shutdown hook, so
`sc.stop()` is never invoked when the application exits.
We should register the shutdown hook earlier when starting a SparkContext.
### Why are the changes needed?
Make sure we invoke `sc.stop()` when a starting SparkContext application is
killed.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Not needed
Closes #33869 from AngersZhuuuu/SPARK-36615.
Authored-by: Angerszhuuuu <[email protected]>
Signed-off-by: Mridul Muralidharan <mridul<at>gmail.com>
---
.../main/scala/org/apache/spark/SparkContext.scala | 29 +++++++++++-----------
1 file changed, 15 insertions(+), 14 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala
b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 3404a0f..e27499a15 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -645,20 +645,6 @@ class SparkContext(config: SparkConf) extends Logging {
// Attach the driver metrics servlet handler to the web ui after the
metrics system is started.
_env.metricsSystem.getServletHandlers.foreach(handler =>
ui.foreach(_.attachHandler(handler)))
- // Post init
- _taskScheduler.postStartHook()
- if (isLocal) {
- _env.metricsSystem.registerSource(Executor.executorSourceLocalModeOnly)
- }
- _env.metricsSystem.registerSource(_dagScheduler.metricsSource)
- _env.metricsSystem.registerSource(new
BlockManagerSource(_env.blockManager))
- _env.metricsSystem.registerSource(new JVMCPUSource())
- _executorMetricsSource.foreach(_.register(_env.metricsSystem))
- _executorAllocationManager.foreach { e =>
- _env.metricsSystem.registerSource(e.executorAllocationManagerSource)
- }
- appStatusSource.foreach(_env.metricsSystem.registerSource(_))
- _plugins.foreach(_.registerMetrics(applicationId))
// Make sure the context is stopped if the user forgets about it. This
avoids leaving
// unfinished event logs around after the JVM exits cleanly. It doesn't
help if the JVM
// is killed, though.
@@ -673,6 +659,21 @@ class SparkContext(config: SparkConf) extends Logging {
logWarning("Ignoring Exception while stopping SparkContext from
shutdown hook", e)
}
}
+
+ // Post init
+ _taskScheduler.postStartHook()
+ if (isLocal) {
+ _env.metricsSystem.registerSource(Executor.executorSourceLocalModeOnly)
+ }
+ _env.metricsSystem.registerSource(_dagScheduler.metricsSource)
+ _env.metricsSystem.registerSource(new
BlockManagerSource(_env.blockManager))
+ _env.metricsSystem.registerSource(new JVMCPUSource())
+ _executorMetricsSource.foreach(_.register(_env.metricsSystem))
+ _executorAllocationManager.foreach { e =>
+ _env.metricsSystem.registerSource(e.executorAllocationManagerSource)
+ }
+ appStatusSource.foreach(_env.metricsSystem.registerSource(_))
+ _plugins.foreach(_.registerMetrics(applicationId))
} catch {
case NonFatal(e) =>
logError("Error initializing SparkContext.", e)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]