[
https://issues.apache.org/jira/browse/SPARK-26362?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16722028#comment-16722028
]
ASF GitHub Bot commented on SPARK-26362:
asfgit closed pull request #23311: [SPARK-26362][CORE] Remove
'spark.driver.allowMultipleContexts' to disallow multiple creation of
SparkContexts
URL: https://github.com/apache/spark/pull/23311
This is a PR merged from a forked repository. Because GitHub hides the
original diff of a foreign (fork) pull request once it is merged, the diff
is reproduced below for the sake of provenance:
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala
b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 696dafda6d1ec..09cc346db0ed2 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -64,9 +64,8 @@ import org.apache.spark.util.logging.DriverLogger
* Main entry point for Spark functionality. A SparkContext represents the
connection to a Spark
* cluster, and can be used to create RDDs, accumulators and broadcast
variables on that cluster.
*
- * Only one SparkContext may be active per JVM. You must `stop()` the active
SparkContext before
- * creating a new one. This limitation may eventually be removed; see
SPARK-2243 for more details.
- *
+ * @note Only one `SparkContext` should be active per JVM. You must `stop()`
the
+ * active `SparkContext` before creating a new one.
* @param config a Spark Config object describing the application
configuration. Any settings in
* this config overrides the default configs as well as system properties.
*/
@@ -75,14 +74,10 @@ class SparkContext(config: SparkConf) extends Logging {
// The call site where this SparkContext was constructed.
private val creationSite: CallSite = Utils.getCallSite()
- // If true, log warnings instead of throwing exceptions when multiple
SparkContexts are active
- private val allowMultipleContexts: Boolean =
-config.getBoolean("spark.driver.allowMultipleContexts", false)
-
// In order to prevent multiple SparkContexts from being active at the same
time, mark this
// context as having started construction.
// NOTE: this must be placed at the beginning of the SparkContext
constructor.
- SparkContext.markPartiallyConstructed(this, allowMultipleContexts)
+ SparkContext.markPartiallyConstructed(this)
val startTime = System.currentTimeMillis()
@@ -2392,7 +2387,7 @@ class SparkContext(config: SparkConf) extends Logging {
// In order to prevent multiple SparkContexts from being active at the same
time, mark this
// context as having finished construction.
// NOTE: this must be placed at the end of the SparkContext constructor.
- SparkContext.setActiveContext(this, allowMultipleContexts)
+ SparkContext.setActiveContext(this)
}
/**
@@ -2409,18 +2404,18 @@ object SparkContext extends Logging {
private val SPARK_CONTEXT_CONSTRUCTOR_LOCK = new Object()
/**
- * The active, fully-constructed SparkContext. If no SparkContext is
active, then this is `null`.
+ * The active, fully-constructed SparkContext. If no SparkContext is active,
then this is `null`.
*
- * Access to this field is guarded by SPARK_CONTEXT_CONSTRUCTOR_LOCK.
+ * Access to this field is guarded by `SPARK_CONTEXT_CONSTRUCTOR_LOCK`.
*/
private val activeContext: AtomicReference[SparkContext] =
new AtomicReference[SparkContext](null)
/**
- * Points to a partially-constructed SparkContext if some thread is in the
SparkContext
+ * Points to a partially-constructed SparkContext if another thread is in
the SparkContext
* constructor, or `None` if no SparkContext is being constructed.
*
- * Access to this field is guarded by SPARK_CONTEXT_CONSTRUCTOR_LOCK
+ * Access to this field is guarded by `SPARK_CONTEXT_CONSTRUCTOR_LOCK`.
*/
private var contextBeingConstructed: Option[SparkContext] = None
@@ -2428,24 +2423,16 @@ object SparkContext extends Logging {
* Called to ensure that no other SparkContext is running in this JVM.
*
* Throws an exception if a running context is detected and logs a warning
if another thread is
- * constructing a SparkContext. This warning is necessary because the
current locking scheme
+ * constructing a SparkContext. This warning is necessary because the
current locking scheme
* prevents us from reliably distinguishing between cases where another
context is being
* constructed and cases where another constructor threw an exception.
*/
- private def assertNoOtherContextIsRunning(
- sc: SparkContext,
- allowMultipleContexts: Boolean): Unit = {
+ private def assertNoOtherContextIsRunning(sc: SparkContext): Unit = {
SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {