Github user hbhanawat commented on a diff in the pull request:
https://github.com/apache/spark/pull/11723#discussion_r57308967
--- Diff: core/src/main/scala/org/apache/spark/SparkContext.scala ---
@@ -2443,8 +2443,34 @@ object SparkContext extends Logging {
"in the form mesos://zk://host:port. Current Master URL will
stop working in Spark 2.0.")
createTaskScheduler(sc, "mesos://" + zkUrl, deployMode)
- case _ =>
- throw new SparkException("Could not parse Master URL: '" + master
+ "'")
+ case masterUrl =>
+ val cm = getClusterManager(masterUrl) match {
+ case Some(clusterMgr) => clusterMgr
+ case None => throw new SparkException("Could not parse Master
URL: '" + master + "'")
+ }
+ try {
+ val scheduler = cm.createTaskScheduler(sc)
+ val backend = cm.createSchedulerBackend(sc, scheduler)
+ cm.initialize(scheduler, backend)
+ (backend, scheduler)
+ } catch {
+ case e: Exception => {
+ throw new SparkException("External scheduler cannot be
instantiated", e)
+ }
+ }
+ }
+ }
+
+ private def getClusterManager(url: String):
Option[ExternalClusterManager] = {
+ val loader = Utils.getContextOrSparkClassLoader
+ val serviceLoader =
ServiceLoader.load(classOf[ExternalClusterManager], loader)
+
+ serviceLoader.asScala.filter(_.canCreate(url)).toList match {
+ // exactly one registered manager
+ case head :: Nil => Some(head)
+ case Nil => None
+ case multipleMgrs => sys.error(s"Multiple Cluster Managers
registered " +
--- End diff --
Done.
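
For reference, the lookup in `getClusterManager` leans on `java.util.ServiceLoader`, which instantiates every implementation listed under `META-INF/services` on the classpath and lets `canCreate` pick the one that owns the master URL. Below is a small, self-contained sketch of that discovery pattern; the trait and class names (`ClusterManagerPlugin`, `DummyClusterManager`) are stand-ins for illustration, not part of this patch:

```scala
import java.util.ServiceLoader
import scala.collection.JavaConverters._

// Stand-in for ExternalClusterManager, used only to demonstrate discovery.
trait ClusterManagerPlugin {
  def canCreate(masterURL: String): Boolean
}

// A concrete plugin. To be discoverable it must also be listed, one fully
// qualified class name per line, in a classpath resource named
// META-INF/services/ClusterManagerPlugin (i.e. the trait's fully qualified name).
class DummyClusterManager extends ClusterManagerPlugin {
  override def canCreate(masterURL: String): Boolean = masterURL.startsWith("dummy://")
}

object ServiceLoaderDemo {
  def main(args: Array[String]): Unit = {
    val loader = Thread.currentThread().getContextClassLoader
    val managers = ServiceLoader.load(classOf[ClusterManagerPlugin], loader).asScala

    // Same filtering the diff performs: keep only the managers that claim
    // the given master URL, then expect exactly one match.
    managers.filter(_.canCreate("dummy://host:7077")).toList match {
      case head :: Nil => println(s"Using cluster manager: ${head.getClass.getName}")
      case Nil         => println("No cluster manager registered for this master URL")
      case many        => sys.error(s"Multiple cluster managers registered: $many")
    }
  }
}
```

Using ServiceLoader this way keeps SparkContext free of compile-time references to external cluster managers; a new manager only needs to be on the classpath with its services entry.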