ivoson commented on code in PR #36716:
URL: https://github.com/apache/spark/pull/36716#discussion_r904912500
##########
core/src/main/scala/org/apache/spark/resource/ResourceProfile.scala:
##########
@@ -336,9 +340,23 @@ object ResourceProfile extends Logging {
private def getDefaultExecutorResources(conf: SparkConf): Map[String, ExecutorResourceRequest] = {
val ereqs = new ExecutorResourceRequests()
- val cores = conf.get(EXECUTOR_CORES)
- ereqs.cores(cores)
- val memory = conf.get(EXECUTOR_MEMORY)
+
+ val isStandalone = conf.getOption("spark.master").exists(_.startsWith("spark://"))
+ val isLocalCluster = conf.getOption("spark.master").exists(_.startsWith("local-cluster"))
+ // By default, standalone executors take all available cores, do not have a specific value.
+ val cores = if (isStandalone || isLocalCluster) {
+ conf.getOption(EXECUTOR_CORES.key).map(_.toInt)
+ } else {
+ Some(conf.get(EXECUTOR_CORES))
+ }
+ cores.foreach(ereqs.cores)
+
+ // Setting all resources here, cluster managers will take the resources they respect.
Review Comment:
Thanks for the suggestion; I will remove it.
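
For context, a minimal self-contained sketch of the decision the diff above makes. The helper name defaultExecutorCores is invented for illustration, and it assumes EXECUTOR_CORES corresponds to the spark.executor.cores key with a default of 1; this is not the actual ResourceProfile code.

    import org.apache.spark.SparkConf

    // Sketch only: mirrors the diff's logic for deciding whether a default
    // executor core count is recorded in the default resource profile.
    def defaultExecutorCores(conf: SparkConf): Option[Int] = {
      val master = conf.getOption("spark.master")
      val isStandalone = master.exists(_.startsWith("spark://"))
      val isLocalCluster = master.exists(_.startsWith("local-cluster"))
      if (isStandalone || isLocalCluster) {
        // Standalone and local-cluster executors take all available cores
        // unless spark.executor.cores is explicitly set, so no default is recorded.
        conf.getOption("spark.executor.cores").map(_.toInt)
      } else {
        // Other cluster managers fall back to the configured (or default) value.
        Some(conf.getInt("spark.executor.cores", 1))
      }
    }

    // defaultExecutorCores(new SparkConf().setMaster("spark://host:7077"))  // None
    // defaultExecutorCores(new SparkConf().setMaster("yarn"))               // Some(1)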
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]