tgravescs commented on a change in pull request #35504:
URL: https://github.com/apache/spark/pull/35504#discussion_r817734660
##########
File path:
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStepSuite.scala
##########
@@ -213,10 +213,62 @@ class BasicDriverFeatureStepSuite extends SparkFunSuite {
assert(mem === s"${expected}Mi")
val systemProperties = step.getAdditionalPodSystemProperties()
- assert(systemProperties(MEMORY_OVERHEAD_FACTOR.key) ===
expectedFactor.toString)
+ assert(systemProperties(DRIVER_MEMORY_OVERHEAD_FACTOR.key) ===
expectedFactor.toString)
}
}
+ test(s"SPARK-38194: memory overhead factor precendence") {
+ // Choose a driver memory where the default memory overhead is >
MEMORY_OVERHEAD_MIN_MIB
+ val driverMem =
+ ResourceProfile.MEMORY_OVERHEAD_MIN_MIB /
DRIVER_MEMORY_OVERHEAD_FACTOR.defaultValue.get * 2
+
+ // main app resource, overhead factor
+ val sparkConf = new SparkConf(false)
+ .set(CONTAINER_IMAGE, "spark-driver:latest")
+ .set(DRIVER_MEMORY.key, s"${driverMem.toInt}m")
+
+ // New config should take precedence
+ sparkConf.set(DRIVER_MEMORY_OVERHEAD_FACTOR, 0.2)
+ sparkConf.set(MEMORY_OVERHEAD_FACTOR, 0.3)
+ val expectedFactor = 0.2
Review comment:
Move this up and use `expectedFactor` above on line 231 to set the config value as well.
##########
File path:
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStepSuite.scala
##########
@@ -213,10 +213,62 @@ class BasicDriverFeatureStepSuite extends SparkFunSuite {
assert(mem === s"${expected}Mi")
val systemProperties = step.getAdditionalPodSystemProperties()
- assert(systemProperties(MEMORY_OVERHEAD_FACTOR.key) ===
expectedFactor.toString)
+ assert(systemProperties(DRIVER_MEMORY_OVERHEAD_FACTOR.key) ===
expectedFactor.toString)
}
}
+ test(s"SPARK-38194: memory overhead factor precendence") {
+ // Choose a driver memory where the default memory overhead is >
MEMORY_OVERHEAD_MIN_MIB
+ val driverMem =
+ ResourceProfile.MEMORY_OVERHEAD_MIN_MIB /
DRIVER_MEMORY_OVERHEAD_FACTOR.defaultValue.get * 2
+
+ // main app resource, overhead factor
+ val sparkConf = new SparkConf(false)
+ .set(CONTAINER_IMAGE, "spark-driver:latest")
+ .set(DRIVER_MEMORY.key, s"${driverMem.toInt}m")
+
+ // New config should take precedence
+ sparkConf.set(DRIVER_MEMORY_OVERHEAD_FACTOR, 0.2)
+ sparkConf.set(MEMORY_OVERHEAD_FACTOR, 0.3)
+ val expectedFactor = 0.2
+
+ val conf = KubernetesTestConf.createDriverConf(
+ sparkConf = sparkConf)
+ val step = new BasicDriverFeatureStep(conf)
+ val pod = step.configurePod(SparkPod.initialPod())
+ val mem =
amountAndFormat(pod.container.getResources.getRequests.get("memory"))
+ val expected = (driverMem + driverMem * expectedFactor).toInt
+ assert(mem === s"${expected}Mi")
+
+ val systemProperties = step.getAdditionalPodSystemProperties()
+ assert(systemProperties(DRIVER_MEMORY_OVERHEAD_FACTOR.key) === "0.2")
Review comment:
Use `expectedFactor` here instead of the hardcoded "0.2" string.
##########
File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
##########
@@ -105,6 +105,18 @@ package object config {
.bytesConf(ByteUnit.MiB)
.createOptional
+ private[spark] val DRIVER_MEMORY_OVERHEAD_FACTOR =
+ ConfigBuilder("spark.driver.memoryOverheadFactor")
+ .doc("Fraction of driver memory to be allocated as additional non-heap
memory per driver " +
+ "process in cluster mode. This is memory that accounts for things like
VM overheads, " +
+ "interned strings, other native overheads, etc. This tends to grow
with the container " +
+ "size. This value is ignored if spark.driver.memoryOverhead is set
directly.")
Review comment:
Right — what I meant here is that since we deprecated
spark.kubernetes.memoryOverheadFactor, we should remove it from the
running-on-kubernetes documentation, and the description of the 0.40 default for
non-JVM jobs needs to move here. I.e., on Kubernetes the default is 0.10 for
JVM-based jobs and 0.40 for non-JVM jobs.
##########
File path:
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStepSuite.scala
##########
@@ -441,6 +441,59 @@ class BasicExecutorFeatureStepSuite extends SparkFunSuite
with BeforeAndAfter {
))
}
+ test(s"SPARK-38194: memory overhead factor precendence") {
+ // Choose an executor memory where the default memory overhead is >
MEMORY_OVERHEAD_MIN_MIB
+ val defaultFactor = EXECUTOR_MEMORY_OVERHEAD_FACTOR.defaultValue.get
+ val executorMem = ResourceProfile.MEMORY_OVERHEAD_MIN_MIB / defaultFactor
* 2
+
+ // main app resource, overhead factor
+ val sparkConf = new SparkConf(false)
+ .set(CONTAINER_IMAGE, "spark-driver:latest")
+ .set(EXECUTOR_MEMORY.key, s"${executorMem.toInt}m")
+
+ // New config should take precedence
+ sparkConf.set(EXECUTOR_MEMORY_OVERHEAD_FACTOR, 0.2)
+ sparkConf.set(MEMORY_OVERHEAD_FACTOR, 0.3)
+ val expectedFactor = 0.2
Review comment:
Similar comment: move this up and use it where you currently have 0.2 hardcoded.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]