dongjoon-hyun commented on code in PR #53338:
URL: https://github.com/apache/spark/pull/53338#discussion_r2607562212
##########
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtilsSuite.scala:
##########
@@ -128,4 +128,52 @@ class KubernetesClientUtilsSuite extends SparkFunSuite with BeforeAndAfter {
       .build()
     assert(outputConfigMap === expectedConfigMap)
   }
+
+  test("SPARK-54605: check custom driver env appended as expected when config map size is " +
+    "below threshold.") {
+    val input = Map("spark-env.sh" -> "test12345\ntest12345", "testConf.1" -> "test123456")
+    val sparkConf = testSetup(input.map(f => f._1 -> f._2.getBytes(StandardCharsets.UTF_8)))
+      .set(Config.CONFIG_MAP_MAXSIZE.key, "90")
+    val conf = KubernetesTestConf.createDriverConf(
+      sparkConf = sparkConf,
+      environment = Map("AAA" -> "value1", "BBB" -> "value2")
+    )
+    val confFileMap = KubernetesClientUtils.overrideDefaultSparkEnv(conf, input)
+    val expectedConfFileMap = Map(
+      "spark-env.sh" -> "test12345\ntest12345\nexport AAA=value1\nexport BBB=value2\n",
+      "testConf.1" -> "test123456")
+    assert(confFileMap === expectedConfFileMap)
+  }
+
+  test("SPARK-54605: check custom driver env not appended when config map size is " +
+    "larger than threshold.") {
+    val input = Map("spark-env.sh" -> "test12345\ntest12345", "testConf.1" -> "test123456")
+    val sparkConf = testSetup(input.map(f => f._1 -> f._2.getBytes(StandardCharsets.UTF_8)))
+      .set(Config.CONFIG_MAP_MAXSIZE.key, "87")
+    val conf = KubernetesTestConf.createDriverConf(
+      sparkConf = sparkConf,
+      environment = Map("AAA" -> "value1", "BBB" -> "value2")
+    )
+    val confFileMap = KubernetesClientUtils.overrideDefaultSparkEnv(conf, input)
+    val expectedConfFileMap = Map(
+      "spark-env.sh" -> "test12345\ntest12345",
+      "testConf.1" -> "test123456")
+    assert(confFileMap === expectedConfFileMap)
+  }
+
+  test("SPARK-54605: check custom driver env not appended when config map size is " +
+    "equal to threshold.") {
Review Comment:
Well, this is effectively the same as the first test case, isn't it? When we write test cases, `positive` and `negative` are enough. If we need a boundary condition, we can keep this test case and delete the first one.
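
For illustration only, a boundary-condition variant along the lines suggested above might look like the sketch below. This is not the code from the PR: the threshold value is a placeholder (a real test would set `CONFIG_MAP_MAXSIZE` to the exact computed config map size), and the expected output assumes the env exports are dropped at the boundary, as the test name in the diff indicates.

```scala
  // Hypothetical sketch (not the PR's actual code): a boundary-condition test that
  // could replace the "below threshold" case, per the suggestion above.
  test("SPARK-54605: check custom driver env not appended when config map size is " +
      "equal to threshold.") {
    val input = Map("spark-env.sh" -> "test12345\ntest12345", "testConf.1" -> "test123456")
    // Placeholder value: a real test would set this to the exact size computed for
    // the config map contents, so that size == threshold.
    val boundaryThreshold = "88"
    val sparkConf = testSetup(input.map(f => f._1 -> f._2.getBytes(StandardCharsets.UTF_8)))
      .set(Config.CONFIG_MAP_MAXSIZE.key, boundaryThreshold)
    val conf = KubernetesTestConf.createDriverConf(
      sparkConf = sparkConf,
      environment = Map("AAA" -> "value1", "BBB" -> "value2"))
    val confFileMap = KubernetesClientUtils.overrideDefaultSparkEnv(conf, input)
    // At the boundary the env exports are expected to be dropped, i.e. the same
    // result as the "larger than threshold" case.
    val expectedConfFileMap = Map(
      "spark-env.sh" -> "test12345\ntest12345",
      "testConf.1" -> "test123456")
    assert(confFileMap === expectedConfFileMap)
  }
```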
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]