Yikun commented on a change in pull request #35215:
URL: https://github.com/apache/spark/pull/35215#discussion_r787343899



##########
File path: 
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
##########
@@ -337,4 +337,32 @@ class ClientSuite extends SparkFunSuite with 
BeforeAndAfter {
     submissionClient.run()
     verify(loggingPodStatusWatcher).watchOrStop(kconf.namespace + ":driver")
   }
+
+  test("driver's confimap.metadata.namespace set to conf.namespace otherwise 
default") {
+    val submissionClient = new Client(
+      kconf,
+      driverBuilder,
+      kubernetesClient,
+      loggingPodStatusWatcher)
+    assert(submissionClient.createConfigs()._3.getMetadata.getNamespace === 
"default")

Review comment:
       ```suggestion
   
assert(submissionClient.createConfigMap(BUILT_KUBERNETES_SPEC)._2.getMetadata.getNamespace
 === "default")
   ```
   
   The test will also be simpler.

##########
File path: 
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterSchedulerBackendSuite.scala
##########
@@ -131,6 +131,22 @@ class KubernetesClusterSchedulerBackendSuite extends 
SparkFunSuite with BeforeAn
       pollEvents)
   }
 
+  test("executor's configmap.metadata.namespace set to conf.namespace 
otherwise default") {
+    
assert(schedulerBackendUnderTest.createConfigMap().getMetadata.getNamespace === 
"default")
+
+    sparkConf.set(KUBERNETES_NAMESPACE, "namespace")

Review comment:
       It would be better to create a new `sparkConf` here, to avoid affecting any 
follow-up test cases.

##########
File path: 
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
##########
@@ -337,4 +337,32 @@ class ClientSuite extends SparkFunSuite with 
BeforeAndAfter {
     submissionClient.run()
     verify(loggingPodStatusWatcher).watchOrStop(kconf.namespace + ":driver")
   }
+
+  test("driver's confimap.metadata.namespace set to conf.namespace otherwise 
default") {

Review comment:
       nit: test("SPARK-37916: driver's confimap.metadata.namespace set to 
conf.namespace otherwise default") {

##########
File path: 
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientApplication.scala
##########
@@ -100,12 +100,18 @@ private[spark] class Client(
     kubernetesClient: KubernetesClient,
     watcher: LoggingPodStatusWatcher) extends Logging {
 
-  def run(): Unit = {
+  private[spark] def createConfigs(): (KubernetesDriverSpec, Map[String, 
String], ConfigMap) = {

Review comment:
       I guess this refactor was done for testing purposes. It would be better to separate 
out only the configMap part, and keep `resolvedDriverSpec` inside `run`, like this:
   
   ```suggestion
     private[spark] def createConfigMap(
       driverSpec: KubernetesDriverSpec): (Map[String, String], ConfigMap) = {
       val configMapName = KubernetesClientUtils.configMapNameDriver
       val confFilesMap = 
KubernetesClientUtils.buildSparkConfDirFilesMap(configMapName,
         conf.sparkConf, driverSpec.systemProperties)
       val configMap = KubernetesClientUtils
         .buildConfigMap(configMapName, confFilesMap, conf.namespace)
       (confFilesMap, configMap)
     }
   
     def run(): Unit = {
       val resolvedDriverSpec = builder.buildFromFeatures(conf, 
kubernetesClient)
       val configMapName = KubernetesClientUtils.configMapNameDriver
       val (confFilesMap, configMap) = createConfigMap(resolvedDriverSpec)
   ```

##########
File path: 
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
##########
@@ -337,4 +337,32 @@ class ClientSuite extends SparkFunSuite with 
BeforeAndAfter {
     submissionClient.run()
     verify(loggingPodStatusWatcher).watchOrStop(kconf.namespace + ":driver")
   }
+
+  test("driver's confimap.metadata.namespace set to conf.namespace otherwise 
default") {
+    val submissionClient = new Client(
+      kconf,
+      driverBuilder,
+      kubernetesClient,
+      loggingPodStatusWatcher)
+    assert(submissionClient.createConfigs()._3.getMetadata.getNamespace === 
"default")
+
+    val sparkConf = new SparkConf(false)
+      .set(Config.KUBERNETES_NAMESPACE, "namespace")
+
+    val kconf2 = KubernetesTestConf.createDriverConf(
+      sparkConf = sparkConf,
+      resourceNamePrefix = Some(KUBERNETES_RESOURCE_PREFIX)

Review comment:
       I think `resourceNamePrefix` is redundant here and can be removed.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to