Github user ifilonenko commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22911#discussion_r233650115
  
    --- Diff: resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStepSuite.scala ---
    @@ -0,0 +1,164 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.spark.deploy.k8s.features
    +
    +import java.io.File
    +import java.nio.charset.StandardCharsets.UTF_8
    +
    +import scala.collection.JavaConverters._
    +
    +import com.google.common.io.Files
    +import io.fabric8.kubernetes.api.model.{ConfigMap, Secret}
    +import org.mockito.Mockito._
    +
    +import org.apache.spark.{SparkConf, SparkFunSuite}
    +import org.apache.spark.deploy.k8s._
    +import org.apache.spark.deploy.k8s.Config._
    +import org.apache.spark.deploy.k8s.Constants._
    +import org.apache.spark.deploy.k8s.submit.JavaMainAppResource
    +import org.apache.spark.internal.config._
    +import org.apache.spark.util.Utils
    +
    +class KerberosConfDriverFeatureStepSuite extends SparkFunSuite {
    +
    +  import KubernetesFeaturesTestUtils._
    +  import SecretVolumeUtils._
    +
    +  private val tmpDir = Utils.createTempDir()
    +
    +  test("mount krb5 config map if defined") {
    +    val configMap = "testConfigMap"
    +    val step = createStep(
    +      new SparkConf(false).set(KUBERNETES_KERBEROS_KRB5_CONFIG_MAP, configMap))
    +
    +    checkPodForKrbConf(step.configurePod(SparkPod.initialPod()), configMap)
    +    assert(step.getAdditionalPodSystemProperties().isEmpty)
    +    assert(filter[ConfigMap](step.getAdditionalKubernetesResources()).isEmpty)
    +  }
    +
    +  test("create krb5.conf config map if local config provided") {
    +    val krbConf = File.createTempFile("krb5", ".conf", tmpDir)
    +    Files.write("some data", krbConf, UTF_8)
    +
    +    val sparkConf = new SparkConf(false)
    +      .set(KUBERNETES_KERBEROS_KRB5_FILE, krbConf.getAbsolutePath())
    +    val step = createStep(sparkConf)
    +
    +    val confMap = filter[ConfigMap](step.getAdditionalKubernetesResources()).head
    +    assert(confMap.getData().keySet().asScala === Set(krbConf.getName()))
    +
    +    checkPodForKrbConf(step.configurePod(SparkPod.initialPod()), confMap.getMetadata().getName())
    +    assert(step.getAdditionalPodSystemProperties().isEmpty)
    +  }
    +
    +  test("create keytab secret if client keytab file used") {
    +    val keytab = File.createTempFile("keytab", ".bin", tmpDir)
    +    Files.write("some data", keytab, UTF_8)
    +
    +    val sparkConf = new SparkConf(false)
    +      .set(KEYTAB, keytab.getAbsolutePath())
    +      .set(PRINCIPAL, "alice")
    +    val step = createStep(sparkConf)
    +
    +    val pod = step.configurePod(SparkPod.initialPod())
    +    assert(podHasVolume(pod.pod, KERBEROS_KEYTAB_VOLUME))
    +    assert(containerHasVolume(pod.container, KERBEROS_KEYTAB_VOLUME, KERBEROS_KEYTAB_MOUNT_POINT))
    +
    +    assert(step.getAdditionalPodSystemProperties().keys === Set(KEYTAB.key))
    +
    +    val secret = filter[Secret](step.getAdditionalKubernetesResources()).head
    +    assert(secret.getData().keySet().asScala === Set(keytab.getName()))
    +  }
    +
    +  test("do nothing if container-local keytab used") {
    +    val sparkConf = new SparkConf(false)
    +      .set(KEYTAB, "local:/my.keytab")
    +      .set(PRINCIPAL, "alice")
    +    val step = createStep(sparkConf)
    +
    +    val initial = SparkPod.initialPod()
    +    assert(step.configurePod(initial) === initial)
    +    assert(step.getAdditionalPodSystemProperties().isEmpty)
    +    assert(step.getAdditionalKubernetesResources().isEmpty)
    +  }
    +
    +  test("mount delegation tokens if provided") {
    +    val dtSecret = "tokenSecret"
    +    val sparkConf = new SparkConf(false)
    +      .set(KUBERNETES_KERBEROS_DT_SECRET_NAME, dtSecret)
    +      .set(KUBERNETES_KERBEROS_DT_SECRET_ITEM_KEY, "dtokens")
    +    val step = createStep(sparkConf)
    +
    +    checkPodForTokens(step.configurePod(SparkPod.initialPod()), dtSecret)
    +    assert(step.getAdditionalPodSystemProperties().isEmpty)
    +    assert(step.getAdditionalKubernetesResources().isEmpty)
    +  }
    +
    +  test("create delegation tokens if needed") {
    +    val step = spy(createStep(new SparkConf(false)))
    +    doReturn(Array[Byte](0x4, 0x2)).when(step).createDelegationTokens()
    --- End diff ---
    
    Should we check the contents of the token in the unit test? 


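    For example, a rough sketch of what that assertion could look like. The `KERBEROS_SECRET_KEY` data key and the Base64 handling of the secret value are assumptions here about how the step stores the tokens, not necessarily what it actually does:
    
    ```scala
    import java.util.Base64
    
    val tokens = Array[Byte](0x4, 0x2)
    val step = spy(createStep(new SparkConf(false)))
    doReturn(tokens).when(step).createDelegationTokens()
    
    // Force pod configuration so the step generates its additional resources.
    step.configurePod(SparkPod.initialPod())
    
    // Assumed: the step puts the serialized tokens into the delegation-token
    // secret under KERBEROS_SECRET_KEY, Base64-encoded as fabric8 Secret data.
    val secret = filter[Secret](step.getAdditionalKubernetesResources()).head
    val stored = Base64.getDecoder().decode(secret.getData().get(KERBEROS_SECRET_KEY))
    assert(stored.toSeq === tokens.toSeq)
    ```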
---
