Github user ifilonenko commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20669#discussion_r174902563
  
    --- Diff: resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientApplication.scala ---
    @@ -110,30 +109,29 @@ private[spark] class Client(
         for (nextStep <- submissionSteps) {
           currentDriverSpec = nextStep.configureDriver(currentDriverSpec)
         }
    -
    -    val resolvedDriverJavaOpts = currentDriverSpec
    -      .driverSparkConf
    -      // Remove this as the options are instead extracted and set individually below using
    -      // environment variables with prefix SPARK_JAVA_OPT_.
    -      .remove(org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS)
    -      .getAll
    -      .map {
    -        case (confKey, confValue) => s"-D$confKey=$confValue"
    -      } ++ driverJavaOptions.map(Utils.splitCommandString).getOrElse(Seq.empty)
    -    val driverJavaOptsEnvs: Seq[EnvVar] = resolvedDriverJavaOpts.zipWithIndex.map {
    -      case (option, index) =>
    -        new EnvVarBuilder()
    -          .withName(s"$ENV_JAVA_OPT_PREFIX$index")
    -          .withValue(option)
    -          .build()
    -    }
    -
    +    val configMapName = s"$kubernetesResourceNamePrefix-driver-conf-map"
    +    val configMap = buildConfigMap(configMapName, currentDriverSpec.driverSparkConf)
    +    // The ENV_VAR for "SPARK_CONF_DIR" is included so that the Spark command
    +    // builder picks up the Java options present in the ConfigMap
         val resolvedDriverContainer = new ContainerBuilder(currentDriverSpec.driverContainer)
    -      .addAllToEnv(driverJavaOptsEnvs.asJava)
    +      .addNewEnv()
    +        .withName(SPARK_CONF_DIR_ENV)
    +        .withValue(SPARK_CONF_PATH)
    --- End diff --
    
    Do the executors require SPARK_CONF_DIR to be defined as well?

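    If they do, a minimal sketch of what that could look like for an executor container, reusing the same fabric8 builder pattern as the driver change above. The withSparkConfDirEnv helper and the executorContainer value are hypothetical, and I'm assuming SPARK_CONF_DIR_ENV and SPARK_CONF_PATH are the same constants referenced in this diff:

        import io.fabric8.kubernetes.api.model.{Container, ContainerBuilder}

        // Hypothetical helper: set SPARK_CONF_DIR on an executor container the same
        // way the driver container is configured above, so the executor's command
        // builder would also resolve properties from the mounted conf directory.
        // SPARK_CONF_DIR_ENV and SPARK_CONF_PATH are assumed to come from the k8s
        // Constants import already used elsewhere in this file.
        def withSparkConfDirEnv(executorContainer: Container): Container =
          new ContainerBuilder(executorContainer)
            .addNewEnv()
              .withName(SPARK_CONF_DIR_ENV)
              .withValue(SPARK_CONF_PATH)
              .endEnv()
            .build()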
