GitHub user tgravescs commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3916#discussion_r23306196
  
    --- Diff: launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java ---
    @@ -0,0 +1,461 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.spark.launcher;
    +
    +import java.io.BufferedReader;
    +import java.io.File;
    +import java.io.FileFilter;
    +import java.io.FileInputStream;
    +import java.io.InputStreamReader;
    +import java.io.IOException;
    +import java.util.ArrayList;
    +import java.util.Arrays;
    +import java.util.Collections;
    +import java.util.HashMap;
    +import java.util.List;
    +import java.util.Map;
    +import java.util.Properties;
    +import java.util.jar.JarFile;
    +import java.util.regex.Pattern;
    +
    +/**
    + * Basic functionality for launchers - objects that encapsulate knowledge about how to build the
    + * commands to run a Spark application or service. This class is not meant to be extended by user
    + * code.
    + */
    +public abstract class AbstractLauncher<T extends AbstractLauncher> extends LauncherCommon {
    +
    +  private static final String ENV_SPARK_HOME = "SPARK_HOME";
    +  private static final String DEFAULT_PROPERTIES_FILE = "spark-defaults.conf";
    +  static final String DEFAULT_MEM = "512m";
    +
    +  String javaHome;
    +  String sparkHome;
    +  String propertiesFile;
    +  final Map<String, String> conf;
    +  final Map<String, String> launcherEnv;
    +
    +  AbstractLauncher() {
    +    this(Collections.<String, String>emptyMap());
    +  }
    +
    +  protected AbstractLauncher(Map<String, String> env) {
    +    this.conf = new HashMap<String, String>();
    +    this.launcherEnv = new HashMap<String, String>(env);
    +  }
    +
    +  @SuppressWarnings("unchecked")
    +  private final T THIS = (T) this;
    +
    +  /** Set a custom JAVA_HOME for launching the Spark application. */
    +  public T setJavaHome(String javaHome) {
    +    checkNotNull(javaHome, "javaHome");
    +    this.javaHome = javaHome;
    +    return THIS;
    +  }
    +
    +  /** Set a custom Spark installation location for the application. */
    +  public T setSparkHome(String sparkHome) {
    +    checkNotNull(sparkHome, "sparkHome");
    +    launcherEnv.put(ENV_SPARK_HOME, sparkHome);
    +    return THIS;
    +  }
    +
    +  /** Set a custom properties file with Spark configuration for the application. */
    +  public T setPropertiesFile(String path) {
    +    checkNotNull(path, "path");
    +    this.propertiesFile = path;
    +    return THIS;
    +  }
    +
    +  /** Set a single configuration value for the application. */
    +  public T setConf(String key, String value) {
    +    checkNotNull(key, "key");
    +    checkNotNull(value, "value");
    +    checkArgument(key.startsWith("spark."), "'key' must start with 'spark.'");
    +    conf.put(key, value);
    +    return THIS;
    +  }
    +
    +  /**
    +   * Launchers should implement this to create the command to be executed. This method should
    +   * also update the environment map with any environment variables needed by the child process.
    +   * <p/>
    +   * Note that this method is a no-op in the base class, even though subclasses in this package
    +   * really must implement it. This approach was taken to allow this method to be package private
    +   * while still allowing CommandUtils.scala to extend this class for its use.
    +   *
    +   * @param env Map containing environment variables to set for the Spark job.
    +   */
    +  List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
    +    throw new UnsupportedOperationException("Subclasses must implement this method.");
    +  }
    +
    +  /**
    +   * Prepares the launcher command for execution from a shell script. This is used by the `Main`
    +   * class to service the scripts shipped with the Spark distribution.
    +   */
    +  List<String> buildShellCommand() throws IOException {
    +    Map<String, String> childEnv = new HashMap<String, String>(launcherEnv);
    +    List<String> cmd = buildLauncherCommand(childEnv);
    +    return isWindows() ? prepareForWindows(cmd, childEnv) : prepareForBash(cmd, childEnv);
    +  }
    +
    +  /**
    +   * Loads the configuration file for the application, if it exists. This is  either the
    --- End diff --
    
    extra space after is
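
    As an aside on the code above: the `THIS` field and the `T extends AbstractLauncher` type parameter implement the self-typed builder idiom, so chained setters return the concrete subclass type rather than the base class. Below is a minimal standalone sketch of that idiom; the `DemoLauncher` subclass, `setAppName`, and `SelfTypeDemo` names are invented for illustration and are not part of this patch.
    
        import java.util.HashMap;
        import java.util.Map;
        
        // Sketch of the self-typed builder idiom used by AbstractLauncher.
        abstract class Launcher<T extends Launcher<T>> {
          final Map<String, String> conf = new HashMap<String, String>();
        
          // The unchecked cast is safe as long as every subclass passes
          // itself as T, e.g. class DemoLauncher extends Launcher<DemoLauncher>.
          @SuppressWarnings("unchecked")
          private final T THIS = (T) this;
        
          public T setConf(String key, String value) {
            conf.put(key, value);
            return THIS; // typed as the subclass, so chaining keeps that type
          }
        }
        
        // Hypothetical subclass, invented for this sketch.
        class DemoLauncher extends Launcher<DemoLauncher> {
          public DemoLauncher setAppName(String name) {
            conf.put("spark.app.name", name);
            return this;
          }
        }
        
        public class SelfTypeDemo {
          public static void main(String[] args) {
            // setConf() returns DemoLauncher here, so setAppName() is reachable
            // without a cast; with a plain "return this" in the base class it
            // would return Launcher and the chain would not compile.
            DemoLauncher launcher = new DemoLauncher()
                .setConf("spark.master", "local")
                .setAppName("demo");
            System.out.println(launcher.conf);
          }
        }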


