Github user pwendell commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3916#discussion_r24963051
  
    --- Diff: launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java ---
    @@ -0,0 +1,499 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.spark.launcher;
    +
    +import java.io.BufferedReader;
    +import java.io.File;
    +import java.io.FileFilter;
    +import java.io.FileInputStream;
    +import java.io.InputStreamReader;
    +import java.io.IOException;
    +import java.util.ArrayList;
    +import java.util.Arrays;
    +import java.util.Collections;
    +import java.util.HashMap;
    +import java.util.List;
    +import java.util.Map;
    +import java.util.Properties;
    +import java.util.jar.JarFile;
    +import java.util.regex.Pattern;
    +
    +/**
    + * Basic functionality for launchers - objects that encapsulate knowledge about how to build the
    + * commands to run a Spark application or service. This class is not meant to be extended by user
    + * code.
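    + * <p/>
    + * Typical use (illustrative only) chains the fluent setters on a concrete subclass such as
    + * {@code SparkLauncher}, e.g.
    + * {@code new SparkLauncher().setSparkHome("/opt/spark").setConf("spark.app.name", "demo")}.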
    + */
    +public abstract class AbstractLauncher<T extends AbstractLauncher<T>> extends LauncherCommon {
    +
    +  private static final String ENV_SPARK_HOME = "SPARK_HOME";
    +  private static final String DEFAULT_PROPERTIES_FILE = "spark-defaults.conf";
    +  static final String DEFAULT_MEM = "512m";
    +
    +  String javaHome;
    +  String sparkHome;
    +  String propertiesFile;
    +  final Map<String, String> conf;
    +  final Map<String, String> launcherEnv;
    +
    +  AbstractLauncher() {
    +    this(Collections.<String, String>emptyMap());
    +  }
    +
    +  protected AbstractLauncher(Map<String, String> env) {
    +    this.conf = new HashMap<String, String>();
    +    this.launcherEnv = new HashMap<String, String>(env);
    +  }
    +
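    +  // Pre-cast self-reference used by the fluent setters below so they can return the
    +  // concrete subclass type and keep call chains type-safe. The unchecked cast is safe
    +  // as long as each subclass supplies itself as the type parameter.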
    +  @SuppressWarnings("unchecked")
    +  private final T THIS = (T) this;
    +
    +  /**
    +   * Set a custom JAVA_HOME for launching the Spark application.
    +   *
    +   * @param javaHome Path to the JAVA_HOME to use.
    +   * @return This launcher.
    +   */
    +  public T setJavaHome(String javaHome) {
    +    checkNotNull(javaHome, "javaHome");
    +    this.javaHome = javaHome;
    +    return THIS;
    +  }
    +
    +  /**
    +   * Set a custom Spark installation location for the application.
    +   *
    +   * @param sparkHome Path to the Spark installation to use.
    +   * @return This launcher.
    +   */
    +  public T setSparkHome(String sparkHome) {
    +    checkNotNull(sparkHome, "sparkHome");
    +    launcherEnv.put(ENV_SPARK_HOME, sparkHome);
    +    return THIS;
    +  }
    +
    +  /**
    +   * Set a custom properties file with Spark configuration for the application.
    +   *
    +   * @param path Path to custom properties file to use.
    +   * @return This launcher.
    +   */
    +  public T setPropertiesFile(String path) {
    +    checkNotNull(path, "path");
    +    this.propertiesFile = path;
    +    return THIS;
    +  }
    +
    +  /**
    +   * Set a single configuration value for the application.
    +   *
    +   * @param key Configuration key.
    +   * @param value The value to use.
    +   * @return This launcher.
    +   */
    +  public T setConf(String key, String value) {
    +    checkNotNull(key, "key");
    +    checkNotNull(value, "value");
    +    checkArgument(key.startsWith("spark."), "'key' must start with 'spark.'");
    +    conf.put(key, value);
    +    return THIS;
    +  }
    +
    +  /**
    +   * Launchers should implement this to create the command to be executed. This method should
    +   * also update the environment map with any environment variables needed by the child process.
    +   * <p/>
    +   * Note that this method is a no-op in the base class, even though subclasses in this package
    +   * really must implement it. This approach was taken to allow this method to be package private
    +   * while still allowing CommandUtils.scala to extend this class for its use.
    +   *
    +   * @param env Map containing environment variables to set for the Spark job.
    +   */
    +  List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
    +    throw new UnsupportedOperationException("Subclasses must implement this method.");
    +  }
    +
    +  /**
    +   * Prepares the launcher command for execution from a shell script. This is used by the `Main`
    +   * class to service the scripts shipped with the Spark distribution.
    +   */
    +  List<String> buildShellCommand() throws IOException {
    +    Map<String, String> childEnv = new HashMap<String, String>(launcherEnv);
    +    List<String> cmd = buildLauncherCommand(childEnv);
    +    return isWindows() ? prepareForWindows(cmd, childEnv) : prepareForBash(cmd, childEnv);
    +  }
    +
    +  /**
    +   * Loads the configuration file for the application, if it exists. This is either the
    +   * user-specified properties file, or the spark-defaults.conf file under the Spark configuration
    +   * directory.
    +   */
    +  Properties loadPropertiesFile() throws IOException {
    +    Properties props = new Properties();
    +    File propsFile;
    +    if (propertiesFile != null) {
    +      propsFile = new File(propertiesFile);
    +      checkArgument(propsFile.isFile(), "Invalid properties file '%s'.", propertiesFile);
    +    } else {
    +      propsFile = new File(getConfDir(), DEFAULT_PROPERTIES_FILE);
    +    }
    +
    +    if (propsFile.isFile()) {
    +      FileInputStream fd = null;
    +      try {
    +        fd = new FileInputStream(propsFile);
    +        props.load(new InputStreamReader(fd, "UTF-8"));
    +      } finally {
    +        if (fd != null) {
    +          try {
    +            fd.close();
    +          } catch (IOException e) {
    +            // Ignore.
    +          }
    +        }
    +      }
    +    }
    +
    +    return props;
    +  }
    +
    +  String getSparkHome() {
    +    String path = getenv(ENV_SPARK_HOME);
    +    checkState(path != null,
    +      "Spark home not found; set it explicitly or use the SPARK_HOME 
environment variable.");
    +    return path;
    +  }
    +
    +  protected List<String> buildJavaCommand(String extraClassPath) throws IOException {
    +    List<String> cmd = new ArrayList<String>();
    +    if (javaHome == null) {
    +      cmd.add(join(File.separator, System.getProperty("java.home"), "bin", "java"));
    +    } else {
    +      cmd.add(join(File.separator, javaHome, "bin", "java"));
    +    }
    +
    +    // Load extra JAVA_OPTS from conf/java-opts, if it exists.
    +    File javaOpts = new File(join(File.separator, getConfDir(), "java-opts"));
    +    if (javaOpts.isFile()) {
    +      BufferedReader br = new BufferedReader(new InputStreamReader(
    +          new FileInputStream(javaOpts), "UTF-8"));
    +      try {
    +        String line;
    +        while ((line = br.readLine()) != null) {
    +          addOptionString(cmd, line);
    +        }
    +      } finally {
    +        br.close();
    +      }
    +    }
    +
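    +    // Pass the computed classpath via -cp, after any options loaded from java-opts.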
    +    cmd.add("-cp");
    +    cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath)));
    +    return cmd;
    +  }
    +
    +  /**
    +   * Adds the default perm gen size option for Spark if the VM requires it and the user hasn't
    +   * set it.
    +   */
    +  protected void addPermGenSizeOpt(List<String> cmd) {
    +    // Don't set MaxPermSize for Java 8 and later.
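    +    // "java.version" looks like "1.7.0_75", so for Java <= 8 the major version
    +    // is the second component; for anything newer the first component is > 1.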
    +    String[] version = System.getProperty("java.version").split("\\.");
    +    if (Integer.parseInt(version[0]) > 1 || Integer.parseInt(version[1]) > 7) {
    +      return;
    +    }
    +
    +    for (String arg : cmd) {
    +      if (arg.startsWith("-XX:MaxPermSize=")) {
    +        return;
    +      }
    +    }
    +
    +    cmd.add("-XX:MaxPermSize=128m");
    +  }
    +
    +  protected void addOptionString(List<String> cmd, String options) {
    +    if (!isEmpty(options)) {
    +      for (String opt : parseOptionString(options)) {
    +        cmd.add(opt);
    +      }
    +    }
    +  }
    +
    +  /**
    +   * Builds the classpath for the application. Returns a list with one classpath entry per element;
    +   * each entry is formatted in the way expected by {@code java.net.URLClassLoader} (more
    +   * specifically, with trailing slashes for directories).
    +   */
    +  List<String> buildClassPath(String appClassPath) throws IOException {
    --- End diff ---
    
    This seems like a function that will be difficult to keep backwards compatible
    if the intent is to expose it to users at some point.
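
    For illustration, suppose this were eventually exposed on a concrete subclass
    such as SparkLauncher (hypothetical usage; the method is package-private today):

        // Hypothetical caller code, assuming buildClassPath became public API.
        SparkLauncher launcher = new SparkLauncher().setSparkHome("/opt/spark");
        List<String> cp = launcher.buildClassPath(null);  // throws IOException
        // Callers will come to depend on the entry ordering and on directory
        // entries carrying trailing slashes, and both then become frozen behavior.
        for (String entry : cp) {
          System.out.println(entry);
        }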

