Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3916#discussion_r25309024
  
    --- Diff: launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java ---
    @@ -0,0 +1,359 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.spark.launcher;
    +
    +import java.io.BufferedReader;
    +import java.io.File;
    +import java.io.FileFilter;
    +import java.io.FileInputStream;
    +import java.io.InputStreamReader;
    +import java.io.IOException;
    +import java.util.ArrayList;
    +import java.util.Arrays;
    +import java.util.Collections;
    +import java.util.HashMap;
    +import java.util.List;
    +import java.util.Map;
    +import java.util.Properties;
    +import java.util.jar.JarFile;
    +import java.util.regex.Pattern;
    +
    +import static org.apache.spark.launcher.CommandBuilderUtils.*;
    +
    +/**
    + * Abstract command builder that defines common functionality for all builders.
    + */
    +abstract class AbstractCommandBuilder {
    +
    +  boolean verbose;
    +  String appName;
    +  String appResource;
    +  String deployMode;
    +  String javaHome;
    +  String mainClass;
    +  String master;
    +  String propertiesFile;
    +  final List<String> appArgs;
    +  final List<String> jars;
    +  final List<String> files;
    +  final List<String> pyFiles;
    +  final Map<String, String> childEnv;
    +  final Map<String, String> conf;
    +
    +  public AbstractCommandBuilder() {
    +    this(Collections.<String, String>emptyMap());
    +  }
    +
    +  public AbstractCommandBuilder(Map<String, String> env) {
    +    this.appArgs = new ArrayList<String>();
    +    this.childEnv = new HashMap<String, String>(env);
    +    this.conf = new HashMap<String, String>();
    +    this.files = new ArrayList<String>();
    +    this.jars = new ArrayList<String>();
    +    this.pyFiles = new ArrayList<String>();
    +  }
    +
    +  /**
    +   * Builds the command line to execute.
    +   *
    +   * @param env A map containing environment variables for the child process. It may already contain
    +   *            entries defined by the user (such as SPARK_HOME, or those defined by the
    +   *            SparkLauncher constructor that takes an environment), and may be modified to
    +   *            include other variables needed by the process to be executed.
    +   */
    +  abstract List<String> buildCommand(Map<String, String> env) throws IOException;
    +
    +  List<String> buildJavaCommand(String extraClassPath) throws IOException {
    +    List<String> cmd = new ArrayList<String>();
    +    if (javaHome == null) {
    +      cmd.add(join(File.separator, System.getProperty("java.home"), "bin", "java"));
    +    } else {
    +      cmd.add(join(File.separator, javaHome, "bin", "java"));
    +    }
    +
    +    // Load extra JAVA_OPTS from conf/java-opts, if it exists.
    +    File javaOpts = new File(join(File.separator, getConfDir(), "java-opts"));
    +    if (javaOpts.isFile()) {
    +      BufferedReader br = new BufferedReader(new InputStreamReader(
    +          new FileInputStream(javaOpts), "UTF-8"));
    +      try {
    +        String line;
    +        while ((line = br.readLine()) != null) {
    +          addOptionString(cmd, line);
    +        }
    +      } finally {
    +        br.close();
    +      }
    +    }
    +
    +    cmd.add("-cp");
    +    cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath)));
    +    return cmd;
    +  }
    +
    +  /**
    +   * Adds the default perm gen size option for Spark if the VM requires it and the user hasn't
    +   * set it.
    +   */
    +  void addPermGenSizeOpt(List<String> cmd) {
    +    // Don't set MaxPermSize for Java 8 and later.
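    +    // e.g. "1.7.0_71".split("\\.") yields ["1", "7", "0_71"]; Java 8 reports "1.8.0_x".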
    +    String[] version = System.getProperty("java.version").split("\\.");
    +    if (Integer.parseInt(version[0]) > 1 || Integer.parseInt(version[1]) > 7) {
    +      return;
    +    }
    +
    +    for (String arg : cmd) {
    +      if (arg.startsWith("-XX:MaxPermSize=")) {
    +        return;
    +      }
    +    }
    +
    +    cmd.add("-XX:MaxPermSize=128m");
    +  }
    +
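    +  /**
    +   * Parses the given options string and adds each resulting option to the command.
    +   */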
    +  void addOptionString(List<String> cmd, String options) {
    +    if (!isEmpty(options)) {
    +      for (String opt : parseOptionString(options)) {
    +        cmd.add(opt);
    +      }
    +    }
    +  }
    +
    +  /**
    +   * Builds the classpath for the application. Returns a list with one classpath entry per element;
    +   * each entry is formatted in the way expected by <i>java.net.URLClassLoader</i> (more
    +   * specifically, with trailing slashes for directories).
    +   */
    +  List<String> buildClassPath(String appClassPath) throws IOException {
    +    String sparkHome = getSparkHome();
    +    String scala = getScalaVersion();
    +
    +    List<String> cp = new ArrayList<String>();
    +    addToClassPath(cp, getenv("SPARK_CLASSPATH"));
    +    addToClassPath(cp, appClassPath);
    +
    +    addToClassPath(cp, getConfDir());
    +
    +    boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES"));
    +    boolean isTesting = "1".equals(getenv("SPARK_TESTING"));
    +    if (prependClasses || isTesting) {
    +      List<String> projects = Arrays.asList("core", "repl", "mllib", "bagel", "graphx",
    +        "streaming", "tools", "sql/catalyst", "sql/core", "sql/hive", "sql/hive-thriftserver",
    +        "yarn", "launcher");
    +      if (prependClasses) {
    +        System.err.println(
    +          "NOTE: SPARK_PREPEND_CLASSES is set, placing locally compiled Spark classes ahead of " +
    +          "assembly.");
    +        for (String project : projects) {
    +          addToClassPath(cp, String.format("%s/%s/target/scala-%s/classes", sparkHome, project,
    +            scala));
    +        }
    +      }
    +      if (isTesting) {
    +        for (String project : projects) {
    +          addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome,
    +            project, scala));
    +        }
    +      }
    +
    +      // Add this path to include jars that are shaded in the final deliverable created during
    +      // the maven build. These jars are copied to this directory during the build.
    +      addToClassPath(cp, String.format("%s/core/target/jars/*", sparkHome));
    +    }
    +
    +    String assembly = findAssembly(scala);
    +    addToClassPath(cp, assembly);
    +
    +    // When Hive support is needed, Datanucleus jars must be included on the classpath. Datanucleus
    +    // jars do not work if only included in the uber jar as plugin.xml metadata is lost. Both sbt
    +    // and maven will populate "lib_managed/jars/" with the datanucleus jars when Spark is built
    +    // with Hive, so first check if the datanucleus jars exist, and then ensure the current Spark
    +    // assembly is built for Hive, before actually populating the CLASSPATH with the jars.
    +    //
    +    // This block also serves as a check for SPARK-1703, when the assembly jar is built with
    +    // Java 7 and ends up with too many files, causing issues with other JDK versions.
    +    boolean needsDataNucleus = false;
    +    JarFile assemblyJar = null;
    +    try {
    +      assemblyJar = new JarFile(assembly);
    +      needsDataNucleus = assemblyJar.getEntry("org/apache/hadoop/hive/ql/exec/") != null;
    +    } catch (IOException ioe) {
    +      if (ioe.getMessage().indexOf("invalid CEN header") >= 0) {
    +        System.err.println(
    +          "Loading Spark jar failed.\n" +
    +          "This is likely because Spark was compiled with Java 7 and run\n" +
    +          "with Java 6 (see SPARK-1703). Please use Java 7 to run Spark\n" +
    +          "or build Spark with Java 6.");
    +        System.exit(1);
    +      } else {
    +        throw ioe;
    +      }
    +    } finally {
    +      if (assemblyJar != null) {
    +        try {
    +          assemblyJar.close();
    +        } catch (IOException e) {
    +          // Ignore.
    +        }
    +      }
    +    }
    +
    +    if (needsDataNucleus) {
    +      System.err.println("Spark assembly has been built with Hive, including Datanucleus jars " +
    +        "in classpath.");
    +      File libdir;
    +      if (new File(sparkHome, "RELEASE").isFile()) {
    +        libdir = new File(sparkHome, "lib");
    +      } else {
    +        libdir = new File(sparkHome, "lib_managed/jars");
    +      }
    +
    +      checkState(libdir.isDirectory(), "Library directory '%s' does not exist.",
    +        libdir.getAbsolutePath());
    +      for (File jar : libdir.listFiles()) {
    +        if (jar.getName().startsWith("datanucleus-")) {
    +          addToClassPath(cp, jar.getAbsolutePath());
    +        }
    +      }
    +    }
    +
    +    addToClassPath(cp, getenv("HADOOP_CONF_DIR"));
    +    addToClassPath(cp, getenv("YARN_CONF_DIR"));
    +    addToClassPath(cp, getenv("SPARK_DIST_CLASSPATH"));
    +    return cp;
    +  }
    +
    +  /**
    +   * Adds entries to the classpath.
    +   *
    +   * @param cp List to which the new classpath entries are appended.
    +   * @param entries New classpath entries (separated by File.pathSeparator).
    +   */
    +  private void addToClassPath(List<String> cp, String entries) {
    +    if (isEmpty(entries)) {
    +      return;
    +    }
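    +    // String.split() takes a regex, so quote the path separator to treat it literally.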
    +    String[] split = entries.split(Pattern.quote(File.pathSeparator));
    +    for (String entry : split) {
    +      if (!isEmpty(entry)) {
    +        if (new File(entry).isDirectory() && !entry.endsWith(File.separator)) {
    +          entry += File.separator;
    +        }
    +        cp.add(entry);
    +      }
    +    }
    +  }
    +
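    +  /**
    +   * Returns the Scala version used to build Spark, from SPARK_SCALA_VERSION if set, otherwise
    +   * by detecting which Scala build directory exists under the assembly target.
    +   */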
    +  String getScalaVersion() {
    +    String scala = getenv("SPARK_SCALA_VERSION");
    +    if (scala != null) {
    +      return scala;
    +    }
    +
    +    String sparkHome = getSparkHome();
    +    File scala210 = new File(sparkHome, "assembly/target/scala-2.10");
    +    File scala211 = new File(sparkHome, "assembly/target/scala-2.11");
    +    if (scala210.isDirectory() && scala211.isDirectory()) {
    +      checkState(false,
    +        "Presence of build for both scala versions (2.10 and 2.11) detected.\n" +
    +        "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
    +    } else if (scala210.isDirectory()) {
    +      return "2.10";
    +    } else {
    +      checkState(scala211.isDirectory(), "Cannot find any assembly build directories.");
    +      return "2.11";
    +    }
    +
    +    throw new IllegalStateException("Should not reach here.");
    +  }
    +
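    +  /**
    +   * Returns the Spark home directory, failing if it is not set in the environment.
    +   */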
    +  String getSparkHome() {
    +    String path = getenv(ENV_SPARK_HOME);
    +    checkState(path != null,
    +      "Spark home not found; set it explicitly or use the SPARK_HOME environment variable.");
    +    return path;
    +  }
    +
    +  /**
    +   * Loads the configuration file for the application, if it exists. This is either the
    +   * user-specified properties file, or the spark-defaults.conf file under the Spark configuration
    +   * directory.
    +   */
    +  Properties loadPropertiesFile() throws IOException {
    +    Properties props = new Properties();
    +    File propsFile;
    +    if (propertiesFile != null) {
    +      propsFile = new File(propertiesFile);
    +      checkArgument(propsFile.isFile(), "Invalid properties file '%s'.", propertiesFile);
    +    } else {
    +      propsFile = new File(getConfDir(), DEFAULT_PROPERTIES_FILE);
    +    }
    +
    +    if (propsFile.isFile()) {
    +      FileInputStream fd = null;
    +      try {
    +        fd = new FileInputStream(propsFile);
    +        props.load(new InputStreamReader(fd, "UTF-8"));
    +      } finally {
    +        if (fd != null) {
    +          try {
    +            fd.close();
    +          } catch (IOException e) {
    +            // Ignore.
    +          }
    +        }
    +      }
    +    }
    --- End diff --
    
    
    https://github.com/apache/spark/pull/3916/files#diff-25d5b90ef2767a711eebc1ccf019bf48R302
    
        if (propertiesFile != null) {
          propsFile = new File(propertiesFile);
      checkArgument(propsFile.isFile(), "Invalid properties file '%s'.", propertiesFile);
        } else {
          propsFile = new File(getConfDir(), DEFAULT_PROPERTIES_FILE);
        }
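
    For reference, a minimal standalone sketch of this resolution order; the
    `confDir` parameter and the hard-coded "spark-defaults.conf" name are
    stand-ins for the patch's `getConfDir()` and `DEFAULT_PROPERTIES_FILE`:

        import java.io.File;
        import java.io.FileInputStream;
        import java.io.IOException;
        import java.io.InputStreamReader;
        import java.io.Reader;
        import java.util.Properties;

        final class PropsResolutionSketch {

          // Resolution order: an explicit path wins and must exist; otherwise fall back
          // to an optional spark-defaults.conf under the configuration directory.
          static Properties load(String explicitPath, File confDir) throws IOException {
            File propsFile = (explicitPath != null)
                ? new File(explicitPath)
                : new File(confDir, "spark-defaults.conf");
            if (explicitPath != null && !propsFile.isFile()) {
              throw new IllegalArgumentException("Invalid properties file '" + explicitPath + "'.");
            }
            Properties props = new Properties();
            if (propsFile.isFile()) {
              Reader in = new InputStreamReader(new FileInputStream(propsFile), "UTF-8");
              try {
                props.load(in);
              } finally {
                in.close();
              }
            }
            return props;
          }
        }

    The explicit try/finally around the reader matches the Java 6-compatible
    style used elsewhere in this patch (no try-with-resources).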


