Repository: spark
Updated Branches:
  refs/heads/master db81b9d89 -> e84815dc3


[SPARK-7733] [CORE] [BUILD] Update build, code to use Java 7 for 1.5.0+

Update build to use Java 7, and remove some comments and special-case support 
for Java 6.

Author: Sean Owen <so...@cloudera.com>

Closes #6265 from srowen/SPARK-7733 and squashes the following commits:

59bda4e [Sean Owen] Update build to use Java 7, and remove some comments and 
special-case support for Java 6


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e84815dc
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e84815dc
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e84815dc

Branch: refs/heads/master
Commit: e84815dc333a69368a48e0152f02934980768a14
Parents: db81b9d
Author: Sean Owen <so...@cloudera.com>
Authored: Sun Jun 7 20:18:13 2015 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Sun Jun 7 20:18:13 2015 +0100

----------------------------------------------------------------------
 bin/spark-class                                   | 18 ------------------
 .../apache/spark/util/MutableURLClassLoader.scala |  4 ++--
 .../main/scala/org/apache/spark/util/Utils.scala  |  3 +--
 .../spark/util/collection/SorterSuite.scala       |  3 ---
 docs/building-spark.md                            |  6 +-----
 docs/index.md                                     |  2 +-
 docs/programming-guide.md                         |  2 +-
 make-distribution.sh                              | 16 ----------------
 pom.xml                                           |  2 +-
 .../apache/spark/unsafe/PlatformDependent.java    |  3 +--
 10 files changed, 8 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/bin/spark-class
----------------------------------------------------------------------
diff --git a/bin/spark-class b/bin/spark-class
index 7bb1afe..2b59e5d 100755
--- a/bin/spark-class
+++ b/bin/spark-class
@@ -58,24 +58,6 @@ fi
 
 SPARK_ASSEMBLY_JAR="${ASSEMBLY_DIR}/${ASSEMBLY_JARS}"
 
-# Verify that versions of java used to build the jars and run Spark are compatible
-if [ -n "$JAVA_HOME" ]; then
-  JAR_CMD="$JAVA_HOME/bin/jar"
-else
-  JAR_CMD="jar"
-fi
-
-if [ $(command -v "$JAR_CMD") ] ; then
-  jar_error_check=$("$JAR_CMD" -tf "$SPARK_ASSEMBLY_JAR" nonexistent/class/path 2>&1)
-  if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
-    echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
-    echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
-    echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
-    echo "or build Spark with Java 6." 1>&2
-    exit 1
-  fi
-fi
-
 LAUNCH_CLASSPATH="$SPARK_ASSEMBLY_JAR"
 
 # Add the launcher build dir to the classpath if requested.

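For context on the check removed above: jars built with Java 7 can use Zip64 extensions that Java 6 tooling cannot read, surfacing as the "invalid CEN header" error tracked as SPARK-1703. A minimal Scala sketch of the same readability probe (illustrative, not Spark code):

    import java.util.jar.JarFile
    import scala.util.Try

    // True if this JVM can open and list the jar. On Java 6, a Zip64 jar
    // produced by a Java 7 build fails here (the "invalid CEN header" case).
    def jarReadable(path: String): Boolean = Try {
      val jar = new JarFile(path)
      try jar.entries().hasMoreElements finally jar.close()
    }.isSuccess

With Java 7 as the minimum runtime, the mismatch can no longer occur, so the probe is simply dropped.
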
http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/core/src/main/scala/org/apache/spark/util/MutableURLClassLoader.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/MutableURLClassLoader.scala b/core/src/main/scala/org/apache/spark/util/MutableURLClassLoader.scala
index 1e0ba5c..169489d 100644
--- a/core/src/main/scala/org/apache/spark/util/MutableURLClassLoader.scala
+++ b/core/src/main/scala/org/apache/spark/util/MutableURLClassLoader.scala
@@ -52,8 +52,8 @@ private[spark] class ChildFirstURLClassLoader(urls: Array[URL], parent: ClassLoa
    * Used to implement fine-grained class loading locks similar to what is done by Java 7. This
    * prevents deadlock issues when using non-hierarchical class loaders.
    *
-   * Note that due to Java 6 compatibility (and some issues with implementing class loaders in
-   * Scala), Java 7's `ClassLoader.registerAsParallelCapable` method is not called.
+   * Note that due to some issues with implementing class loaders in
+   * Scala, Java 7's `ClassLoader.registerAsParallelCapable` method is not called.
    */
   private val locks = new ConcurrentHashMap[String, Object]()
 

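For readers following the locks map above: a hedged sketch of how a per-class-name lock table is typically built on ConcurrentHashMap (names illustrative, not necessarily Spark's exact code):

    import java.util.concurrent.ConcurrentHashMap

    object ClassLoadingLocks {
      private val locks = new ConcurrentHashMap[String, Object]()

      // One lock object per class name: loading unrelated classes never
      // contends on a single coarse lock, which avoids the deadlocks the
      // doc comment mentions for non-hierarchical loaders.
      def lockFor(className: String): Object = {
        val newLock = new Object()
        val existing = locks.putIfAbsent(className, newLock)
        if (existing == null) newLock else existing
      }
    }

Callers would then synchronize on lockFor(name) before defining the class.
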
http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/core/src/main/scala/org/apache/spark/util/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 5f13241..153ece6 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1295,8 +1295,7 @@ private[spark] object Utils extends Logging {
       } catch {
         case t: Throwable =>
           if (originalThrowable != null) {
-            // We could do originalThrowable.addSuppressed(t), but it's
-            // not available in JDK 1.6.
+            originalThrowable.addSuppressed(t)
             logWarning(s"Suppressing exception in finally: " + t.getMessage, t)
             throw originalThrowable
           } else {

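With Java 7 as the floor, the hunk above can call Throwable.addSuppressed, so an exception thrown while cleaning up in a finally block is recorded on the original failure instead of masking it. A self-contained sketch of the pattern (helper name illustrative):

    object SafeFinally {
      def tryWithSafeFinally[T](block: => T)(finallyBlock: => Unit): T = {
        var original: Throwable = null
        try {
          block
        } catch {
          case t: Throwable =>
            original = t
            throw t
        } finally {
          try {
            finallyBlock
          } catch {
            case t: Throwable if original != null =>
              // Java 7+: keep the original failure primary, attach the
              // finally-block failure as suppressed.
              original.addSuppressed(t)
              throw original
          }
        }
      }
    }

If only the finally block throws, the guard fails and that exception propagates normally.
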
http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala
index 72fd6da..b2f5d90 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala
@@ -103,9 +103,6 @@ class SorterSuite extends SparkFunSuite {
   * has the keys and values alternating. The basic Java sorts work only on the keys, so the
   * real Java solution is to make Tuple2s to store the keys and values and sort an array of
   * those, while the Sorter approach can work directly on the input data format.
-   *
-   * Note that the Java implementation varies tremendously between Java 6 and Java 7, when
-   * the Java sort changed from merge sort to TimSort.
    */
   ignore("Sorter benchmark for key-value pairs") {
     val numElements = 25000000 // 25 mil

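The benchmark comment above describes the JDK baseline: pack the alternating key/value slots into Tuple2s and sort those with the stock object sort (TimSort from Java 7 on). A small script-style illustration with made-up data:

    // Keys and values alternate in one flat array, as in the benchmark.
    val kv = Array(3, 30, 1, 10, 2, 20)
    // Baseline: materialize (key, value) pairs, then sort by key.
    val pairs: Array[(Int, Int)] =
      kv.grouped(2).map { case Array(k, v) => (k, v) }.toArray
    val sorted = pairs.sortBy(_._1) // delegates to java.util.Arrays.sort
    // sorted: Array((1,10), (2,20), (3,30))
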
http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/docs/building-spark.md
----------------------------------------------------------------------
diff --git a/docs/building-spark.md b/docs/building-spark.md
index 78cb908..2128fdf 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -7,11 +7,7 @@ redirect_from: "building-with-maven.html"
 * This will become a table of contents (this text will be scraped).
 {:toc}
 
-Building Spark using Maven requires Maven 3.0.4 or newer and Java 6+.
-
-**Note:** Building Spark with Java 7 or later can create JAR files that may not be
-readable with early versions of Java 6, due to the large number of files in the JAR
-archive. Build with Java 6 if this is an issue for your deployment.
+Building Spark using Maven requires Maven 3.0.4 or newer and Java 7+.
 
 # Building with `build/mvn`
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/docs/index.md
----------------------------------------------------------------------
diff --git a/docs/index.md b/docs/index.md
index fac071d..7939657 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -20,7 +20,7 @@ Spark runs on both Windows and UNIX-like systems (e.g. Linux, Mac OS). It's easy
 locally on one machine --- all you need is to have `java` installed on your system `PATH`,
 or the `JAVA_HOME` environment variable pointing to a Java installation.
 
-Spark runs on Java 6+, Python 2.6+ and R 3.1+. For the Scala API, Spark {{site.SPARK_VERSION}} uses
+Spark runs on Java 7+, Python 2.6+ and R 3.1+. For the Scala API, Spark {{site.SPARK_VERSION}} uses
 Scala {{site.SCALA_BINARY_VERSION}}. You will need to use a compatible Scala version 
 ({{site.SCALA_BINARY_VERSION}}.x).
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/docs/programming-guide.md
----------------------------------------------------------------------
diff --git a/docs/programming-guide.md b/docs/programming-guide.md
index 10f474f..d5ff416 100644
--- a/docs/programming-guide.md
+++ b/docs/programming-guide.md
@@ -54,7 +54,7 @@ import org.apache.spark.SparkConf
 
 <div data-lang="java"  markdown="1">
 
-Spark {{site.SPARK_VERSION}} works with Java 6 and higher. If you are using Java 8, Spark supports
+Spark {{site.SPARK_VERSION}} works with Java 7 and higher. If you are using Java 8, Spark supports
 [lambda expressions](http://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html)
 for concisely writing functions, otherwise you can use the classes in the
 [org.apache.spark.api.java.function](api/java/index.html?org/apache/spark/api/java/function/package-summary.html)
 package.

http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/make-distribution.sh
----------------------------------------------------------------------
diff --git a/make-distribution.sh b/make-distribution.sh
index a2b0c43..9f063da 100755
--- a/make-distribution.sh
+++ b/make-distribution.sh
@@ -141,22 +141,6 @@ SPARK_HIVE=$("$MVN" help:evaluate -Dexpression=project.activeProfiles -pl sql/hi
     # because we use "set -o pipefail"
     echo -n)
 
-JAVA_CMD="$JAVA_HOME"/bin/java
-JAVA_VERSION=$("$JAVA_CMD" -version 2>&1)
-if [[ ! "$JAVA_VERSION" =~ "1.6" && -z "$SKIP_JAVA_TEST" ]]; then
-  echo "***NOTE***: JAVA_HOME is not set to a JDK 6 installation. The 
resulting"
-  echo "            distribution may not work well with PySpark and will not 
run"
-  echo "            with Java 6 (See SPARK-1703 and SPARK-1911)."
-  echo "            This test can be disabled by adding --skip-java-test."
-  echo "Output from 'java -version' was:"
-  echo "$JAVA_VERSION"
-  read -p "Would you like to continue anyways? [y,n]: " -r
-  if [[ ! "$REPLY" =~ ^[Yy]$ ]]; then
-    echo "Okay, exiting."
-    exit 1
-  fi
-fi
-
 if [ "$NAME" == "none" ]; then
   NAME=$SPARK_HADOOP_VERSION
 fi

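The retired prompt above keyed off the `java -version` banner. For reference only, the same minimum-version gate sketched in Scala, assuming the pre-Java-9 "1.x" version scheme of this era:

    // Parse e.g. "1.7.0_80" -> major 7; "1.6.0_45" -> major 6.
    val version = System.getProperty("java.version")
    val major = version.split("\\.")(1).toInt
    require(major >= 7, s"Spark 1.5+ needs Java 7+, found $version")
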
http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index e65448e..67b6375 100644
--- a/pom.xml
+++ b/pom.xml
@@ -116,7 +116,7 @@
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
     <akka.group>com.typesafe.akka</akka.group>
     <akka.version>2.3.11</akka.version>
-    <java.version>1.6</java.version>
+    <java.version>1.7</java.version>
     <sbt.project.name>spark</sbt.project.name>
     <mesos.version>0.21.1</mesos.version>
     <mesos.classifier>shaded-protobuf</mesos.classifier>

http://git-wip-us.apache.org/repos/asf/spark/blob/e84815dc/unsafe/src/main/java/org/apache/spark/unsafe/PlatformDependent.java
----------------------------------------------------------------------
diff --git a/unsafe/src/main/java/org/apache/spark/unsafe/PlatformDependent.java b/unsafe/src/main/java/org/apache/spark/unsafe/PlatformDependent.java
index 24b2892..192c671 100644
--- a/unsafe/src/main/java/org/apache/spark/unsafe/PlatformDependent.java
+++ b/unsafe/src/main/java/org/apache/spark/unsafe/PlatformDependent.java
@@ -25,8 +25,7 @@ public final class PlatformDependent {
 
   /**
   * Facade in front of {@link sun.misc.Unsafe}, used to avoid directly exposing Unsafe outside of
-   * this package. This also lets us aovid accidental use of deprecated methods or methods that
-   * aren't present in Java 6.
+   * this package. This also lets us avoid accidental use of deprecated methods.
    */
   public static final class UNSAFE {
 

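The UNSAFE facade above narrows what callers can reach on sun.misc.Unsafe. A hedged Scala sketch of the same facade idea (field and method selection are illustrative, not Spark's exact surface):

    object UnsafeFacade {
      private val unsafe: sun.misc.Unsafe = {
        // Unsafe hides its singleton; reflection is the conventional way in.
        val f = classOf[sun.misc.Unsafe].getDeclaredField("theUnsafe")
        f.setAccessible(true)
        f.get(null).asInstanceOf[sun.misc.Unsafe]
      }

      // Only audited calls are re-exported; deprecated Unsafe members stay
      // unreachable through the facade.
      def getLong(obj: AnyRef, offset: Long): Long = unsafe.getLong(obj, offset)
      def putLong(obj: AnyRef, offset: Long, value: Long): Unit =
        unsafe.putLong(obj, offset, value)
    }
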
