Repository: spark
Updated Branches:
  refs/heads/branch-1.5 d97af68af -> 1a6f0af9f


[SPARK-9980] [BUILD] Fix SBT publishLocal error due to invalid characters in doc

Tiny modification to a few comments to make ```sbt publishLocal``` work again.

Author: Herman van Hovell <[email protected]>

Closes #8209 from hvanhovell/SPARK-9980.

(cherry picked from commit a85fb6c07fdda5c74d53d6373910dcf5db3ff111)
Signed-off-by: Sean Owen <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/1a6f0af9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/1a6f0af9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/1a6f0af9

Branch: refs/heads/branch-1.5
Commit: 1a6f0af9f28519c4edf55225efcca772c0ae4803
Parents: d97af68
Author: Herman van Hovell <[email protected]>
Authored: Sat Aug 15 10:46:04 2015 +0100
Committer: Sean Owen <[email protected]>
Committed: Sat Aug 15 10:46:16 2015 +0100

----------------------------------------------------------------------
 .../java/org/apache/spark/unsafe/map/BytesToBytesMap.java    | 6 +++---
 .../apache/spark/examples/ml/JavaDeveloperApiExample.java    | 4 ++--
 .../examples/streaming/JavaStatefulNetworkWordCount.java     | 2 +-
 launcher/src/main/java/org/apache/spark/launcher/Main.java   | 4 ++--
 .../org/apache/spark/launcher/SparkClassCommandBuilder.java  | 2 +-
 .../main/java/org/apache/spark/launcher/SparkLauncher.java   | 6 +++---
 .../org/apache/spark/launcher/SparkSubmitCommandBuilder.java | 4 ++--
 .../org/apache/spark/launcher/SparkSubmitOptionParser.java   | 8 ++++----
 .../org/apache/spark/unsafe/memory/TaskMemoryManager.java    | 2 +-
 9 files changed, 19 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/1a6f0af9/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java 
b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
index 5f3a4fc..b24eed3 100644
--- a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
+++ b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
@@ -92,9 +92,9 @@ public final class BytesToBytesMap {
 
   /**
    * The maximum number of keys that BytesToBytesMap supports. The hash table 
has to be
-   * power-of-2-sized and its backing Java array can contain at most (1 << 30) 
elements, since
-   * that's the largest power-of-2 that's less than Integer.MAX_VALUE. We need 
two long array
-   * entries per key, giving us a maximum capacity of (1 << 29).
+   * power-of-2-sized and its backing Java array can contain at most (1 
&lt;&lt; 30) elements,
+   * since that's the largest power-of-2 that's less than Integer.MAX_VALUE. 
We need two long array
+   * entries per key, giving us a maximum capacity of (1 &lt;&lt; 29).
    */
   @VisibleForTesting
   static final int MAX_CAPACITY = (1 << 29);

http://git-wip-us.apache.org/repos/asf/spark/blob/1a6f0af9/examples/src/main/java/org/apache/spark/examples/ml/JavaDeveloperApiExample.java
----------------------------------------------------------------------
diff --git 
a/examples/src/main/java/org/apache/spark/examples/ml/JavaDeveloperApiExample.java
 
b/examples/src/main/java/org/apache/spark/examples/ml/JavaDeveloperApiExample.java
index 3f1fe90..a377694 100644
--- 
a/examples/src/main/java/org/apache/spark/examples/ml/JavaDeveloperApiExample.java
+++ 
b/examples/src/main/java/org/apache/spark/examples/ml/JavaDeveloperApiExample.java
@@ -124,7 +124,7 @@ class MyJavaLogisticRegression
 
   /**
    * Param for max number of iterations
-   * <p/>
+   * <p>
    * NOTE: The usual way to add a parameter to a model or algorithm is to 
include:
    * - val myParamName: ParamType
    * - def getMyParamName
@@ -222,7 +222,7 @@ class MyJavaLogisticRegressionModel
   /**
    * Create a copy of the model.
    * The copy is shallow, except for the embedded paramMap, which gets a deep 
copy.
-   * <p/>
+   * <p>
    * This is used for the defaul implementation of [[transform()]].
    *
    * In Java, we have to make this method public since Java does not 
understand Scala's protected

http://git-wip-us.apache.org/repos/asf/spark/blob/1a6f0af9/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java
----------------------------------------------------------------------
diff --git 
a/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java
 
b/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java
index 02f58f4..99b63a2 100644
--- 
a/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java
+++ 
b/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java
@@ -45,7 +45,7 @@ import 
org.apache.spark.streaming.api.java.JavaStreamingContext;
  * Usage: JavaStatefulNetworkWordCount <hostname> <port>
  * <hostname> and <port> describe the TCP server that Spark Streaming would 
connect to receive
  * data.
- * <p/>
+ * <p>
  * To run this on your local machine, you need to first run a Netcat server
  * `$ nc -lk 9999`
  * and then run the example

http://git-wip-us.apache.org/repos/asf/spark/blob/1a6f0af9/launcher/src/main/java/org/apache/spark/launcher/Main.java
----------------------------------------------------------------------
diff --git a/launcher/src/main/java/org/apache/spark/launcher/Main.java 
b/launcher/src/main/java/org/apache/spark/launcher/Main.java
index 62492f9..a4e3acc 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/Main.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/Main.java
@@ -32,7 +32,7 @@ class Main {
 
   /**
    * Usage: Main [class] [class args]
-   * <p/>
+   * <p>
    * This CLI works in two different modes:
    * <ul>
    *   <li>"spark-submit": if <i>class</i> is 
"org.apache.spark.deploy.SparkSubmit", the
@@ -42,7 +42,7 @@ class Main {
    *
    * This class works in tandem with the "bin/spark-class" script on Unix-like 
systems, and
    * "bin/spark-class2.cmd" batch script on Windows to execute the final 
command.
-   * <p/>
+   * <p>
    * On Unix-like systems, the output is a list of command arguments, 
separated by the NULL
    * character. On Windows, the output is a command line suitable for direct 
execution from the
    * script.

http://git-wip-us.apache.org/repos/asf/spark/blob/1a6f0af9/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
----------------------------------------------------------------------
diff --git 
a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
 
b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
index 5f95e2c..931a24c 100644
--- 
a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
+++ 
b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
@@ -28,7 +28,7 @@ import static org.apache.spark.launcher.CommandBuilderUtils.*;
 
 /**
  * Command builder for internal Spark classes.
- * <p/>
+ * <p>
  * This class handles building the command to launch all internal Spark 
classes except for
  * SparkSubmit (which is handled by {@link SparkSubmitCommandBuilder} class.
  */

http://git-wip-us.apache.org/repos/asf/spark/blob/1a6f0af9/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
----------------------------------------------------------------------
diff --git 
a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java 
b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
index 03c9358..5799340 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
@@ -193,7 +193,7 @@ public class SparkLauncher {
    * Adds a no-value argument to the Spark invocation. If the argument is 
known, this method
    * validates whether the argument is indeed a no-value argument, and throws 
an exception
    * otherwise.
-   * <p/>
+   * <p>
    * Use this method with caution. It is possible to create an invalid Spark 
command by passing
    * unknown arguments to this method, since those are allowed for forward 
compatibility.
    *
@@ -211,10 +211,10 @@ public class SparkLauncher {
    * Adds an argument with a value to the Spark invocation. If the argument 
name corresponds to
    * a known argument, the code validates that the argument actually expects a 
value, and throws
    * an exception otherwise.
-   * <p/>
+   * <p>
    * It is safe to add arguments modified by other methods in this class (such 
as
    * {@link #setMaster(String)} - the last invocation will be the one to take 
effect.
-   * <p/>
+   * <p>
    * Use this method with caution. It is possible to create an invalid Spark 
command by passing
    * unknown arguments to this method, since those are allowed for forward 
compatibility.
    *

http://git-wip-us.apache.org/repos/asf/spark/blob/1a6f0af9/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
----------------------------------------------------------------------
diff --git 
a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
 
b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index 4f354ce..fc87814 100644
--- 
a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ 
b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -25,11 +25,11 @@ import static 
org.apache.spark.launcher.CommandBuilderUtils.*;
 
 /**
  * Special command builder for handling a CLI invocation of SparkSubmit.
- * <p/>
+ * <p>
  * This builder adds command line parsing compatible with SparkSubmit. It 
handles setting
  * driver-side options and special parsing behavior needed for the 
special-casing certain internal
  * Spark applications.
- * <p/>
+ * <p>
  * This class has also some special features to aid launching pyspark.
  */
 class SparkSubmitCommandBuilder extends AbstractCommandBuilder {

http://git-wip-us.apache.org/repos/asf/spark/blob/1a6f0af9/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
----------------------------------------------------------------------
diff --git 
a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java 
b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
index 5779eb3..6767cc5 100644
--- 
a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
+++ 
b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
@@ -23,7 +23,7 @@ import java.util.regex.Pattern;
 
 /**
  * Parser for spark-submit command line options.
- * <p/>
+ * <p>
  * This class encapsulates the parsing code for spark-submit command line 
options, so that there
  * is a single list of options that needs to be maintained (well, sort of, but 
it makes it harder
  * to break things).
@@ -80,10 +80,10 @@ class SparkSubmitOptionParser {
    * This is the canonical list of spark-submit options. Each entry in the 
array contains the
    * different aliases for the same option; the first element of each entry is 
the "official"
    * name of the option, passed to {@link #handle(String, String)}.
-   * <p/>
+   * <p>
    * Options not listed here nor in the "switch" list below will result in a 
call to
    * {@link $#handleUnknown(String)}.
-   * <p/>
+   * <p>
    * These two arrays are visible for tests.
    */
   final String[][] opts = {
@@ -130,7 +130,7 @@ class SparkSubmitOptionParser {
 
   /**
    * Parse a list of spark-submit command line options.
-   * <p/>
+   * <p>
    * See SparkSubmitArguments.scala for a more formal description of available 
options.
    *
    * @throws IllegalArgumentException If an error is found during parsing.

http://git-wip-us.apache.org/repos/asf/spark/blob/1a6f0af9/unsafe/src/main/java/org/apache/spark/unsafe/memory/TaskMemoryManager.java
----------------------------------------------------------------------
diff --git 
a/unsafe/src/main/java/org/apache/spark/unsafe/memory/TaskMemoryManager.java 
b/unsafe/src/main/java/org/apache/spark/unsafe/memory/TaskMemoryManager.java
index ca70d7f..97b2c93 100644
--- a/unsafe/src/main/java/org/apache/spark/unsafe/memory/TaskMemoryManager.java
+++ b/unsafe/src/main/java/org/apache/spark/unsafe/memory/TaskMemoryManager.java
@@ -60,7 +60,7 @@ public class TaskMemoryManager {
 
   /**
    * Maximum supported data page size (in bytes). In principle, the maximum 
addressable page size is
-   * (1L << OFFSET_BITS) bytes, which is 2+ petabytes. However, the on-heap 
allocator's maximum page
+   * (1L &lt;&lt; OFFSET_BITS) bytes, which is 2+ petabytes. However, the 
on-heap allocator's maximum page
    * size is limited by the maximum amount of data that can be stored in a  
long[] array, which is
    * (2^32 - 1) * 8 bytes (or 16 gigabytes). Therefore, we cap this at 16 
gigabytes.
    */


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to