[SPARK-14011][CORE][SQL] Enable `LineLength` Java checkstyle rule

## What changes were proposed in this pull request?

[Spark Coding Style 
Guide](https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide)
 has a 100-character limit on lines, but it has been disabled for Java since 11/09/15. 
This PR enables the **LineLength** checkstyle rule again. To help with that, this also 
introduces **RedundantImport** and **RedundantModifier**, too. The following is 
the diff on `checkstyle.xml`.

```xml
-        <!-- TODO: 11/09/15 disabled - the lengths are currently > 100 in many 
places -->
-        <!--
         <module name="LineLength">
             <property name="max" value="100"/>
             <property name="ignorePattern" value="^package.*|^import.*|a 
href|href|http://|https://|ftp://"/>
         </module>
-        -->
         <module name="NoLineWrap"/>
         <module name="EmptyBlock">
             <property name="option" value="TEXT"/>
 -167,5 +164,7
         </module>
         <module name="CommentsIndentation"/>
         <module name="UnusedImports"/>
+        <module name="RedundantImport"/>
+        <module name="RedundantModifier"/>
```

## How was this patch tested?

Currently, `lint-java` is disabled in Jenkins. It needs a manual test.
After passing the Jenkins tests, `dev/lint-java` should pass locally.

Author: Dongjoon Hyun <[email protected]>

Closes #11831 from dongjoon-hyun/SPARK-14011.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/20fd2541
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/20fd2541
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/20fd2541

Branch: refs/heads/master
Commit: 20fd254101553cb5a4c932c8d03064899112bee6
Parents: e474088
Author: Dongjoon Hyun <[email protected]>
Authored: Mon Mar 21 07:58:57 2016 +0000
Committer: Sean Owen <[email protected]>
Committed: Mon Mar 21 07:58:57 2016 +0000

----------------------------------------------------------------------
 .../apache/spark/network/TransportContext.java  |   3 +-
 .../spark/network/client/StreamCallback.java    |   6 +-
 .../network/client/TransportClientFactory.java  |   2 +-
 .../apache/spark/network/protocol/Message.java  |   4 +-
 .../spark/network/protocol/RequestMessage.java  |   2 -
 .../spark/network/protocol/ResponseMessage.java |   2 -
 .../apache/spark/network/sasl/SaslMessage.java  |   4 +-
 .../network/server/OneForOneStreamManager.java  |   4 +-
 .../network/server/TransportChannelHandler.java |   4 +-
 .../org/apache/spark/network/util/ByteUnit.java |   2 +-
 .../util/SystemPropertyConfigProvider.java      |   2 -
 .../network/util/TransportFrameDecoder.java     |   2 +-
 .../network/sasl/ShuffleSecretManager.java      |   1 -
 .../shuffle/ExternalShuffleBlockHandler.java    |   3 +-
 .../shuffle/ExternalShuffleBlockResolver.java   |   4 +-
 .../network/shuffle/RetryingBlockFetcher.java   |   2 +-
 .../shuffle/protocol/BlockTransferMessage.java  |   4 +-
 .../network/sasl/SaslIntegrationSuite.java      |   3 +-
 .../ExternalShuffleBlockHandlerSuite.java       |   3 +-
 .../spark/util/sketch/Murmur3_x86_32.java       |   2 +-
 .../spark/unsafe/bitset/BitSetMethods.java      |   3 +-
 .../java/JavaSparkContextVarargsWorkaround.java |   3 +-
 .../spark/api/java/function/DoubleFunction.java |   2 +-
 .../spark/api/java/function/Function2.java      |   2 +-
 .../spark/api/java/function/Function3.java      |   2 +-
 .../spark/api/java/function/PairFunction.java   |   2 +-
 .../apache/spark/memory/TaskMemoryManager.java  |   6 +-
 .../sort/BypassMergeSortShuffleWriter.java      |   2 +-
 .../shuffle/sort/ShuffleExternalSorter.java     |   2 +-
 .../shuffle/sort/ShuffleInMemorySorter.java     |   4 +-
 .../apache/spark/shuffle/sort/SpillInfo.java    |   2 +-
 .../spark/shuffle/sort/UnsafeShuffleWriter.java |   6 +-
 .../apache/spark/status/api/v1/TaskSorting.java |   2 +-
 .../spark/unsafe/map/BytesToBytesMap.java       |   2 +-
 .../unsafe/sort/UnsafeExternalSorter.java       |   4 +-
 .../unsafe/sort/UnsafeSortDataFormat.java       |   3 +-
 .../unsafe/sort/UnsafeSorterSpillMerger.java    |   4 +-
 .../java/org/apache/spark/JavaAPISuite.java     | 464 ++++++++++---------
 .../map/AbstractBytesToBytesMapSuite.java       |   9 +-
 .../unsafe/sort/UnsafeExternalSorterSuite.java  |   6 +-
 .../unsafe/sort/UnsafeInMemorySorterSuite.java  |   4 +-
 dev/checkstyle-suppressions.xml                 |  10 +-
 dev/checkstyle.xml                              |   5 +-
 .../org/apache/spark/examples/JavaPageRank.java |  15 +-
 .../apache/spark/examples/JavaWordCount.java    |  26 +-
 .../JavaDecisionTreeClassificationExample.java  |   5 +-
 .../examples/ml/JavaDeveloperApiExample.java    |   6 +-
 ...avaGradientBoostedTreeClassifierExample.java |   3 +-
 .../JavaBinaryClassificationMetricsExample.java |   3 +-
 .../mllib/JavaIsotonicRegressionExample.java    |   6 +-
 .../mllib/JavaStreamingTestExample.java         |   1 -
 .../streaming/JavaDirectKafkaWordCount.java     |   3 +-
 .../examples/streaming/JavaQueueStream.java     |   1 -
 .../JavaRecoverableNetworkWordCount.java        |   9 +-
 .../streaming/JavaStatefulNetworkWordCount.java |   3 +-
 .../streaming/JavaKinesisWordCountASL.java      |   5 +-
 .../spark/launcher/AbstractCommandBuilder.java  |   2 +-
 .../spark/launcher/CommandBuilderUtils.java     |   2 +-
 .../apache/spark/launcher/SparkAppHandle.java   |   2 +-
 .../JavaLogisticRegressionSuite.java            |   1 -
 .../ml/classification/JavaOneVsRestSuite.java   |   3 +-
 .../ml/feature/JavaStopWordsRemoverSuite.java   |   3 +-
 .../sql/execution/UnsafeExternalRowSorter.java  |   2 +-
 .../UnsafeFixedWidthAggregationMap.java         |   4 +-
 .../sql/execution/UnsafeKVExternalSorter.java   |   2 +-
 .../sql/execution/vectorized/ColumnVector.java  |   6 +-
 .../sql/execution/vectorized/ColumnarBatch.java |  44 +-
 .../vectorized/OffHeapColumnVector.java         |  90 ++--
 .../vectorized/OnHeapColumnVector.java          |  92 ++--
 .../apache/spark/sql/JavaApplySchemaSuite.java  |  13 +-
 .../apache/spark/sql/JavaDataFrameSuite.java    |   3 +-
 .../org/apache/spark/sql/hive/test/Complex.java |   2 +-
 .../apache/spark/streaming/JavaAPISuite.java    | 113 +++--
 .../JavaStreamingListenerAPISuite.java          |   6 +-
 74 files changed, 579 insertions(+), 505 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
 
b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
index 238710d..5320b28 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
@@ -43,7 +43,8 @@ import org.apache.spark.network.util.TransportFrameDecoder;
 
 /**
  * Contains the context to create a {@link TransportServer}, {@link 
TransportClientFactory}, and to
- * setup Netty Channel pipelines with a {@link 
org.apache.spark.network.server.TransportChannelHandler}.
+ * setup Netty Channel pipelines with a
+ * {@link org.apache.spark.network.server.TransportChannelHandler}.
  *
  * There are two communication protocols that the TransportClient provides, 
control-plane RPCs and
  * data-plane "chunk fetching". The handling of the RPCs is performed outside 
of the scope of the

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java
 
b/common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java
index 29e6a30..d322aec 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java
@@ -21,9 +21,9 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 
 /**
- * Callback for streaming data. Stream data will be offered to the {@link 
#onData(String, ByteBuffer)}
- * method as it arrives. Once all the stream data is received, {@link 
#onComplete(String)} will be
- * called.
+ * Callback for streaming data. Stream data will be offered to the
+ * {@link #onData(String, ByteBuffer)} method as it arrives. Once all the 
stream data is received,
+ * {@link #onComplete(String)} will be called.
  * <p>
  * The network library guarantees that a single thread will call these methods 
at a time, but
  * different call may be made by different threads.

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
index 1008c67..f179bad 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
@@ -64,7 +64,7 @@ public class TransportClientFactory implements Closeable {
     TransportClient[] clients;
     Object[] locks;
 
-    public ClientPool(int size) {
+    ClientPool(int size) {
       clients = new TransportClient[size];
       locks = new Object[size];
       for (int i = 0; i < size; i++) {

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java
index 66f5b8b..434935a 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java
@@ -33,7 +33,7 @@ public interface Message extends Encodable {
   boolean isBodyInFrame();
 
   /** Preceding every serialized Message is its type, which allows us to 
deserialize it. */
-  public static enum Type implements Encodable {
+  enum Type implements Encodable {
     ChunkFetchRequest(0), ChunkFetchSuccess(1), ChunkFetchFailure(2),
     RpcRequest(3), RpcResponse(4), RpcFailure(5),
     StreamRequest(6), StreamResponse(7), StreamFailure(8),
@@ -41,7 +41,7 @@ public interface Message extends Encodable {
 
     private final byte id;
 
-    private Type(int id) {
+    Type(int id) {
       assert id < 128 : "Cannot have more than 128 message types";
       this.id = (byte) id;
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java
index 31b15bb..b85171e 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java
@@ -17,8 +17,6 @@
 
 package org.apache.spark.network.protocol;
 
-import org.apache.spark.network.protocol.Message;
-
 /** Messages from the client to the server. */
 public interface RequestMessage extends Message {
   // token interface

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java
index 6edffd1..194e6d9 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java
@@ -17,8 +17,6 @@
 
 package org.apache.spark.network.protocol;
 
-import org.apache.spark.network.protocol.Message;
-
 /** Messages from the server to the client. */
 public interface ResponseMessage extends Message {
   // token interface

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
index e52b526..7331c2b 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
@@ -36,11 +36,11 @@ class SaslMessage extends AbstractMessage {
 
   public final String appId;
 
-  public SaslMessage(String appId, byte[] message) {
+  SaslMessage(String appId, byte[] message) {
     this(appId, Unpooled.wrappedBuffer(message));
   }
 
-  public SaslMessage(String appId, ByteBuf message) {
+  SaslMessage(String appId, ByteBuf message) {
     super(new NettyManagedBuffer(message), true);
     this.appId = appId;
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
 
b/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
index ea9e735..e2222ae 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
@@ -32,8 +32,8 @@ import org.apache.spark.network.buffer.ManagedBuffer;
 import org.apache.spark.network.client.TransportClient;
 
 /**
- * StreamManager which allows registration of an 
Iterator&lt;ManagedBuffer&gt;, which are individually
- * fetched as chunks by the client. Each registered buffer is one chunk.
+ * StreamManager which allows registration of an 
Iterator&lt;ManagedBuffer&gt;, which are
+ * individually fetched as chunks by the client. Each registered buffer is one 
chunk.
  */
 public class OneForOneStreamManager extends StreamManager {
   private final Logger logger = 
LoggerFactory.getLogger(OneForOneStreamManager.class);

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
index 18a9b78..f222337 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
@@ -141,8 +141,8 @@ public class TransportChannelHandler extends 
SimpleChannelInboundHandler<Message
           if (responseHandler.numOutstandingRequests() > 0) {
             String address = NettyUtils.getRemoteAddress(ctx.channel());
             logger.error("Connection to {} has been quiet for {} ms while 
there are outstanding " +
-              "requests. Assuming connection is dead; please adjust 
spark.network.timeout if this " +
-              "is wrong.", address, requestTimeoutNs / 1000 / 1000);
+              "requests. Assuming connection is dead; please adjust 
spark.network.timeout if " +
+              "this is wrong.", address, requestTimeoutNs / 1000 / 1000);
             client.timeOut();
             ctx.close();
           } else if (closeIdleConnections) {

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
 
b/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
index a2f0183..e097714 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
@@ -24,7 +24,7 @@ public enum ByteUnit {
   TiB ((long) Math.pow(1024L, 4L)),
   PiB ((long) Math.pow(1024L, 5L));
 
-  private ByteUnit(long multiplier) {
+  ByteUnit(long multiplier) {
     this.multiplier = multiplier;
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java
 
b/common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java
index 5f20b70..f15ec8d 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java
@@ -19,8 +19,6 @@ package org.apache.spark.network.util;
 
 import java.util.NoSuchElementException;
 
-import org.apache.spark.network.util.ConfigProvider;
-
 /** Uses System properties to obtain config values. */
 public class SystemPropertyConfigProvider extends ConfigProvider {
   @Override

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
 
b/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
index 3f7024a..bd1830e 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
@@ -205,7 +205,7 @@ public class TransportFrameDecoder extends 
ChannelInboundHandlerAdapter {
     return interceptor != null;
   }
 
-  public static interface Interceptor {
+  public interface Interceptor {
 
     /**
      * Handles data received from the remote end.

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
----------------------------------------------------------------------
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
index cdce297..268cb40 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
@@ -17,7 +17,6 @@
 
 package org.apache.spark.network.sasl;
 
-import java.lang.Override;
 import java.nio.ByteBuffer;
 import java.util.concurrent.ConcurrentHashMap;
 

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandler.java
----------------------------------------------------------------------
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandler.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandler.java
index f22187a..f8d03b3 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandler.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandler.java
@@ -52,7 +52,8 @@ public class ExternalShuffleBlockHandler extends RpcHandler {
   final ExternalShuffleBlockResolver blockManager;
   private final OneForOneStreamManager streamManager;
 
-  public ExternalShuffleBlockHandler(TransportConf conf, File 
registeredExecutorFile) throws IOException {
+  public ExternalShuffleBlockHandler(TransportConf conf, File 
registeredExecutorFile)
+    throws IOException {
     this(new OneForOneStreamManager(),
       new ExternalShuffleBlockResolver(conf, registeredExecutorFile));
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
----------------------------------------------------------------------
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
index 460110d..ce5c68e 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
@@ -423,7 +423,9 @@ public class ExternalShuffleBlockResolver {
     public final int major;
     public final int minor;
 
-    @JsonCreator public StoreVersion(@JsonProperty("major") int major, 
@JsonProperty("minor") int minor) {
+    @JsonCreator public StoreVersion(
+      @JsonProperty("major") int major,
+      @JsonProperty("minor") int minor) {
       this.major = major;
       this.minor = minor;
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
----------------------------------------------------------------------
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
index 4bb0498..d81cf86 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
@@ -46,7 +46,7 @@ public class RetryingBlockFetcher {
    * Used to initiate the first fetch for all blocks, and subsequently for 
retrying the fetch on any
    * remaining blocks.
    */
-  public static interface BlockFetchStarter {
+  public interface BlockFetchStarter {
     /**
      * Creates a new BlockFetcher to fetch the given block ids which may do 
some synchronous
      * bootstrapping followed by fully asynchronous block fetching.

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockTransferMessage.java
----------------------------------------------------------------------
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockTransferMessage.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockTransferMessage.java
index 21c0ff4..9af6759 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockTransferMessage.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockTransferMessage.java
@@ -40,13 +40,13 @@ public abstract class BlockTransferMessage implements 
Encodable {
   protected abstract Type type();
 
   /** Preceding every serialized message is its type, which allows us to 
deserialize it. */
-  public static enum Type {
+  public enum Type {
     OPEN_BLOCKS(0), UPLOAD_BLOCK(1), REGISTER_EXECUTOR(2), STREAM_HANDLE(3), 
REGISTER_DRIVER(4),
     HEARTBEAT(5);
 
     private final byte id;
 
-    private Type(int id) {
+    Type(int id) {
       assert id < 128 : "Cannot have more than 128 message types";
       this.id = (byte) id;
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-shuffle/src/test/java/org/apache/spark/network/sasl/SaslIntegrationSuite.java
----------------------------------------------------------------------
diff --git 
a/common/network-shuffle/src/test/java/org/apache/spark/network/sasl/SaslIntegrationSuite.java
 
b/common/network-shuffle/src/test/java/org/apache/spark/network/sasl/SaslIntegrationSuite.java
index 5322fcd..5bf9924 100644
--- 
a/common/network-shuffle/src/test/java/org/apache/spark/network/sasl/SaslIntegrationSuite.java
+++ 
b/common/network-shuffle/src/test/java/org/apache/spark/network/sasl/SaslIntegrationSuite.java
@@ -212,7 +212,8 @@ public class SaslIntegrationSuite {
       };
 
       String[] blockIds = { "shuffle_2_3_4", "shuffle_6_7_8" };
-      OneForOneBlockFetcher fetcher = new OneForOneBlockFetcher(client1, 
"app-2", "0", blockIds, listener);
+      OneForOneBlockFetcher fetcher =
+          new OneForOneBlockFetcher(client1, "app-2", "0", blockIds, listener);
       fetcher.start();
       blockFetchLatch.await();
       checkSecurityException(exception.get());

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java
----------------------------------------------------------------------
diff --git 
a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java
 
b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java
index 9379412..c2e0b74 100644
--- 
a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java
+++ 
b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java
@@ -113,7 +113,8 @@ public class ExternalShuffleBlockHandlerSuite {
       // pass
     }
 
-    ByteBuffer unexpectedMsg = new UploadBlock("a", "e", "b", new byte[1], new 
byte[2]).toByteBuffer();
+    ByteBuffer unexpectedMsg = new UploadBlock("a", "e", "b", new byte[1],
+      new byte[2]).toByteBuffer();
     try {
       handler.receive(client, unexpectedMsg, callback);
       fail("Should have thrown");

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/sketch/src/main/java/org/apache/spark/util/sketch/Murmur3_x86_32.java
----------------------------------------------------------------------
diff --git 
a/common/sketch/src/main/java/org/apache/spark/util/sketch/Murmur3_x86_32.java 
b/common/sketch/src/main/java/org/apache/spark/util/sketch/Murmur3_x86_32.java
index 3d1f28b..a61ce4f 100644
--- 
a/common/sketch/src/main/java/org/apache/spark/util/sketch/Murmur3_x86_32.java
+++ 
b/common/sketch/src/main/java/org/apache/spark/util/sketch/Murmur3_x86_32.java
@@ -28,7 +28,7 @@ final class Murmur3_x86_32 {
 
   private final int seed;
 
-  public Murmur3_x86_32(int seed) {
+  Murmur3_x86_32(int seed) {
     this.seed = seed;
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/common/unsafe/src/main/java/org/apache/spark/unsafe/bitset/BitSetMethods.java
----------------------------------------------------------------------
diff --git 
a/common/unsafe/src/main/java/org/apache/spark/unsafe/bitset/BitSetMethods.java 
b/common/unsafe/src/main/java/org/apache/spark/unsafe/bitset/BitSetMethods.java
index 7857bf6..c8c5738 100644
--- 
a/common/unsafe/src/main/java/org/apache/spark/unsafe/bitset/BitSetMethods.java
+++ 
b/common/unsafe/src/main/java/org/apache/spark/unsafe/bitset/BitSetMethods.java
@@ -87,7 +87,8 @@ public final class BitSetMethods {
    * To iterate over the true bits in a BitSet, use the following loop:
    * <pre>
    * <code>
-   *  for (long i = bs.nextSetBit(0, sizeInWords); i &gt;= 0; i = 
bs.nextSetBit(i + 1, sizeInWords)) {
+   *  for (long i = bs.nextSetBit(0, sizeInWords); i &gt;= 0;
+   *    i = bs.nextSetBit(i + 1, sizeInWords)) {
    *    // operate on index i here
    *  }
    * </code>

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
 
b/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
index d4c42b3..0dd8faf 100644
--- 
a/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
+++ 
b/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
@@ -62,5 +62,6 @@ abstract class JavaSparkContextVarargsWorkaround {
   // These methods take separate "first" and "rest" elements to avoid having 
the same type erasure
   public abstract <T> JavaRDD<T> union(JavaRDD<T> first, List<JavaRDD<T>> 
rest);
   public abstract JavaDoubleRDD union(JavaDoubleRDD first, List<JavaDoubleRDD> 
rest);
-  public abstract <K, V> JavaPairRDD<K, V> union(JavaPairRDD<K, V> first, 
List<JavaPairRDD<K, V>> rest);
+  public abstract <K, V> JavaPairRDD<K, V> union(JavaPairRDD<K, V> first, 
List<JavaPairRDD<K, V>>
+    rest);
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/api/java/function/DoubleFunction.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/api/java/function/DoubleFunction.java 
b/core/src/main/java/org/apache/spark/api/java/function/DoubleFunction.java
index 150144e..bf16f79 100644
--- a/core/src/main/java/org/apache/spark/api/java/function/DoubleFunction.java
+++ b/core/src/main/java/org/apache/spark/api/java/function/DoubleFunction.java
@@ -23,5 +23,5 @@ import java.io.Serializable;
  *  A function that returns Doubles, and can be used to construct DoubleRDDs.
  */
 public interface DoubleFunction<T> extends Serializable {
-  public double call(T t) throws Exception;
+  double call(T t) throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/api/java/function/Function2.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/api/java/function/Function2.java 
b/core/src/main/java/org/apache/spark/api/java/function/Function2.java
index 793caaa..a975ce3 100644
--- a/core/src/main/java/org/apache/spark/api/java/function/Function2.java
+++ b/core/src/main/java/org/apache/spark/api/java/function/Function2.java
@@ -23,5 +23,5 @@ import java.io.Serializable;
  * A two-argument function that takes arguments of type T1 and T2 and returns 
an R.
  */
 public interface Function2<T1, T2, R> extends Serializable {
-  public R call(T1 v1, T2 v2) throws Exception;
+  R call(T1 v1, T2 v2) throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/api/java/function/Function3.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/api/java/function/Function3.java 
b/core/src/main/java/org/apache/spark/api/java/function/Function3.java
index b4151c3..6eecfb6 100644
--- a/core/src/main/java/org/apache/spark/api/java/function/Function3.java
+++ b/core/src/main/java/org/apache/spark/api/java/function/Function3.java
@@ -23,5 +23,5 @@ import java.io.Serializable;
  * A three-argument function that takes arguments of type T1, T2 and T3 and 
returns an R.
  */
 public interface Function3<T1, T2, T3, R> extends Serializable {
-  public R call(T1 v1, T2 v2, T3 v3) throws Exception;
+  R call(T1 v1, T2 v2, T3 v3) throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/api/java/function/PairFunction.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/api/java/function/PairFunction.java 
b/core/src/main/java/org/apache/spark/api/java/function/PairFunction.java
index 99bf240..2fdfa71 100644
--- a/core/src/main/java/org/apache/spark/api/java/function/PairFunction.java
+++ b/core/src/main/java/org/apache/spark/api/java/function/PairFunction.java
@@ -26,5 +26,5 @@ import scala.Tuple2;
  * construct PairRDDs.
  */
 public interface PairFunction<T, K, V> extends Serializable {
-  public Tuple2<K, V> call(T t) throws Exception;
+  Tuple2<K, V> call(T t) throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java 
b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
index 8757dff..18612dd 100644
--- a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
+++ b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
@@ -67,9 +67,9 @@ public class TaskMemoryManager {
 
   /**
    * Maximum supported data page size (in bytes). In principle, the maximum 
addressable page size is
-   * (1L &lt;&lt; OFFSET_BITS) bytes, which is 2+ petabytes. However, the 
on-heap allocator's maximum page
-   * size is limited by the maximum amount of data that can be stored in a  
long[] array, which is
-   * (2^32 - 1) * 8 bytes (or 16 gigabytes). Therefore, we cap this at 16 
gigabytes.
+   * (1L &lt;&lt; OFFSET_BITS) bytes, which is 2+ petabytes. However, the 
on-heap allocator's
+   * maximum page size is limited by the maximum amount of data that can be 
stored in a long[]
+   * array, which is (2^32 - 1) * 8 bytes (or 16 gigabytes). Therefore, we cap 
this at 16 gigabytes.
    */
   public static final long MAXIMUM_PAGE_SIZE_BYTES = ((1L << 31) - 1) * 8L;
 

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
 
b/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
index 052be54..7a60c3e 100644
--- 
a/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
+++ 
b/core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
@@ -98,7 +98,7 @@ final class BypassMergeSortShuffleWriter<K, V> extends 
ShuffleWriter<K, V> {
    */
   private boolean stopping = false;
 
-  public BypassMergeSortShuffleWriter(
+  BypassMergeSortShuffleWriter(
       BlockManager blockManager,
       IndexShuffleBlockResolver shuffleBlockResolver,
       BypassMergeSortShuffleHandle<K, V> handle,

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java 
b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
index c7d89e6..81ee7ab 100644
--- 
a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
+++ 
b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
@@ -96,7 +96,7 @@ final class ShuffleExternalSorter extends MemoryConsumer {
   @Nullable private MemoryBlock currentPage = null;
   private long pageCursor = -1;
 
-  public ShuffleExternalSorter(
+  ShuffleExternalSorter(
       TaskMemoryManager memoryManager,
       BlockManager blockManager,
       TaskContext taskContext,

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorter.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorter.java 
b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorter.java
index 2381cff..fe79ff0 100644
--- 
a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorter.java
+++ 
b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorter.java
@@ -51,7 +51,7 @@ final class ShuffleInMemorySorter {
    */
   private int pos = 0;
 
-  public ShuffleInMemorySorter(MemoryConsumer consumer, int initialSize) {
+  ShuffleInMemorySorter(MemoryConsumer consumer, int initialSize) {
     this.consumer = consumer;
     assert (initialSize > 0);
     this.array = consumer.allocateArray(initialSize);
@@ -122,7 +122,7 @@ final class ShuffleInMemorySorter {
     final PackedRecordPointer packedRecordPointer = new PackedRecordPointer();
     private int position = 0;
 
-    public ShuffleSorterIterator(int numRecords, LongArray pointerArray) {
+    ShuffleSorterIterator(int numRecords, LongArray pointerArray) {
       this.numRecords = numRecords;
       this.pointerArray = pointerArray;
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/shuffle/sort/SpillInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/SpillInfo.java 
b/core/src/main/java/org/apache/spark/shuffle/sort/SpillInfo.java
index df9f7b7..865def6 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/SpillInfo.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/SpillInfo.java
@@ -29,7 +29,7 @@ final class SpillInfo {
   final File file;
   final TempShuffleBlockId blockId;
 
-  public SpillInfo(int numPartitions, File file, TempShuffleBlockId blockId) {
+  SpillInfo(int numPartitions, File file, TempShuffleBlockId blockId) {
     this.partitionLengths = new long[numPartitions];
     this.file = file;
     this.blockId = blockId;

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java 
b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
index cd06ce9..0c5fb88 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
@@ -45,7 +45,6 @@ import org.apache.spark.network.util.LimitedInputStream;
 import org.apache.spark.scheduler.MapStatus;
 import org.apache.spark.scheduler.MapStatus$;
 import org.apache.spark.serializer.SerializationStream;
-import org.apache.spark.serializer.Serializer;
 import org.apache.spark.serializer.SerializerInstance;
 import org.apache.spark.shuffle.IndexShuffleBlockResolver;
 import org.apache.spark.shuffle.ShuffleWriter;
@@ -82,7 +81,7 @@ public class UnsafeShuffleWriter<K, V> extends 
ShuffleWriter<K, V> {
 
   /** Subclass of ByteArrayOutputStream that exposes `buf` directly. */
   private static final class MyByteArrayOutputStream extends 
ByteArrayOutputStream {
-    public MyByteArrayOutputStream(int size) { super(size); }
+    MyByteArrayOutputStream(int size) { super(size); }
     public byte[] getBuf() { return buf; }
   }
 
@@ -108,7 +107,8 @@ public class UnsafeShuffleWriter<K, V> extends 
ShuffleWriter<K, V> {
     if (numPartitions > 
SortShuffleManager.MAX_SHUFFLE_OUTPUT_PARTITIONS_FOR_SERIALIZED_MODE()) {
       throw new IllegalArgumentException(
         "UnsafeShuffleWriter can only be used for shuffles with at most " +
-          
SortShuffleManager.MAX_SHUFFLE_OUTPUT_PARTITIONS_FOR_SERIALIZED_MODE() + " 
reduce partitions");
+        SortShuffleManager.MAX_SHUFFLE_OUTPUT_PARTITIONS_FOR_SERIALIZED_MODE() 
+
+        " reduce partitions");
     }
     this.blockManager = blockManager;
     this.shuffleBlockResolver = shuffleBlockResolver;

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java 
b/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
index 0cf84d5..9307eb9 100644
--- a/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
+++ b/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
@@ -28,7 +28,7 @@ public enum TaskSorting {
   DECREASING_RUNTIME("-runtime");
 
   private final Set<String> alternateNames;
-  private TaskSorting(String... names) {
+  TaskSorting(String... names) {
     alternateNames = new HashSet<>();
     for (String n: names) {
       alternateNames.add(n);

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java 
b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
index b55a322..de36814 100644
--- a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
+++ b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
@@ -689,7 +689,7 @@ public final class BytesToBytesMap extends MemoryConsumer {
       offset += keyLength;
       Platform.copyMemory(valueBase, valueOffset, base, offset, valueLength);
 
-      // --- Update bookkeeping data structures 
-----------------------------------------------------
+      // --- Update bookkeeping data structures 
----------------------------------------------------
       offset = currentPage.getBaseOffset();
       Platform.putInt(base, offset, Platform.getInt(base, offset) + 1);
       pageCursor += recordLength;

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
index db8e7a7..927b19c 100644
--- 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
+++ 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
@@ -432,7 +432,7 @@ public final class UnsafeExternalSorter extends 
MemoryConsumer {
     private boolean loaded = false;
     private int numRecords = 0;
 
-    public SpillableIterator(UnsafeInMemorySorter.SortedIterator 
inMemIterator) {
+    SpillableIterator(UnsafeInMemorySorter.SortedIterator inMemIterator) {
       this.upstream = inMemIterator;
       this.numRecords = inMemIterator.getNumRecords();
     }
@@ -567,7 +567,7 @@ public final class UnsafeExternalSorter extends 
MemoryConsumer {
     private UnsafeSorterIterator current;
     private int numRecords;
 
-    public ChainedIterator(Queue<UnsafeSorterIterator> iterators) {
+    ChainedIterator(Queue<UnsafeSorterIterator> iterators) {
       assert iterators.size() > 0;
       this.numRecords = 0;
       for (UnsafeSorterIterator iter: iterators) {

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSortDataFormat.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSortDataFormat.java
 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSortDataFormat.java
index d3137f5..12fb62f 100644
--- 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSortDataFormat.java
+++ 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSortDataFormat.java
@@ -47,7 +47,8 @@ final class UnsafeSortDataFormat extends 
SortDataFormat<RecordPointerAndKeyPrefi
   }
 
   @Override
-  public RecordPointerAndKeyPrefix getKey(LongArray data, int pos, 
RecordPointerAndKeyPrefix reuse) {
+  public RecordPointerAndKeyPrefix getKey(LongArray data, int pos,
+                                          RecordPointerAndKeyPrefix reuse) {
     reuse.recordPointer = data.get(pos * 2);
     reuse.keyPrefix = data.get(pos * 2 + 1);
     return reuse;

http://git-wip-us.apache.org/repos/asf/spark/blob/20fd2541/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillMerger.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillMerger.java
 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillMerger.java
index ceb5935..2b1c860 100644
--- 
a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillMerger.java
+++ 
b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillMerger.java
@@ -26,7 +26,7 @@ final class UnsafeSorterSpillMerger {
   private int numRecords = 0;
   private final PriorityQueue<UnsafeSorterIterator> priorityQueue;
 
-  public UnsafeSorterSpillMerger(
+  UnsafeSorterSpillMerger(
       final RecordComparator recordComparator,
       final PrefixComparator prefixComparator,
       final int numSpills) {
@@ -57,7 +57,7 @@ final class UnsafeSorterSpillMerger {
       // make sure the hasNext method of UnsafeSorterIterator returned by 
getSortedIterator
       // does not return wrong result because hasNext will returns true
       // at least priorityQueue.size() times. If we allow n spillReaders in the
-      // priorityQueue, we will have n extra empty records in the result of 
the UnsafeSorterIterator.
+      // priorityQueue, we will have n extra empty records in the result of 
UnsafeSorterIterator.
       spillReader.loadNext();
       priorityQueue.add(spillReader);
       numRecords += spillReader.getNumRecords();


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to