Repository: spark
Updated Branches:
refs/heads/master f156136da -> f73891e0b
[MINOR] Fix Java style errors and remove unused imports
## What changes were proposed in this pull request?
Fix Java style errors and remove unused imports, which were found while browsing the codebase.
## How was this patch tested?
Tested on my local machine.
Author: Xin Ren
Closes #14161 from keypointt/SPARK-16437.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f73891e0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f73891e0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f73891e0
Branch: refs/heads/master
Commit: f73891e0b9640e14455bdbfd999a8ff10b78a819
Parents: f156136
Author: Xin Ren
Authored: Wed Jul 13 10:47:07 2016 +0100
Committer: Sean Owen
Committed: Wed Jul 13 10:47:07 2016 +0100
--
.../org/apache/spark/unsafe/memory/HeapMemoryAllocator.java| 1 -
.../java/org/apache/spark/unsafe/memory/MemoryAllocator.java | 6 +++---
.../sql/execution/datasources/parquet/ParquetFileFormat.scala | 3 +--
.../scala/org/apache/spark/sql/sources/DataSourceTest.scala| 1 -
4 files changed, 4 insertions(+), 7 deletions(-)
--
http://git-wip-us.apache.org/repos/asf/spark/blob/f73891e0/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java
--
diff --git
a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java
b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java
index 3cd4264..3557482 100644
---
a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java
+++
b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java
@@ -24,7 +24,6 @@ import java.util.LinkedList;
import java.util.Map;
import org.apache.spark.unsafe.Platform;
-import org.apache.spark.unsafe.memory.MemoryAllocator;
/**
* A simple {@link MemoryAllocator} that can allocate up to 16GB using a JVM
long primitive array.
http://git-wip-us.apache.org/repos/asf/spark/blob/f73891e0/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java
--
diff --git
a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java
b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java
index 8bd2b06..7b58868 100644
---
a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java
+++
b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java
@@ -23,12 +23,12 @@ public interface MemoryAllocator {
* Whether to fill newly allocated and deallocated memory with 0xa5 and 0x5a
bytes respectively.
* This helps catch misuse of uninitialized or freed memory, but imposes
some overhead.
*/
- public static final boolean MEMORY_DEBUG_FILL_ENABLED = Boolean.parseBoolean(
+ boolean MEMORY_DEBUG_FILL_ENABLED = Boolean.parseBoolean(
System.getProperty("spark.memory.debugFill", "false"));
// Same as jemalloc's debug fill values.
- public static final byte MEMORY_DEBUG_FILL_CLEAN_VALUE = (byte)0xa5;
- public static final byte MEMORY_DEBUG_FILL_FREED_VALUE = (byte)0x5a;
+ byte MEMORY_DEBUG_FILL_CLEAN_VALUE = (byte)0xa5;
+ byte MEMORY_DEBUG_FILL_FREED_VALUE = (byte)0x5a;
/**
* Allocates a contiguous block of memory. Note that the allocated memory is
not guaranteed
http://git-wip-us.apache.org/repos/asf/spark/blob/f73891e0/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
--
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
index 76d7f5c..772e031 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
@@ -780,8 +780,7 @@ private[sql] object ParquetFileFormat extends Logging {
val assumeBinaryIsString =
sparkSession.sessionState.conf.isParquetBinaryAsString
val assumeInt96IsTimestamp =
sparkSession.sessionState.conf.isParquetINT96AsTimestamp
val writeLegacyParquetFormat =
sparkSession.sessionState.conf.writeLegacyParquetFormat
-val serializedConf =
- new SerializableConfiguration(sparkSession.sessionState.newHadoopConf())
+val serializedConf = new
SerializableConfiguration(sparkSession.sessionState.newHadoopConf())
// !!