Repository: spark
Updated Branches:
  refs/heads/branch-1.6 d0307deaa -> 9870e5c7a


[SPARK-12251] Document and improve off-heap memory configurations

This patch adds documentation for Spark configurations that affect off-heap 
memory and makes some naming and validation improvements for those configs.

- Change `spark.memory.offHeapSize` to `spark.memory.offHeap.size`. This is 
fine because this configuration has not shipped in any Spark release yet (it's 
new in Spark 1.6).
- Deprecate `spark.unsafe.offHeap` in favor of a new
`spark.memory.offHeap.enabled` configuration. The motivation behind this change
is to gather all memory-related configurations under the same prefix.
- Add a check which prevents users from setting 
`spark.memory.offHeap.enabled=true` when `spark.memory.offHeap.size == 0`. 
After SPARK-11389 (#9344), which was committed in Spark 1.6, Spark enforces a 
hard limit on the amount of off-heap memory that it will allocate to tasks. As 
a result, enabling off-heap execution memory without setting 
`spark.memory.offHeap.size` will lead to immediate OOMs. The new configuration 
validation makes this scenario easier to diagnose, helping to avoid user 
confusion.
- Document these configurations on the configuration page (see the usage sketch below).
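
For reference, a minimal sketch of the resulting end-user setup (the `2g` value is
illustrative only, and this assumes a build that includes this patch):

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
      .set("spark.memory.offHeap.enabled", "true") // opt in to off-heap execution memory
      .set("spark.memory.offHeap.size", "2g")      // must be > 0 whenever the flag above is true
    // Leaving spark.memory.offHeap.size at its default of 0 while enabled now fails
    // fast with an IllegalArgumentException instead of OOMing once tasks run.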

Author: Josh Rosen <[email protected]>

Closes #10237 from JoshRosen/SPARK-12251.

(cherry picked from commit 23a9e62bad9669e9ff5dc4bd714f58d12f9be0b5)
Signed-off-by: Andrew Or <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9870e5c7
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9870e5c7
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9870e5c7

Branch: refs/heads/branch-1.6
Commit: 9870e5c7af87190167ca3845ede918671b9420ca
Parents: d0307de
Author: Josh Rosen <[email protected]>
Authored: Thu Dec 10 15:29:04 2015 -0800
Committer: Andrew Or <[email protected]>
Committed: Thu Dec 10 15:29:13 2015 -0800

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/SparkConf.scala |  4 +++-
 .../org/apache/spark/memory/MemoryManager.scala | 10 ++++++++--
 .../spark/memory/TaskMemoryManagerSuite.java    | 21 ++++++++++++++++----
 .../shuffle/sort/PackedRecordPointerSuite.java  |  6 ++++--
 .../sort/ShuffleInMemorySorterSuite.java        |  4 ++--
 .../shuffle/sort/UnsafeShuffleWriterSuite.java  |  2 +-
 .../map/AbstractBytesToBytesMapSuite.java       |  4 ++--
 .../unsafe/sort/UnsafeExternalSorterSuite.java  |  2 +-
 .../unsafe/sort/UnsafeInMemorySorterSuite.java  |  4 ++--
 .../spark/memory/StaticMemoryManagerSuite.scala |  2 +-
 .../memory/UnifiedMemoryManagerSuite.scala      |  2 +-
 docs/configuration.md                           | 16 +++++++++++++++
 .../sql/execution/joins/HashedRelation.scala    |  6 +++++-
 .../UnsafeFixedWidthAggregationMapSuite.scala   |  2 +-
 .../execution/UnsafeKVExternalSorterSuite.scala |  2 +-
 15 files changed, 65 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/main/scala/org/apache/spark/SparkConf.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 19633a3..d3384fb 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -597,7 +597,9 @@ private[spark] object SparkConf extends Logging {
     "spark.streaming.fileStream.minRememberDuration" -> Seq(
       AlternateConfig("spark.streaming.minRememberDuration", "1.5")),
     "spark.yarn.max.executor.failures" -> Seq(
-      AlternateConfig("spark.yarn.max.worker.failures", "1.5"))
+      AlternateConfig("spark.yarn.max.worker.failures", "1.5")),
+    "spark.memory.offHeap.enabled" -> Seq(
+      AlternateConfig("spark.unsafe.offHeap", "1.6"))
     )
 
   /**

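For illustration, a hedged sketch (not part of this diff) of what the new
`AlternateConfig` entry provides, assuming SparkConf's usual fallback from
deprecated keys: a value set under the old name is visible through the new one,
and Spark logs a deprecation warning for the old key.

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
      .set("spark.unsafe.offHeap", "true")    // deprecated key; a warning is logged
      .set("spark.memory.offHeap.size", "1g")
    // Reads through the new key pick up the value set via the old one:
    assert(conf.getBoolean("spark.memory.offHeap.enabled", false))
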
http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/main/scala/org/apache/spark/memory/MemoryManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/memory/MemoryManager.scala b/core/src/main/scala/org/apache/spark/memory/MemoryManager.scala
index ae9e1ac..e707e27 100644
--- a/core/src/main/scala/org/apache/spark/memory/MemoryManager.scala
+++ b/core/src/main/scala/org/apache/spark/memory/MemoryManager.scala
@@ -50,7 +50,7 @@ private[spark] abstract class MemoryManager(
 
   storageMemoryPool.incrementPoolSize(storageMemory)
   onHeapExecutionMemoryPool.incrementPoolSize(onHeapExecutionMemory)
-  offHeapExecutionMemoryPool.incrementPoolSize(conf.getSizeAsBytes("spark.memory.offHeapSize", 0))
+  offHeapExecutionMemoryPool.incrementPoolSize(conf.getSizeAsBytes("spark.memory.offHeap.size", 0))
 
   /**
    * Total available memory for storage, in bytes. This amount can vary over time, depending on
@@ -182,7 +182,13 @@ private[spark] abstract class MemoryManager(
    * sun.misc.Unsafe.
    */
   final val tungstenMemoryMode: MemoryMode = {
-    if (conf.getBoolean("spark.unsafe.offHeap", false)) MemoryMode.OFF_HEAP else MemoryMode.ON_HEAP
+    if (conf.getBoolean("spark.memory.offHeap.enabled", false)) {
+      require(conf.getSizeAsBytes("spark.memory.offHeap.size", 0) > 0,
+        "spark.memory.offHeap.size must be > 0 when spark.memory.offHeap.enabled == true")
+      MemoryMode.OFF_HEAP
+    } else {
+      MemoryMode.ON_HEAP
+    }
   }
 
   /**

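To make the new failure mode concrete, here is a minimal user-level sketch (not
part of this diff) that mirrors the `require` above using only the public
SparkConf API:

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
      .set("spark.memory.offHeap.enabled", "true") // enabled, but size left at its default of 0
    // With this patch, MemoryManager construction fails fast with an
    // IllegalArgumentException carrying the message below, instead of letting
    // tasks start and then OOM. The same guard, mirrored in user code:
    require(conf.getSizeAsBytes("spark.memory.offHeap.size", 0) > 0,
      "spark.memory.offHeap.size must be > 0 when spark.memory.offHeap.enabled == true")
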
http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java b/core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java
index 711eed0..776a299 100644
--- a/core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java
+++ b/core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java
@@ -29,7 +29,7 @@ public class TaskMemoryManagerSuite {
   public void leakedPageMemoryIsDetected() {
     final TaskMemoryManager manager = new TaskMemoryManager(
       new StaticMemoryManager(
-        new SparkConf().set("spark.unsafe.offHeap", "false"),
+        new SparkConf().set("spark.memory.offHeap.enabled", "false"),
         Long.MAX_VALUE,
         Long.MAX_VALUE,
         1),
@@ -41,8 +41,10 @@ public class TaskMemoryManagerSuite {
 
   @Test
   public void encodePageNumberAndOffsetOffHeap() {
-    final TaskMemoryManager manager = new TaskMemoryManager(
-      new TestMemoryManager(new SparkConf().set("spark.unsafe.offHeap", "true")), 0);
+    final SparkConf conf = new SparkConf()
+      .set("spark.memory.offHeap.enabled", "true")
+      .set("spark.memory.offHeap.size", "1000");
+    final TaskMemoryManager manager = new TaskMemoryManager(new TestMemoryManager(conf), 0);
     final MemoryBlock dataPage = manager.allocatePage(256, null);
     // In off-heap mode, an offset is an absolute address that may require more than 51 bits to
     // encode. This test exercises that corner-case:
@@ -55,7 +57,7 @@ public class TaskMemoryManagerSuite {
   @Test
   public void encodePageNumberAndOffsetOnHeap() {
     final TaskMemoryManager manager = new TaskMemoryManager(
-      new TestMemoryManager(new SparkConf().set("spark.unsafe.offHeap", "false")), 0);
+      new TestMemoryManager(new SparkConf().set("spark.memory.offHeap.enabled", "false")), 0);
     final MemoryBlock dataPage = manager.allocatePage(256, null);
     final long encodedAddress = manager.encodePageNumberAndOffset(dataPage, 64);
     Assert.assertEquals(dataPage.getBaseObject(), manager.getPage(encodedAddress));
@@ -104,4 +106,15 @@ public class TaskMemoryManagerSuite {
     assert(manager.cleanUpAllAllocatedMemory() == 0);
   }
 
+  @Test
+  public void offHeapConfigurationBackwardsCompatibility() {
+    // Tests backwards-compatibility with the old `spark.unsafe.offHeap` configuration, which
+    // was deprecated in Spark 1.6 and replaced by `spark.memory.offHeap.enabled` (see SPARK-12251).
+    final SparkConf conf = new SparkConf()
+      .set("spark.unsafe.offHeap", "true")
+      .set("spark.memory.offHeap.size", "1000");
+    final TaskMemoryManager manager = new TaskMemoryManager(new TestMemoryManager(conf), 0);
+    assert(manager.tungstenMemoryMode == MemoryMode.OFF_HEAP);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/test/java/org/apache/spark/shuffle/sort/PackedRecordPointerSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/shuffle/sort/PackedRecordPointerSuite.java b/core/src/test/java/org/apache/spark/shuffle/sort/PackedRecordPointerSuite.java
index 9a43f1f..fe5abc5 100644
--- a/core/src/test/java/org/apache/spark/shuffle/sort/PackedRecordPointerSuite.java
+++ b/core/src/test/java/org/apache/spark/shuffle/sort/PackedRecordPointerSuite.java
@@ -35,7 +35,7 @@ public class PackedRecordPointerSuite {
 
   @Test
   public void heap() throws IOException {
-    final SparkConf conf = new SparkConf().set("spark.unsafe.offHeap", "false");
+    final SparkConf conf = new SparkConf().set("spark.memory.offHeap.enabled", "false");
     final TaskMemoryManager memoryManager =
       new TaskMemoryManager(new TestMemoryManager(conf), 0);
     final MemoryBlock page0 = memoryManager.allocatePage(128, null);
@@ -54,7 +54,9 @@ public class PackedRecordPointerSuite {
 
   @Test
   public void offHeap() throws IOException {
-    final SparkConf conf = new SparkConf().set("spark.unsafe.offHeap", "true");
+    final SparkConf conf = new SparkConf()
+      .set("spark.memory.offHeap.enabled", "true")
+      .set("spark.memory.offHeap.size", "10000");
     final TaskMemoryManager memoryManager =
       new TaskMemoryManager(new TestMemoryManager(conf), 0);
     final MemoryBlock page0 = memoryManager.allocatePage(128, null);

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java b/core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java
index faa5a86..0328e63 100644
--- a/core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java
+++ b/core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java
@@ -34,7 +34,7 @@ import org.apache.spark.unsafe.memory.MemoryBlock;
 public class ShuffleInMemorySorterSuite {
 
   final TestMemoryManager memoryManager =
-    new TestMemoryManager(new SparkConf().set("spark.unsafe.offHeap", "false"));
+    new TestMemoryManager(new SparkConf().set("spark.memory.offHeap.enabled", "false"));
   final TaskMemoryManager taskMemoryManager = new TaskMemoryManager(memoryManager, 0);
   final TestMemoryConsumer consumer = new TestMemoryConsumer(taskMemoryManager);
 
@@ -64,7 +64,7 @@ public class ShuffleInMemorySorterSuite {
       "Lychee",
       "Mango"
     };
-    final SparkConf conf = new SparkConf().set("spark.unsafe.offHeap", "false");
+    final SparkConf conf = new SparkConf().set("spark.memory.offHeap.enabled", "false");
     final TaskMemoryManager memoryManager =
       new TaskMemoryManager(new TestMemoryManager(conf), 0);
     final MemoryBlock dataPage = memoryManager.allocatePage(2048, null);

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java b/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
index bc85918..5fe64bd 100644
--- a/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
+++ b/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
@@ -108,7 +108,7 @@ public class UnsafeShuffleWriterSuite {
     spillFilesCreated.clear();
     conf = new SparkConf()
       .set("spark.buffer.pageSize", "1m")
-      .set("spark.unsafe.offHeap", "false");
+      .set("spark.memory.offHeap.enabled", "false");
     taskMetrics = new TaskMetrics();
     memoryManager = new TestMemoryManager(conf);
     taskMemoryManager =  new TaskMemoryManager(memoryManager, 0);

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
index d87a1d2..e4d857b 100644
--- a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
+++ b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
@@ -84,8 +84,8 @@ public abstract class AbstractBytesToBytesMapSuite {
     memoryManager =
       new TestMemoryManager(
         new SparkConf()
-          .set("spark.unsafe.offHeap", "" + useOffHeapMemoryAllocator())
-          .set("spark.memory.offHeapSize", "256mb"));
+          .set("spark.memory.offHeap.enabled", "" + useOffHeapMemoryAllocator())
+          .set("spark.memory.offHeap.size", "256mb"));
     taskMemoryManager = new TaskMemoryManager(memoryManager, 0);
 
     tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
index a1c9f6f..e0ee281 100644
--- a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
+++ b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
@@ -58,7 +58,7 @@ public class UnsafeExternalSorterSuite {
 
   final LinkedList<File> spillFilesCreated = new LinkedList<File>();
   final TestMemoryManager memoryManager =
-    new TestMemoryManager(new SparkConf().set("spark.unsafe.offHeap", "false"));
+    new TestMemoryManager(new SparkConf().set("spark.memory.offHeap.enabled", "false"));
   final TaskMemoryManager taskMemoryManager = new TaskMemoryManager(memoryManager, 0);
   // Use integer comparison for comparing prefixes (which are partition ids, in this case)
   final PrefixComparator prefixComparator = new PrefixComparator() {

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
index a203a09..93efd03 100644
--- a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
+++ b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
@@ -46,7 +46,7 @@ public class UnsafeInMemorySorterSuite {
   @Test
   public void testSortingEmptyInput() {
     final TaskMemoryManager memoryManager = new TaskMemoryManager(
-      new TestMemoryManager(new SparkConf().set("spark.unsafe.offHeap", "false")), 0);
+      new TestMemoryManager(new SparkConf().set("spark.memory.offHeap.enabled", "false")), 0);
     final TestMemoryConsumer consumer = new TestMemoryConsumer(memoryManager);
     final UnsafeInMemorySorter sorter = new UnsafeInMemorySorter(consumer,
       memoryManager,
@@ -71,7 +71,7 @@ public class UnsafeInMemorySorterSuite {
       "Mango"
     };
     final TaskMemoryManager memoryManager = new TaskMemoryManager(
-      new TestMemoryManager(new SparkConf().set("spark.unsafe.offHeap", "false")), 0);
+      new TestMemoryManager(new SparkConf().set("spark.memory.offHeap.enabled", "false")), 0);
     final TestMemoryConsumer consumer = new TestMemoryConsumer(memoryManager);
     final MemoryBlock dataPage = memoryManager.allocatePage(2048, null);
     final Object baseObject = dataPage.getBaseObject();

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/test/scala/org/apache/spark/memory/StaticMemoryManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/memory/StaticMemoryManagerSuite.scala b/core/src/test/scala/org/apache/spark/memory/StaticMemoryManagerSuite.scala
index 272253b..68cf26f 100644
--- a/core/src/test/scala/org/apache/spark/memory/StaticMemoryManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/memory/StaticMemoryManagerSuite.scala
@@ -47,7 +47,7 @@ class StaticMemoryManagerSuite extends MemoryManagerSuite {
       conf.clone
         .set("spark.memory.fraction", "1")
         .set("spark.testing.memory", maxOnHeapExecutionMemory.toString)
-        .set("spark.memory.offHeapSize", maxOffHeapExecutionMemory.toString),
+        .set("spark.memory.offHeap.size", maxOffHeapExecutionMemory.toString),
       maxOnHeapExecutionMemory = maxOnHeapExecutionMemory,
       maxStorageMemory = 0,
       numCores = 1)

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/core/src/test/scala/org/apache/spark/memory/UnifiedMemoryManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/memory/UnifiedMemoryManagerSuite.scala b/core/src/test/scala/org/apache/spark/memory/UnifiedMemoryManagerSuite.scala
index 71221de..e21a028 100644
--- a/core/src/test/scala/org/apache/spark/memory/UnifiedMemoryManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/memory/UnifiedMemoryManagerSuite.scala
@@ -42,7 +42,7 @@ class UnifiedMemoryManagerSuite extends MemoryManagerSuite with PrivateMethodTes
     val conf = new SparkConf()
       .set("spark.memory.fraction", "1")
       .set("spark.testing.memory", maxOnHeapExecutionMemory.toString)
-      .set("spark.memory.offHeapSize", maxOffHeapExecutionMemory.toString)
+      .set("spark.memory.offHeap.size", maxOffHeapExecutionMemory.toString)
       .set("spark.memory.storageFraction", storageFraction.toString)
     UnifiedMemoryManager(conf, numCores = 1)
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/docs/configuration.md
----------------------------------------------------------------------
diff --git a/docs/configuration.md b/docs/configuration.md
index ee20733..488dc52 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -739,6 +739,22 @@ Apart from these, the following properties are also available, and may be useful
   </td>
 </tr>
 <tr>
+  <td><code>spark.memory.offHeap.enabled</code></td>
+  <td>false</td>
+  <td>
+    If true, Spark will attempt to use off-heap memory for certain operations. If off-heap memory use is enabled, then <code>spark.memory.offHeap.size</code> must be positive.
+  </td>
+</tr>
+<tr>
+  <td><code>spark.memory.offHeap.size</code></td>
+  <td>0</td>
+  <td>
+    The absolute amount of memory which can be used for off-heap allocation.
+    This setting has no impact on heap memory usage, so if your executors' total memory consumption must fit within some hard limit then be sure to shrink your JVM heap size accordingly.
+    This must be set to a positive value when <code>spark.memory.offHeap.enabled=true</code>.
+  </td>
+</tr>
+<tr>
   <td><code>spark.memory.useLegacyMode</code></td>
   <td>false</td>
   <td>

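The sizing guidance above, as a hedged example (the `3g`/`1g` split is purely
illustrative, and `spark.executor.memory` is the pre-existing executor heap
setting, not something added by this patch):

    import org.apache.spark.SparkConf

    // Roughly: executor footprint ~ JVM heap + spark.memory.offHeap.size (+ other
    // overheads), so shrink the heap when enabling off-heap memory if the total
    // must stay under a fixed container limit.
    val conf = new SparkConf()
      .set("spark.executor.memory", "3g")          // JVM heap, reduced to make room
      .set("spark.memory.offHeap.enabled", "true")
      .set("spark.memory.offHeap.size", "1g")      // allocated outside the heap
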
http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
index aebfea5..8c7099a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
@@ -334,7 +334,11 @@ private[joins] final class UnsafeHashedRelation(
     // so that tests compile:
     val taskMemoryManager = new TaskMemoryManager(
       new StaticMemoryManager(
-        new SparkConf().set("spark.unsafe.offHeap", "false"), Long.MaxValue, Long.MaxValue, 1), 0)
+        new SparkConf().set("spark.memory.offHeap.enabled", "false"),
+        Long.MaxValue,
+        Long.MaxValue,
+        1),
+      0)
 
     val pageSizeBytes = Option(SparkEnv.get).map(_.memoryManager.pageSizeBytes)
       .getOrElse(new SparkConf().getSizeAsBytes("spark.buffer.pageSize", "16m"))

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMapSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMapSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMapSuite.scala
index 7ceaee3..5a84067 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMapSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMapSuite.scala
@@ -61,7 +61,7 @@ class UnsafeFixedWidthAggregationMapSuite
     }
 
     test(name) {
-      val conf = new SparkConf().set("spark.unsafe.offHeap", "false")
+      val conf = new SparkConf().set("spark.memory.offHeap.enabled", "false")
       memoryManager = new TestMemoryManager(conf)
       taskMemoryManager = new TaskMemoryManager(memoryManager, 0)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/9870e5c7/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeKVExternalSorterSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeKVExternalSorterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeKVExternalSorterSuite.scala
index 7b80963..29027a6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeKVExternalSorterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeKVExternalSorterSuite.scala
@@ -109,7 +109,7 @@ class UnsafeKVExternalSorterSuite extends SparkFunSuite with SharedSQLContext {
       pageSize: Long,
       spill: Boolean): Unit = {
     val memoryManager =
-      new TestMemoryManager(new SparkConf().set("spark.unsafe.offHeap", "false"))
+      new TestMemoryManager(new SparkConf().set("spark.memory.offHeap.enabled", "false"))
     val taskMemMgr = new TaskMemoryManager(memoryManager, 0)
     TaskContext.setTaskContext(new TaskContextImpl(
       stageId = 0,

