Rename IntermediateBlockId to TempBlockId
Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/08302b11
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/08302b11
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/08302b11

Branch: refs/heads/master
Commit: 08302b113a5db773e3b8d7cfea1ab1d2b8d3695b
Parents: 8bbe08b
Author: Aaron Davidson <aa...@databricks.com>
Authored: Tue Dec 31 17:42:30 2013 -0800
Committer: Aaron Davidson <aa...@databricks.com>
Committed: Tue Dec 31 17:44:15 2013 -0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/storage/BlockId.scala    | 8 ++++----
 .../scala/org/apache/spark/storage/DiskBlockManager.scala     | 6 +++---
 .../apache/spark/util/collection/ExternalAppendOnlyMap.scala  | 2 +-
 .../spark/util/collection/SizeTrackingAppendOnlyMap.scala     | 2 +-
 4 files changed, 9 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/08302b11/core/src/main/scala/org/apache/spark/storage/BlockId.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockId.scala b/core/src/main/scala/org/apache/spark/storage/BlockId.scala
index c5dacf3..bcc3101 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockId.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockId.scala
@@ -68,9 +68,9 @@ private[spark] case class StreamBlockId(streamId: Int, uniqueId: Long) extends B
   def name = "input-" + streamId + "-" + uniqueId
 }
 
-/** Block associated with intermediate (temporary) data managed as blocks. */
-private[spark] case class IntermediateBlockId(id: String) extends BlockId {
-  def name = "intermediate_" + id
+/** Block associated with temporary data managed as blocks. */
+private[spark] case class TempBlockId(id: String) extends BlockId {
+  def name = "temp_" + id
 }
 
 // Intended only for testing purposes
@@ -85,7 +85,7 @@ private[spark] object BlockId {
   val BROADCAST_HELPER = "broadcast_([0-9]+)_([A-Za-z0-9]+)".r
   val TASKRESULT = "taskresult_([0-9]+)".r
   val STREAM = "input-([0-9]+)-([0-9]+)".r
-  val INTERMEDIATE = "intermediate_(.*)".r
+  val TEMP = "temp_(.*)".r
   val TEST = "test_(.*)".r
 
   /** Converts a BlockId "name" String back into a BlockId. */
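A note on the parsing side: BlockId.apply, which converts a block's name String back into a BlockId, is not shown in this hunk. The following is only a minimal, self-contained sketch of how the renamed TEMP pattern would round-trip a TempBlockId name; BlockIdSketch and its stand-in types are assumptions for illustration, not code from this commit.

    object BlockIdSketch {
      // Illustrative stand-ins; the real BlockId hierarchy lives in
      // org.apache.spark.storage and is not reproduced here.
      val TEMP = "temp_(.*)".r
      case class TempBlockId(id: String) { def name = "temp_" + id }

      // Presumed shape of the BlockId.apply case for the renamed pattern:
      // a full-string regex match that recovers the UUID portion of the name.
      def apply(name: String): TempBlockId = name match {
        case TEMP(uuid) => TempBlockId(uuid)
        case _ => throw new IllegalStateException("Unrecognized BlockId: " + name)
      }

      def main(args: Array[String]): Unit = {
        val id = TempBlockId("1234-abcd")
        assert(BlockIdSketch(id.name) == id) // "temp_1234-abcd" round-trips
        println("round-trip ok: " + id.name)
      }
    }
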
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/08302b11/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 58320f2..32da458 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -91,10 +91,10 @@ private[spark] class DiskBlockManager(shuffleManager: ShuffleBlockManager, rootD
   def getFile(blockId: BlockId): File = getFile(blockId.name)
 
   /** Produces a unique block id and File suitable for intermediate results. */
-  def createIntermediateBlock: (IntermediateBlockId, File) = {
-    var blockId = new IntermediateBlockId(UUID.randomUUID().toString)
+  def createTempBlock(): (TempBlockId, File) = {
+    var blockId = new TempBlockId(UUID.randomUUID().toString)
     while (getFile(blockId).exists()) {
-      blockId = new IntermediateBlockId(UUID.randomUUID().toString)
+      blockId = new TempBlockId(UUID.randomUUID().toString)
     }
     (blockId, getFile(blockId))
   }

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/08302b11/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
index 492b4fc..96f6bb3 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
@@ -138,7 +138,7 @@ private[spark] class SpillableAppendOnlyMap[K, V, G: ClassTag, C: ClassTag](
     spillCount += 1
     logWarning(s"In-memory KV map exceeded threshold of $memoryThresholdMB MB!")
     logWarning(s"Spilling to disk ($spillCount time"+(if (spillCount > 1) "s" else "")+" so far)")
-    val (blockId, file) = diskBlockManager.createIntermediateBlock
+    val (blockId, file) = diskBlockManager.createTempBlock()
     val writer = new DiskBlockObjectWriter(blockId, file, serializer, fileBufferSize, identity)
     try {
       val it = currentMap.destructiveSortedIterator(comparator)

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/08302b11/core/src/main/scala/org/apache/spark/util/collection/SizeTrackingAppendOnlyMap.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/collection/SizeTrackingAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/SizeTrackingAppendOnlyMap.scala
index e6b6103..204330d 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/SizeTrackingAppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/SizeTrackingAppendOnlyMap.scala
@@ -96,6 +96,6 @@ private[spark] class SizeTrackingAppendOnlyMap[K, V] extends AppendOnlyMap[K, V]
   }
 }
 
-object SizeTrackingAppendOnlyMap {
+private object SizeTrackingAppendOnlyMap {
   case class Sample(size: Long, numUpdates: Long)
 }
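For illustration: createTempBlock draws fresh random UUIDs until it finds a name whose backing file does not yet exist, then hands back the id together with its File. A minimal standalone sketch of that same retry pattern follows; TempBlockSketch and its stubbed getFile (the real DiskBlockManager hashes block names into per-directory subfolders) are assumptions for the sketch, not part of this commit.

    import java.io.File
    import java.util.UUID

    object TempBlockSketch {
      // Stand-ins for the real TempBlockId and DiskBlockManager.getFile.
      case class TempBlockId(id: String) { def name = "temp_" + id }
      private val dir = new File(System.getProperty("java.io.tmpdir"))
      def getFile(name: String): File = new File(dir, name)

      // Same retry loop as createTempBlock: draw fresh UUIDs until the
      // corresponding file does not already exist on disk.
      def createTempBlock(): (TempBlockId, File) = {
        var blockId = TempBlockId(UUID.randomUUID().toString)
        while (getFile(blockId.name).exists()) {
          blockId = TempBlockId(UUID.randomUUID().toString)
        }
        (blockId, getFile(blockId.name))
      }

      def main(args: Array[String]): Unit = {
        val (blockId, file) = createTempBlock()
        println("allocated " + blockId.name + " -> " + file.getPath)
      }
    }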