GitHub user rxin commented on a diff in the pull request:
https://github.com/apache/spark/pull/1241#discussion_r16759871
--- Diff: core/src/main/scala/org/apache/spark/shuffle/IndexShuffleBlockManager.scala ---
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.shuffle
+
+import java.io._
+import java.nio.ByteBuffer
+
+import org.apache.spark.{SparkEnv, SparkConf}
+import org.apache.spark.network.netty.PathResolver
+import org.apache.spark.serializer.Serializer
+import org.apache.spark.storage._
+import org.apache.spark.storage.ShuffleBlockId
+
+private[spark]
+class IndexShuffleBlockManager(conf: SparkConf)
+ extends ShuffleBlockManager with PathResolver {
+
+ private lazy val blockManager = SparkEnv.get.blockManager
+
+ // Mapping to a single shuffleBlockId with reduce ID 0 that we'll use to write all results to.
+ private def consolidatedId(blockId: ShuffleBlockId): ShuffleBlockId = {
+ blockId.copy(reduceId = 0)
+ }
+
+ def getDataFile(blockId: ShuffleBlockId): File = {
+ blockManager.diskBlockManager.getFile(consolidatedId(blockId))
+ }
+
+ private def getIndexFile(blockId: ShuffleBlockId): File = {
+ blockManager.diskBlockManager.getFile(consolidatedId(blockId).name + ".index")
+ }
+
+ // Remove the data file and index file that contain the output data from one map.
+ // Only needs to be called once for all shuffleBlockIds belonging to one map output.
+ def removeDataByMap(blockId: ShuffleBlockId): Unit = {
+ var file = getDataFile(blockId)
+ if (file.exists) {
+ file.delete
+ }
+
+ file = getIndexFile(blockId)
+ if (file.exists) {
+ file.delete
+ }
+ }
+
+ // Write an index file with the offsets of each block, plus a final offset at the end for
+ // the end of the output file. This will be used by getBlockLocation to figure out where
+ // each block begins and ends.
+ def writeIndexFile(blockId: ShuffleBlockId, offsets: Array[Long]) = {
+ val indexFile = getIndexFile(blockId)
+ val out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(indexFile)))
+ try {
+ for(offset <- offsets) {
--- End diff ---
Nit: add a space after `for`, i.e. `for (offset <- offsets)`.
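
For context, here is a minimal sketch of the read-side counterpart: how the offsets written by writeIndexFile could be consumed to locate one block in the consolidated data file. This is illustrative only, under the layout assumed above (one 8-byte long per offset, plus a trailing end offset); `readBlockLocation` is a hypothetical helper, not Spark's actual getBlockLocation.

```scala
import java.io.{DataInputStream, File, FileInputStream}

// Hypothetical helper: recover the (offset, length) of block `reduceId` from
// an index file laid out as consecutive 8-byte longs, with a final long
// marking the end of the data file.
def readBlockLocation(indexFile: File, reduceId: Int): (Long, Long) = {
  val in = new DataInputStream(new FileInputStream(indexFile))
  try {
    in.skipBytes(reduceId * 8)   // skip past the offsets of earlier blocks
    val offset = in.readLong()   // where this block starts in the data file
    val next = in.readLong()     // where the next block starts (or file end)
    (offset, next - offset)      // length is the gap between adjacent offsets
  } finally {
    in.close()
  }
}
```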