Author: jukka
Date: Tue Nov 26 17:32:02 2013
New Revision: 1545745

URL: http://svn.apache.org/r1545745
Log:
OAK-593: Segment-based MK

Use different UUID variants to distinguish between data and bulk segments.
Add summary output to "oak-run tarmk".

Added:
    
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentIdFactory.java
Modified:
    
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
    
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
    
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/FileStore.java
    
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/TarFile.java
    
jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/RecordTest.java
    
jackrabbit/oak/trunk/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
    
jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/Main.java

Modified: 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java?rev=1545745&r1=1545744&r2=1545745&view=diff
==============================================================================
--- 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
 (original)
+++ 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
 Tue Nov 26 17:32:02 2013
@@ -20,12 +20,16 @@ import static com.google.common.base.Obj
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkPositionIndexes;
 import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.collect.Lists.newArrayListWithCapacity;
+import static java.util.Collections.emptyList;
+import static org.apache.jackrabbit.oak.plugins.segment.SegmentIdFactory.isDataSegmentId;
 import static org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.BLOCK_SIZE;
 
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
+import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.Callable;
 
@@ -92,7 +96,7 @@ public class Segment {
             new Weigher<UUID, Segment>() {
                 @Override
                 public int weigh(UUID key, Segment value) {
-                    return value.data.remaining();
+                    return value.size();
                 }
             };
 
@@ -109,15 +113,12 @@ public class Segment {
         this.uuid = checkNotNull(uuid);
         this.data = checkNotNull(data);
 
-        if (data.capacity() > 0
-                && data.capacity() < Segment.MAX_SEGMENT_SIZE) {
-            // so skip the header parts of a normal non-bulk, non-empty segment
-            int roots = data.getShort(data.position() + 1) & 0xffff;
-            int headerSize = 3 + roots * 3;
-            this.refposition = data.position() + align(headerSize);
-        } else {
-            this.refposition = data.position();
+        int refpos = data.position();
+        if (isDataSegmentId(uuid)) {
+            int roots = data.getShort(refpos + 1) & 0xffff;
+            refpos += align(3 + roots * 3);
         }
+        this.refposition = refpos;
     }
 
     /**
@@ -140,6 +141,25 @@ public class Segment {
         return uuid;
     }
 
+    public List<UUID> getReferencedIds() {
+        if (isDataSegmentId(uuid)) {
+            int refcount = data.get(data.position()) & 0xff;
+            List<UUID> refs = newArrayListWithCapacity(refcount);
+            for (int i = 0; i < refcount; i++) {
+                refs.add(new UUID(
+                        data.getLong(refposition + i * 16),
+                        data.getLong(refposition + i * 16 + 8)));
+            }
+            return refs;
+        } else {
+            return emptyList();
+        }
+    }
+
+    public int size() {
+        return data.remaining();
+    }
+
     byte readByte(int offset) {
         return data.get(pos(offset, 1));
     }

Added: 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentIdFactory.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentIdFactory.java?rev=1545745&view=auto
==============================================================================
--- 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentIdFactory.java
 (added)
+++ 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentIdFactory.java
 Tue Nov 26 17:32:02 2013
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.segment;
+
+import java.security.SecureRandom;
+import java.util.UUID;
+
+public class SegmentIdFactory {
+
+    private static final SecureRandom random = new SecureRandom();
+
+    private static final long MSB_MASK = ~(0xfL << 12);
+
+    private static final long VERSION = (0x4L << 12);
+
+    private static final long LSB_MASK = ~(0xfL << 60);
+
+    private static final long DATA = 0xAL << 60;
+
+    private static final long BULK = 0xBL << 60;
+
+    private static UUID newSegmentId(long type) {
+        long msb = (random.nextLong() & MSB_MASK) | VERSION;
+        long lsb = (random.nextLong() & LSB_MASK) | type;
+        return new UUID(msb, lsb);
+    }
+
+    static UUID newDataSegmentId() {
+        return newSegmentId(DATA);
+    }
+
+    static UUID newBulkSegmentId() {
+        return newSegmentId(BULK);
+    }
+
+    public static boolean isDataSegmentId(UUID id) {
+        return (id.getLeastSignificantBits() & ~LSB_MASK) == DATA; 
+    }
+
+    public static boolean isBulkSegmentId(UUID id) {
+        return (id.getLeastSignificantBits() & ~LSB_MASK) == BULK; 
+    }
+
+}

Modified: 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java?rev=1545745&r1=1545744&r2=1545745&view=diff
==============================================================================
--- 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
 (original)
+++ 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
 Tue Nov 26 17:32:02 2013
@@ -33,6 +33,8 @@ import static org.apache.jackrabbit.oak.
 import static 
org.apache.jackrabbit.oak.plugins.segment.MapRecord.BUCKETS_PER_LEVEL;
 import static 
org.apache.jackrabbit.oak.plugins.segment.Segment.MAX_SEGMENT_SIZE;
 import static org.apache.jackrabbit.oak.plugins.segment.Segment.align;
+import static 
org.apache.jackrabbit.oak.plugins.segment.SegmentIdFactory.newBulkSegmentId;
+import static 
org.apache.jackrabbit.oak.plugins.segment.SegmentIdFactory.newDataSegmentId;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
@@ -96,7 +98,7 @@ public class SegmentWriter {
         }
     };
 
-    private UUID uuid = UUID.randomUUID();
+    private UUID uuid = newDataSegmentId();
 
     /**
      * Insertion-ordered map from the UUIDs of referenced segments to the
@@ -136,7 +138,7 @@ public class SegmentWriter {
     public SegmentWriter(SegmentStore store) {
         this.store = store;
         this.dummySegment =
-                new Segment(store, UUID.randomUUID(), ByteBuffer.allocate(0));
+                new Segment(store, newBulkSegmentId(), ByteBuffer.allocate(0));
     }
 
     private void writeSegmentHeader(ByteBuffer b) {
@@ -194,7 +196,7 @@ public class SegmentWriter {
 
             store.writeSegment(uuid, buffer, buffer.length - length, length);
 
-            uuid = UUID.randomUUID();
+            uuid = newDataSegmentId();
             refids.clear();
             roots.clear();
             length = 0;
@@ -581,7 +583,7 @@ public class SegmentWriter {
 
     private RecordId internalWriteStream(InputStream stream)
             throws IOException {
-        byte[] data = new byte[Segment.MAX_SEGMENT_SIZE];
+        byte[] data = new byte[MAX_SEGMENT_SIZE];
         int n = ByteStreams.read(stream, data, 0, data.length);
 
         // Special case for short binaries (up to about 16kB):
@@ -607,29 +609,20 @@ public class SegmentWriter {
         long length = n;
         List<RecordId> blockIds = newArrayListWithExpectedSize(n / 4096);
 
-        // Write full bulk segments
-        while (n == buffer.length) {
-            UUID id = UUID.randomUUID();
-            store.writeSegment(id, data, 0, data.length);
+        // Write the data to bulk segments and collect the list of block ids
+        while (n != 0) {
+            UUID id = newBulkSegmentId();
+            int len = align(n);
+            store.writeSegment(id, data, 0, len);
 
-            for (int i = 0; i < data.length; i += BLOCK_SIZE) {
-                blockIds.add(new RecordId(id, i));
+            for (int i = 0; i < n; i += BLOCK_SIZE) {
+                blockIds.add(new RecordId(id, data.length - len + i));
             }
 
             n = ByteStreams.read(stream, data, 0, data.length);
             length += n;
         }
 
-
-        // Inline the remaining blocks in the current segments
-        for (int p = 0; p < n; p += BLOCK_SIZE) {
-            int size = Math.min(n - p, BLOCK_SIZE);
-            synchronized (this) {
-                blockIds.add(prepare(RecordType.BLOCK, size));
-                System.arraycopy(data, p, buffer, position, size);
-            }
-        }
-
         return writeValueRecord(length, writeList(blockIds));
     }
 

Modified: 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/FileStore.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/FileStore.java?rev=1545745&r1=1545744&r2=1545745&view=diff
==============================================================================
--- 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/FileStore.java
 (original)
+++ 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/FileStore.java
 Tue Nov 26 17:32:02 2013
@@ -19,14 +19,17 @@ package org.apache.jackrabbit.oak.plugin
 import static com.google.common.base.Charsets.UTF_8;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.collect.Lists.newArrayList;
 import static com.google.common.collect.Lists.newLinkedList;
 import static com.google.common.collect.Maps.newHashMap;
 import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
+import static org.apache.jackrabbit.oak.plugins.segment.SegmentIdFactory.isBulkSegmentId;
 
 import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.LinkedList;
+import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
@@ -55,7 +58,7 @@ public class FileStore extends AbstractS
 
     private final LinkedList<TarFile> bulkFiles = newLinkedList();
 
-    private final LinkedList<TarFile> treeFiles = newLinkedList();
+    private final LinkedList<TarFile> dataFiles = newLinkedList();
 
     private final Map<String, Journal> journals = newHashMap();
 
@@ -86,14 +89,14 @@ public class FileStore extends AbstractS
             String name = String.format(FILE_NAME_FORMAT, "data", i);
             File file = new File(directory, name);
             if (file.isFile()) {
-                treeFiles.add(new TarFile(file, maxFileSize, memoryMapping));
+                dataFiles.add(new TarFile(file, maxFileSize, memoryMapping));
             } else {
                 break;
             }
         }
 
         Segment segment = getWriter().getDummySegment();
-        for (TarFile tar : treeFiles) {
+        for (TarFile tar : dataFiles) {
             ByteBuffer buffer = tar.readEntry(JOURNALS_UUID);
             if (buffer != null) {
                 checkState(JOURNAL_MAGIC == buffer.getLong());
@@ -118,6 +121,17 @@ public class FileStore extends AbstractS
         }
     }
 
+    public Iterable<UUID> getSegmentIds() {
+        List<UUID> ids = newArrayList();
+        for (TarFile file : dataFiles) {
+            ids.addAll(file.getUUIDs());
+        }
+        for (TarFile file : bulkFiles) {
+            ids.addAll(file.getUUIDs());
+        }
+        return ids;
+    }
+
     @Override
     public synchronized void close() {
         try {
@@ -126,10 +140,10 @@ public class FileStore extends AbstractS
                 file.close();
             }
             bulkFiles.clear();
-            for (TarFile file : treeFiles) {
+            for (TarFile file : dataFiles) {
                 file.close();
             }
-            treeFiles.clear();
+            dataFiles.clear();
             System.gc(); // for any memory-mappings that are no longer used
         } catch (Exception e) {
             throw new RuntimeException(e);
@@ -148,7 +162,7 @@ public class FileStore extends AbstractS
 
     @Override
     protected Segment loadSegment(UUID id) throws Exception {
-        for (TarFile file : treeFiles) {
+        for (TarFile file : dataFiles) {
             ByteBuffer buffer = file.readEntry(id);
             if (buffer != null) {
                 return new Segment(FileStore.this, id, buffer);
@@ -176,9 +190,9 @@ public class FileStore extends AbstractS
     private void writeEntry(
             UUID segmentId, byte[] buffer, int offset, int length)
             throws IOException {
-        LinkedList<TarFile> files = treeFiles;
+        LinkedList<TarFile> files = dataFiles;
         String base = "data";
-        if (length == Segment.MAX_SEGMENT_SIZE) {
+        if (isBulkSegmentId(segmentId)) {
             files = bulkFiles;
             base = "bulk";
         }

Modified: 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/TarFile.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/TarFile.java?rev=1545745&r1=1545744&r2=1545745&view=diff
==============================================================================
--- 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/TarFile.java
 (original)
+++ 
jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/file/TarFile.java
 Tue Nov 26 17:32:02 2013
@@ -24,6 +24,7 @@ import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Map;
+import java.util.Set;
 import java.util.UUID;
 
 class TarFile {
@@ -90,6 +91,10 @@ class TarFile {
         }
     }
 
+    Set<UUID> getUUIDs() {
+        return entries.keySet();
+    }
+
     ByteBuffer readEntry(UUID id) throws IOException {
         Location location = entries.get(id);
         if (location != null) {

Modified: 
jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/RecordTest.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/RecordTest.java?rev=1545745&r1=1545744&r2=1545745&view=diff
==============================================================================
--- 
jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/RecordTest.java
 (original)
+++ 
jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/RecordTest.java
 Tue Nov 26 17:32:02 2013
@@ -19,6 +19,7 @@ package org.apache.jackrabbit.oak.plugin
 import static com.google.common.collect.Lists.newArrayList;
 import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
 import static org.apache.jackrabbit.oak.plugins.segment.ListRecord.LEVEL_SIZE;
+import static org.apache.jackrabbit.oak.plugins.segment.SegmentIdFactory.newBulkSegmentId;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
@@ -33,7 +34,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import java.util.UUID;
 
 import org.apache.jackrabbit.oak.api.Blob;
 import org.apache.jackrabbit.oak.plugins.segment.memory.MemoryStore;
@@ -114,7 +114,7 @@ public class RecordTest {
     public void testListWithLotsOfReferences() { // OAK-1184
         List<RecordId> list = newArrayList();
         for (int i = 0; i < 1000; i++) {
-            list.add(new RecordId(UUID.randomUUID(), 0));
+            list.add(new RecordId(newBulkSegmentId(), 0));
         }
         writer.writeList(list);
     }

Modified: 
jackrabbit/oak/trunk/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java?rev=1545745&r1=1545744&r2=1545745&view=diff
==============================================================================
--- 
jackrabbit/oak/trunk/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
 (original)
+++ 
jackrabbit/oak/trunk/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
 Tue Nov 26 17:32:02 2013
@@ -112,7 +112,7 @@ class OakDirectory extends Directory {
         // do nothing
     }
 
-    private static final int BLOB_SIZE = 10 * 1024; // > MongoMK inline limit
+    private static final int BLOB_SIZE = 32 * 1024; // > blob inline limit
 
     private static class OakIndexFile {
 

Modified: 
jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/Main.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/Main.java?rev=1545745&r1=1545744&r2=1545745&view=diff
==============================================================================
--- 
jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/Main.java
 (original)
+++ 
jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/Main.java
 Tue Nov 26 17:32:02 2013
@@ -16,9 +16,16 @@
  */
 package org.apache.jackrabbit.oak.run;
 
+import static com.google.common.collect.Sets.newHashSet;
+
 import java.io.File;
 import java.io.InputStream;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 import java.util.Properties;
+import java.util.Queue;
+import java.util.Set;
 import java.util.UUID;
 
 import javax.jcr.Repository;
@@ -33,6 +40,8 @@ import org.apache.jackrabbit.oak.benchma
 import org.apache.jackrabbit.oak.http.OakServlet;
 import org.apache.jackrabbit.oak.jcr.Jcr;
 import org.apache.jackrabbit.oak.kernel.KernelNodeStore;
+import org.apache.jackrabbit.oak.plugins.segment.Segment;
+import org.apache.jackrabbit.oak.plugins.segment.SegmentIdFactory;
 import org.apache.jackrabbit.oak.plugins.segment.SegmentNodeStore;
 import org.apache.jackrabbit.oak.plugins.segment.file.FileStore;
 import org.apache.jackrabbit.oak.spi.state.NodeStore;
@@ -44,6 +53,10 @@ import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
 
+import com.google.common.collect.Maps;
+import com.google.common.collect.Queues;
+import com.google.common.collect.Sets;
+
 public class Main {
 
     public static final int PORT = 8080;
@@ -75,17 +88,76 @@ public class Main {
                 System.err.println("usage: upgrade <olddir> <newdir>");
                 System.exit(1);
             }
-        } else if ("inspect".equals(command)) {
+        } else if ("tarmk".equals(command)) {
             if (args.length == 0) {
-                System.err.println("usage: inspect <path> [uuid...]");
+                System.err.println("usage: tarmk <path> [id...]");
                 System.exit(1);
             } else {
+                System.out.println("TarMK " + args[0]);
                 File file = new File(args[0]);
                 FileStore store = new FileStore(file, 256 * 1024 * 1024, false);
                 try {
-                    for (int i = 1; i < args.length; i++) {
-                        UUID uuid = UUID.fromString(args[i]);
-                        System.out.println(store.readSegment(uuid));
+                    if (args.length == 1) {
+                        Map<UUID, List<UUID>> idmap = Maps.newHashMap();
+
+                        int dataCount = 0;
+                        long dataSize = 0;
+                        int bulkCount = 0;
+                        long bulkSize = 0;
+                        for (UUID uuid : store.getSegmentIds()) {
+                            if (SegmentIdFactory.isDataSegmentId(uuid)) {
+                                Segment segment = store.readSegment(uuid);
+                                dataCount++;
+                                dataSize += segment.size();
+                                idmap.put(uuid, segment.getReferencedIds());
+                            } else if (SegmentIdFactory.isBulkSegmentId(uuid)) {
+                                bulkCount++;
+                                bulkSize += store.readSegment(uuid).size();
+                                idmap.put(uuid, Collections.<UUID>emptyList());
+                            }
+                        }
+                        System.out.println("Total size:");
+                        System.out.format(
+                                "%6dMB in %6d data segments%n",
+                                dataSize / (1024 * 1024), dataCount);
+                        System.out.format(
+                                "%6dMB in %6d bulk segments%n",
+                                bulkSize / (1024 * 1024), bulkCount);
+
+                        Set<UUID> garbage = newHashSet(idmap.keySet());
+                        Queue<UUID> queue = Queues.newArrayDeque();
+                        queue.add(store.getJournal("root").getHead().getSegmentId());
+                        while (!queue.isEmpty()) {
+                            UUID id = queue.remove();
+                            if (garbage.remove(id)) {
+                                queue.addAll(idmap.get(id));
+                            }
+                        }
+                        dataCount = 0;
+                        dataSize = 0;
+                        bulkCount = 0;
+                        bulkSize = 0;
+                        for (UUID uuid : garbage) {
+                            if (SegmentIdFactory.isDataSegmentId(uuid)) {
+                                dataCount++;
+                                dataSize += store.readSegment(uuid).size();
+                            } else if (SegmentIdFactory.isBulkSegmentId(uuid)) {
+                                bulkCount++;
+                                bulkSize += store.readSegment(uuid).size();
+                            }
+                        }
+                        System.out.println("Available for garbage collection:");
+                        System.out.format(
+                                "%6dkB in %6d data segments%n",
+                                dataSize / 1024, dataCount);
+                        System.out.format(
+                                "%6dMB in %6d bulk segments%n",
+                                bulkSize / (1024 * 1024), bulkCount);
+                    } else {
+                        for (int i = 1; i < args.length; i++) {
+                            UUID uuid = UUID.fromString(args[i]);
+                            System.out.println(store.readSegment(uuid));
+                        }
                     }
                 } finally {
                     store.close();


Reply via email to