Author: stack
Date: Fri Mar 6 06:47:04 2009
New Revision: 750784
URL: http://svn.apache.org/viewvc?rev=750784&view=rev
Log:
HBASE-1245 hfile meta block handling bugs
Modified:
hadoop/hbase/trunk/CHANGES.txt
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java
Modified: hadoop/hbase/trunk/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=750784&r1=750783&r2=750784&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Fri Mar 6 06:47:04 2009
@@ -34,6 +34,7 @@
HBASE-1239 in the REST interface does not correctly clear the character
buffer each iteration (Brian Beggs via Stack)
HBASE-1185 wrong request/sec in the gui, reporting wrong
+ HBASE-1245 hfile meta block handling bugs (Ryan Rawson via Stack)
IMPROVEMENTS
HBASE-1089 Add count of regions on filesystem to master UI; add percentage
Modified:
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL:
http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=750784&r1=750783&r2=750784&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
(original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java Fri
Mar 6 06:47:04 2009
@@ -713,7 +713,7 @@
// Read in the metadata index.
if (trailer.metaIndexCount > 0) {
- this.metaIndex = BlockIndex.readIndex(this.comparator,
+ this.metaIndex = BlockIndex.readIndex(Bytes.BYTES_RAWCOMPARATOR,
this.istream, this.trailer.metaIndexOffset, trailer.metaIndexCount);
}
this.fileInfoLoaded = true;
@@ -784,6 +784,9 @@
* @throws IOException
*/
public ByteBuffer getMetaBlock(String metaBlockName) throws IOException {
+ if (trailer.metaIndexCount == 0) {
+ return null; // there are no meta blocks
+ }
if (metaIndex == null) {
throw new IOException("Meta index not loaded");
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java
URL:
http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java?rev=750784&r1=750783&r2=750784&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java
(original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java Fri Mar
6 06:47:04 2009
@@ -44,6 +44,7 @@
* Estimate based on study of jhat and jprofiler numbers.
*/
// JHat says BU is 56 bytes.
+ // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
public static final int ESTIMATED_HEAP_TAX = 16;
/**
Modified:
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java
URL:
http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java?rev=750784&r1=750783&r2=750784&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java
(original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java
Fri Mar 6 06:47:04 2009
@@ -169,7 +169,7 @@
private void writeNumMetablocks(Writer writer, int n) {
for (int i = 0; i < n; i++) {
- writer.appendMetaBlock("TfileMeta" + i, ("something to test" +
i).getBytes());
+ writer.appendMetaBlock("HFileMeta" + i, ("something to test" +
i).getBytes());
}
}
@@ -179,7 +179,7 @@
private void readNumMetablocks(Reader reader, int n) throws IOException {
for (int i = 0; i < n; i++) {
- ByteBuffer b = reader.getMetaBlock("TfileMeta" + i);
+ ByteBuffer b = reader.getMetaBlock("HFileMeta" + i);
byte [] found = Bytes.toBytes(b);
assertTrue("failed to match metadata", Arrays.equals(
("something to test" + i).getBytes(), found));
@@ -191,7 +191,7 @@
}
private void metablocks(final String compress) throws Exception {
- Path mFile = new Path(ROOT_DIR, "meta.tfile");
+ Path mFile = new Path(ROOT_DIR, "meta.hfile");
FSDataOutputStream fout = createFSOutput(mFile);
Writer writer = new Writer(fout, minBlockSize,
Compression.getCompressionAlgorithmByName(compress), null, false);
@@ -216,6 +216,19 @@
metablocks("gz");
}
+ public void testNullMetaBlocks() throws Exception {
+ Path mFile = new Path(ROOT_DIR, "nometa.hfile");
+ FSDataOutputStream fout = createFSOutput(mFile);
+ Writer writer = new Writer(fout, minBlockSize,
+ Compression.Algorithm.NONE, null, false);
+ writer.append("foo".getBytes(), "value".getBytes());
+ writer.close();
+ fout.close();
+ Reader reader = new Reader(fs, mFile, null);
+ reader.loadFileInfo();
+ assertNull(reader.getMetaBlock("non-existant"));
+ }
+
/**
* Make sure the originals for our compression libs don't change on us.
*/