Modified: 
hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: 
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1239901&r1=1239900&r2=1239901&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Thu Feb  2 22:49:45 2012
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.Reference.Range;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache.CacheStats;
@@ -60,6 +61,7 @@ import com.google.common.collect.Lists;
 public class TestStoreFile extends HBaseTestCase {
   static final Log LOG = LogFactory.getLog(TestStoreFile.class);
   private MiniDFSCluster cluster;
+  private CacheConfig cacheConf;
   private Map<String, Long> startingMetrics;
 
   @Override
@@ -70,6 +72,7 @@ public class TestStoreFile extends HBase
       // Set the hbase.rootdir to be the home directory in mini dfs.
       this.conf.set(HConstants.HBASE_DIR,
         this.cluster.getFileSystem().getHomeDirectory().toString());
+      this.cacheConf = new CacheConfig(conf);
     } catch (IOException e) {
       shutdownDfs(cluster);
     }
@@ -94,10 +97,10 @@ public class TestStoreFile extends HBase
     // Make up a directory hierarchy that has a regiondir and familyname.
     StoreFile.Writer writer = StoreFile.createWriter(this.fs,
       new Path(new Path(this.testDir, "regionname"), "familyname"), 2 * 1024,
-      conf);
+      conf, cacheConf);
     writeStoreFile(writer);
-    checkHalfHFile(new StoreFile(this.fs, writer.getPath(), true, conf,
-        StoreFile.BloomType.NONE, false));
+    checkHalfHFile(new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
+        StoreFile.BloomType.NONE));
   }
 
   private void writeStoreFile(final StoreFile.Writer writer) throws IOException {
@@ -135,10 +138,10 @@ public class TestStoreFile extends HBase
     Path dir = new Path(storedir, "1234567890");
     // Make a store file and write data to it.
     StoreFile.Writer writer = StoreFile.createWriter(this.fs, dir, 8 * 1024,
-        conf);
+        conf, cacheConf);
     writeStoreFile(writer);
-    StoreFile hsf = new StoreFile(this.fs, writer.getPath(), true, conf,
-        StoreFile.BloomType.NONE, false);
+    StoreFile hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
+        StoreFile.BloomType.NONE);
     StoreFile.Reader reader = hsf.createReader();
     // Split on a row, not in middle of row.  Midkey returned by reader
     // may be in middle of row.  Create new one with empty column and
@@ -149,8 +152,8 @@ public class TestStoreFile extends HBase
     byte [] finalRow = kv.getRow();
     // Make a reference
     Path refPath = StoreFile.split(fs, dir, hsf, midRow, Range.top);
-    StoreFile refHsf = new StoreFile(this.fs, refPath, true, conf,
-        StoreFile.BloomType.NONE, false);
+    StoreFile refHsf = new StoreFile(this.fs, refPath, conf, cacheConf,
+        StoreFile.BloomType.NONE);
     // Now confirm that I can read from the reference and that it only gets
     // keys from top half of the file.
     HFileScanner s = refHsf.createReader().getScanner(false, false);
@@ -186,10 +189,10 @@ public class TestStoreFile extends HBase
     Path bottomPath = StoreFile.split(this.fs, bottomDir,
       f, midRow, Range.bottom);
     // Make readers on top and bottom.
-    StoreFile.Reader top = new StoreFile(this.fs, topPath, true, conf,
-        StoreFile.BloomType.NONE, false).createReader();
-    StoreFile.Reader bottom = new StoreFile(this.fs, bottomPath, true, conf,
-        StoreFile.BloomType.NONE, false).createReader();
+    StoreFile.Reader top = new StoreFile(this.fs, topPath, conf, cacheConf,
+        StoreFile.BloomType.NONE).createReader();
+    StoreFile.Reader bottom = new StoreFile(this.fs, bottomPath, conf, cacheConf,
+        StoreFile.BloomType.NONE).createReader();
     ByteBuffer previous = null;
     LOG.info("Midkey: " + midKV.toString());
     ByteBuffer bbMidkeyBytes = ByteBuffer.wrap(midkey);
@@ -244,10 +247,10 @@ public class TestStoreFile extends HBase
       topPath = StoreFile.split(this.fs, topDir, f, badmidkey, Range.top);
       bottomPath = StoreFile.split(this.fs, bottomDir, f, badmidkey,
         Range.bottom);
-      top = new StoreFile(this.fs, topPath, true, conf,
-          StoreFile.BloomType.NONE, false).createReader();
-      bottom = new StoreFile(this.fs, bottomPath, true, conf,
-          StoreFile.BloomType.NONE, false).createReader();
+      top = new StoreFile(this.fs, topPath, conf, cacheConf,
+          StoreFile.BloomType.NONE).createReader();
+      bottom = new StoreFile(this.fs, bottomPath, conf, cacheConf,
+          StoreFile.BloomType.NONE).createReader();
       bottomScanner = bottom.getScanner(false, false);
       int count = 0;
       while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) ||
@@ -289,10 +292,10 @@ public class TestStoreFile extends HBase
       topPath = StoreFile.split(this.fs, topDir, f, badmidkey, Range.top);
       bottomPath = StoreFile.split(this.fs, bottomDir, f, badmidkey,
         Range.bottom);
-      top = new StoreFile(this.fs, topPath, true, conf,
-          StoreFile.BloomType.NONE, false).createReader();
-      bottom = new StoreFile(this.fs, bottomPath, true, conf,
-          StoreFile.BloomType.NONE, false).createReader();
+      top = new StoreFile(this.fs, topPath, conf, cacheConf,
+          StoreFile.BloomType.NONE).createReader();
+      bottom = new StoreFile(this.fs, bottomPath, conf, cacheConf,
+          StoreFile.BloomType.NONE).createReader();
       first = true;
       bottomScanner = bottom.getScanner(false, false);
       while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) ||
@@ -350,7 +353,7 @@ public class TestStoreFile extends HBase
     }
     writer.close();
 
-    StoreFile.Reader reader = new StoreFile.Reader(fs, f, null, false, false);
+    StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf);
     reader.loadFileInfo();
     reader.loadBloomfilter();
     StoreFileScanner scanner = reader.getStoreFileScanner(false, false);
@@ -390,7 +393,7 @@ public class TestStoreFile extends HBase
     Path f = new Path(ROOT_DIR, getName());
     StoreFile.Writer writer = new StoreFile.Writer(fs, f,
         StoreFile.DEFAULT_BLOCKSIZE_SMALL, HFile.DEFAULT_COMPRESSION_ALGORITHM,
-        conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, 2000);
+        conf, cacheConf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, 2000);
 
     bloomWriteRead(writer, fs);
   }
@@ -407,7 +410,7 @@ public class TestStoreFile extends HBase
     Path f = new Path(ROOT_DIR, getName());
     StoreFile.Writer writer = new StoreFile.Writer(fs, f,
         StoreFile.DEFAULT_BLOCKSIZE_SMALL, HFile.DEFAULT_COMPRESSION_ALGORITHM,
-        conf, KeyValue.COMPARATOR, StoreFile.BloomType.NONE, 2000);
+        conf, cacheConf, KeyValue.COMPARATOR, StoreFile.BloomType.NONE, 2000);
 
     // add delete family
     long now = System.currentTimeMillis();
@@ -419,8 +422,8 @@ public class TestStoreFile extends HBase
     }
     writer.close();
 
-    StoreFile.Reader reader = new StoreFile.Reader(fs, writer.getPath(), null,
-        false, false);
+    StoreFile.Reader reader = new StoreFile.Reader(fs, writer.getPath(),
+        cacheConf);
     reader.loadFileInfo();
     reader.loadBloomfilter();
 
@@ -471,11 +474,11 @@ public class TestStoreFile extends HBase
 
     for (int x : new int[]{0,1}) {
       // write the file
-      Path f = new Path(ROOT_DIR, getName());
+      Path f = new Path(ROOT_DIR, getName() + x);
       StoreFile.Writer writer = new StoreFile.Writer(fs, f,
           StoreFile.DEFAULT_BLOCKSIZE_SMALL,
           HFile.DEFAULT_COMPRESSION_ALGORITHM,
-          conf, KeyValue.COMPARATOR, bt[x], expKeys[x]);
+          conf, cacheConf, KeyValue.COMPARATOR, bt[x], expKeys[x]);
 
       long now = System.currentTimeMillis();
       for (int i = 0; i < rowCount*2; i += 2) { // rows
@@ -492,10 +495,10 @@ public class TestStoreFile extends HBase
       }
       writer.close();
 
-      StoreFile.Reader reader = new StoreFile.Reader(fs, f, null, false, false);
+      StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf);
       reader.loadFileInfo();
       reader.loadBloomfilter();
-      StoreFileScanner scanner = reader.getStoreFileScanner(false, false);
+      StoreFileScanner scanner = reader.getStoreFileScanner(true, true);
       assertEquals(expKeys[x], reader.generalBloomFilter.getKeyCount());
 
       // check false positives rate
@@ -546,7 +549,7 @@ public class TestStoreFile extends HBase
     // this should not create a bloom because the max keys is too small
     StoreFile.Writer writer = new StoreFile.Writer(fs, f,
         StoreFile.DEFAULT_BLOCKSIZE_SMALL, HFile.DEFAULT_COMPRESSION_ALGORITHM,
-        conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, 2000);
+        conf, cacheConf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, 2000);
     assertFalse(writer.hasGeneralBloom());
     writer.close();
     fs.delete(f, true);
@@ -569,7 +572,8 @@ public class TestStoreFile extends HBase
     // because Java can't create a contiguous array > MAX_INT
     writer = new StoreFile.Writer(fs, f,
         StoreFile.DEFAULT_BLOCKSIZE_SMALL, HFile.DEFAULT_COMPRESSION_ALGORITHM,
-        conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, Integer.MAX_VALUE);
+        conf, cacheConf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW,
+        Integer.MAX_VALUE);
     assertFalse(writer.hasGeneralBloom());
     writer.close();
     fs.delete(f, true);
@@ -661,7 +665,7 @@ public class TestStoreFile extends HBase
     "familyname");
     Path dir = new Path(storedir, "1234567890");
     StoreFile.Writer writer = StoreFile.createWriter(this.fs, dir, 8 * 1024,
-        conf);
+        conf, cacheConf);
 
     List<KeyValue> kvList = getKeyValueSet(timestamps,numRows,
         family, qualifier);
@@ -672,8 +676,8 @@ public class TestStoreFile extends HBase
     writer.appendMetadata(0, false);
     writer.close();
 
-    StoreFile hsf = new StoreFile(this.fs, writer.getPath(), true, conf,
-        StoreFile.BloomType.NONE, false);
+    StoreFile hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
+        StoreFile.BloomType.NONE);
     StoreFile.Reader reader = hsf.createReader();
     StoreFileScanner scanner = reader.getStoreFileScanner(false, false);
     TreeSet<byte[]> columns = new TreeSet<byte[]>();
@@ -698,14 +702,13 @@ public class TestStoreFile extends HBase
 
   public void testCacheOnWriteEvictOnClose() throws Exception {
     Configuration conf = this.conf;
-    conf.setBoolean("hbase.rs.evictblocksonclose", false);
 
     // Find a home for our files
     Path baseDir = new Path(new Path(this.testDir, "regionname"),
     "twoCOWEOC");
 
     // Grab the block cache and get the initial hit/miss counts
-    BlockCache bc = StoreFile.getBlockCache(conf);
+    BlockCache bc = new CacheConfig(conf).getBlockCache();
     assertNotNull(bc);
     CacheStats cs = bc.getStats();
     long startHit = cs.getHitCount();
@@ -713,11 +716,12 @@ public class TestStoreFile extends HBase
     long startEvicted = cs.getEvictedCount();
 
     // Let's write a StoreFile with three blocks, with cache on write off
-    conf.setBoolean(HFile.CACHE_DATA_BLOCKS_ON_WRITE_KEY, false);
+    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);
+    CacheConfig cacheConf = new CacheConfig(conf);
     Path pathCowOff = new Path(baseDir, "123456789");
-    StoreFile.Writer writer = writeStoreFile(conf, pathCowOff, 3);
-    StoreFile hsf = new StoreFile(this.fs, writer.getPath(), true, conf,
-        StoreFile.BloomType.NONE, false);
+    StoreFile.Writer writer = writeStoreFile(conf, cacheConf, pathCowOff, 3);
+    StoreFile hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
+        StoreFile.BloomType.NONE);
     LOG.debug(hsf.getPath().toString());
 
     // Read this file, we should see 3 misses
@@ -734,11 +738,12 @@ public class TestStoreFile extends HBase
     reader.close();
 
     // Now write a StoreFile with three blocks, with cache on write on
-    conf.setBoolean(HFile.CACHE_DATA_BLOCKS_ON_WRITE_KEY, true);
+    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);
+    cacheConf = new CacheConfig(conf);
     Path pathCowOn = new Path(baseDir, "123456788");
-    writer = writeStoreFile(conf, pathCowOn, 3);
-    hsf = new StoreFile(this.fs, writer.getPath(), true, conf,
-        StoreFile.BloomType.NONE, false);
+    writer = writeStoreFile(conf, cacheConf, pathCowOn, 3);
+    hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
+        StoreFile.BloomType.NONE);
 
     // Read this file, we should see 3 hits
     reader = hsf.createReader();
@@ -753,15 +758,15 @@ public class TestStoreFile extends HBase
     reader.close();
 
     // Let's read back the two files to ensure the blocks exactly match
-    hsf = new StoreFile(this.fs, pathCowOff, true, conf,
-        StoreFile.BloomType.NONE, false);
+    hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf,
+        StoreFile.BloomType.NONE);
     StoreFile.Reader readerOne = hsf.createReader();
     readerOne.loadFileInfo();
     StoreFileScanner scannerOne = readerOne.getStoreFileScanner(true, true,
         false);
     scannerOne.seek(KeyValue.LOWESTKEY);
-    hsf = new StoreFile(this.fs, pathCowOn, true, conf,
-        StoreFile.BloomType.NONE, false);
+    hsf = new StoreFile(this.fs, pathCowOn, conf, cacheConf,
+        StoreFile.BloomType.NONE);
     StoreFile.Reader readerTwo = hsf.createReader();
     readerTwo.loadFileInfo();
     StoreFileScanner scannerTwo = readerTwo.getStoreFileScanner(true, true,
@@ -791,8 +796,9 @@ public class TestStoreFile extends HBase
 
     // Let's close the first file with evict on close turned on
     conf.setBoolean("hbase.rs.evictblocksonclose", true);
-    hsf = new StoreFile(this.fs, pathCowOff, true, conf,
-        StoreFile.BloomType.NONE, false);
+    cacheConf = new CacheConfig(conf);
+    hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf,
+        StoreFile.BloomType.NONE);
     reader = hsf.createReader();
     reader.close();
 
@@ -804,8 +810,9 @@ public class TestStoreFile extends HBase
 
     // Let's close the second file with evict on close turned off
     conf.setBoolean("hbase.rs.evictblocksonclose", false);
-    hsf = new StoreFile(this.fs, pathCowOn, true, conf,
-        StoreFile.BloomType.NONE, false);
+    cacheConf = new CacheConfig(conf);
+    hsf = new StoreFile(this.fs, pathCowOn, conf, cacheConf,
+        StoreFile.BloomType.NONE);
     reader = hsf.createReader();
     reader.close();
 
@@ -815,8 +822,8 @@ public class TestStoreFile extends HBase
     assertEquals(startEvicted, cs.getEvictedCount());
   }
 
-  private StoreFile.Writer writeStoreFile(Configuration conf, Path path,
-      int numBlocks)
+  private StoreFile.Writer writeStoreFile(Configuration conf,
+      CacheConfig cacheConf, Path path, int numBlocks)
   throws IOException {
     // Let's put ~5 small KVs in each block, so let's make 5*numBlocks KVs
     int numKVs = 5 * numBlocks;
@@ -832,7 +839,7 @@ public class TestStoreFile extends HBase
     int blockSize = totalSize / numBlocks;
     StoreFile.Writer writer = new StoreFile.Writer(fs, path, blockSize,
         HFile.DEFAULT_COMPRESSION_ALGORITHM,
-        conf, KeyValue.COMPARATOR, StoreFile.BloomType.NONE, 2000);
+        conf, cacheConf, KeyValue.COMPARATOR, StoreFile.BloomType.NONE, 2000);
     // We'll write N-1 KVs to ensure we don't write an extra block
     kvs.remove(kvs.size()-1);
     for (KeyValue kv : kvs) {

Modified: 
hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
URL: 
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java?rev=1239901&r1=1239900&r2=1239901&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java Thu Feb  2 22:49:45 2012
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.HRegion;


Reply via email to