Repository: hbase
Updated Branches:
  refs/heads/master dbdfd8e8d -> 7efb9edec


http://git-wip-us.apache.org/repos/asf/hbase/blob/7efb9ede/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
index 3c062f8..577940b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
@@ -68,13 +68,13 @@ public class TestStoreFileScannerWithTagCompression {
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
         .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
     // Make a store file and write data to it.
-    StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs).withFilePath(f)
+    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(f)
         .withFileContext(meta).build();
 
     writeStoreFile(writer);
     writer.close();
 
-    StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf, conf);
+    StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, conf);
     StoreFileScanner s = reader.getStoreFileScanner(false, false);
     try {
       // Now do reseek with empty KV to position to the beginning of the file
@@ -94,7 +94,7 @@ public class TestStoreFileScannerWithTagCompression {
     }
   }
 
-  private void writeStoreFile(final StoreFile.Writer writer) throws IOException {
+  private void writeStoreFile(final StoreFileWriter writer) throws IOException {
     byte[] fam = Bytes.toBytes("f");
     byte[] qualifier = Bytes.toBytes("q");
     long now = System.currentTimeMillis();

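For reference, the write-then-read round trip after this rename boils down to the sketch below, assembled from the hunks above. conf, cacheConf, fs and the target path f are assumed to be set up as in the test; the types come from org.apache.hadoop.hbase.regionserver and org.apache.hadoop.hbase.io.hfile as shown in the patch.

    // Minimal sketch of the renamed API, assembled from the hunks above.
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
        .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();

    // StoreFile.WriterBuilder is now the top-level StoreFileWriter.Builder.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
        .withFilePath(f).withFileContext(meta).build();
    writeStoreFile(writer);
    writer.close();

    // StoreFile.Reader is now the top-level StoreFileReader.
    StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, conf);
    StoreFileScanner s = reader.getStoreFileScanner(false, false);
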
http://git-wip-us.apache.org/repos/asf/hbase/blob/7efb9ede/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
index 635e5b4..3e3eef9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
@@ -106,7 +106,7 @@ public class TestStripeStoreEngine {
     StoreFile sf = mock(StoreFile.class);
     when(sf.getMetadataValue(any(byte[].class)))
       .thenReturn(StripeStoreFileManager.INVALID_KEY);
-    when(sf.getReader()).thenReturn(mock(StoreFile.Reader.class));
+    when(sf.getReader()).thenReturn(mock(StoreFileReader.class));
     when(sf.getPath()).thenReturn(new Path("moo"));
     return sf;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/7efb9ede/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
index a469df6..663714a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
@@ -26,6 +26,7 @@ import com.google.common.base.Objects;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.util.StringUtils;
 
 import static org.mockito.Mockito.mock;
@@ -62,7 +63,7 @@ class MockStoreFileGenerator {
 
   protected StoreFile createMockStoreFile(final long sizeInBytes, final long seqId) {
     StoreFile mockSf = mock(StoreFile.class);
-    StoreFile.Reader reader = mock(StoreFile.Reader.class);
+    StoreFileReader reader = mock(StoreFileReader.class);
     String stringPath = "/hbase/testTable/regionA/"
         + RandomStringUtils.random(FILENAME_LENGTH, 0, 0, true, true, null, random);
     Path path = new Path(stringPath);

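The mock-based tests touched by this patch all follow the same shape; a condensed sketch follows. Every stubbed call appears in the hunks in this patch; sizeInBytes and the path string are illustrative.

    // Condensed sketch of the shared mocking pattern: getReader() now
    // yields the top-level StoreFileReader instead of StoreFile.Reader.
    StoreFile sf = mock(StoreFile.class);
    StoreFileReader r = mock(StoreFileReader.class);
    when(r.length()).thenReturn(sizeInBytes);
    when(r.getEntries()).thenReturn(sizeInBytes);
    when(r.getBloomFilterType()).thenReturn(BloomType.NONE);
    when(sf.getReader()).thenReturn(r);
    when(sf.getPath()).thenReturn(new Path("/hbase/testTable/regionA/mockFile"));
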
http://git-wip-us.apache.org/repos/asf/hbase/blob/7efb9ede/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
index 6ec4cd4..7707116 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
@@ -46,7 +46,9 @@ import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
+import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
 import org.apache.hadoop.hbase.regionserver.StripeMultiFileWriter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.mockito.invocation.InvocationOnMock;
@@ -58,7 +60,7 @@ public class TestCompactor {
     // "Files" are totally unused, it's Scanner class below that gives compactor fake KVs.
     // But compaction depends on everything under the sun, so stub everything with dummies.
     StoreFile sf = mock(StoreFile.class);
-    StoreFile.Reader r = mock(StoreFile.Reader.class);
+    StoreFileReader r = mock(StoreFileReader.class);
     when(r.length()).thenReturn(1L);
     when(r.getBloomFilterType()).thenReturn(BloomType.NONE);
     when(r.getHFileReader()).thenReturn(mock(HFile.Reader.class));
@@ -78,7 +80,7 @@ public class TestCompactor {
 
   // StoreFile.Writer has private ctor and is unwieldy, so this has to be convoluted.
   public static class StoreFileWritersCapture
-      implements Answer<StoreFile.Writer>, StripeMultiFileWriter.WriterFactory {
+      implements Answer<StoreFileWriter>, StripeMultiFileWriter.WriterFactory {
     public static class Writer {
       public ArrayList<KeyValue> kvs = new ArrayList<KeyValue>();
       public TreeMap<byte[], byte[]> data = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR);
@@ -88,10 +90,10 @@ public class TestCompactor {
     private List<Writer> writers = new ArrayList<Writer>();
 
     @Override
-    public StoreFile.Writer createWriter() throws IOException {
+    public StoreFileWriter createWriter() throws IOException {
       final Writer realWriter = new Writer();
       writers.add(realWriter);
-      StoreFile.Writer writer = mock(StoreFile.Writer.class);
+      StoreFileWriter writer = mock(StoreFileWriter.class);
       doAnswer(new Answer<Object>() {
         public Object answer(InvocationOnMock invocation) {
           return realWriter.kvs.add((KeyValue) invocation.getArguments()[0]);
@@ -120,7 +122,7 @@ public class TestCompactor {
     }
 
     @Override
-    public StoreFile.Writer answer(InvocationOnMock invocation) throws Throwable {
+    public StoreFileWriter answer(InvocationOnMock invocation) throws Throwable {
       return createWriter();
     }
 

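Stripped of the KeyValue capturing, the factory half of StoreFileWritersCapture reduces to the sketch below; WritersSketch is a hypothetical name, and the only types that matter here are the ones this patch renames.

    // Trimmed sketch: a Mockito Answer that hands out mocked
    // StoreFileWriter instances (formerly StoreFile.Writer).
    public static class WritersSketch
        implements Answer<StoreFileWriter>, StripeMultiFileWriter.WriterFactory {
      private final List<StoreFileWriter> writers = new ArrayList<StoreFileWriter>();

      @Override
      public StoreFileWriter createWriter() throws IOException {
        StoreFileWriter writer = mock(StoreFileWriter.class);
        writers.add(writer);
        return writer;
      }

      @Override
      public StoreFileWriter answer(InvocationOnMock invocation) throws Throwable {
        return createWriter();
      }
    }
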
http://git-wip-us.apache.org/repos/asf/hbase/blob/7efb9ede/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
index 146882b..160deb3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
 import org.apache.hadoop.hbase.regionserver.StripeMultiFileWriter;
 import org.apache.hadoop.hbase.regionserver.StripeStoreConfig;
@@ -744,7 +745,7 @@ public class TestStripeCompactionPolicy {
   private static StoreFile createFile(long size) throws Exception {
     StoreFile sf = mock(StoreFile.class);
     when(sf.getPath()).thenReturn(new Path("moo"));
-    StoreFile.Reader r = mock(StoreFile.Reader.class);
+    StoreFileReader r = mock(StoreFileReader.class);
     when(r.getEntries()).thenReturn(size);
     when(r.length()).thenReturn(size);
     when(r.getBloomFilterType()).thenReturn(BloomType.NONE);

http://git-wip-us.apache.org/repos/asf/hbase/blob/7efb9ede/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
index c16d45d..0384caf 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.io.compress.Compression
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
 import org.apache.hadoop.hbase.io.hfile.{CacheConfig, HFileContextBuilder, HFileWriterImpl}
-import org.apache.hadoop.hbase.regionserver.{HStore, StoreFile, BloomType}
+import org.apache.hadoop.hbase.regionserver.{HStore, StoreFile, StoreFileWriter, BloomType}
 import org.apache.hadoop.hbase.util.Bytes
 import org.apache.hadoop.mapred.JobConf
 import org.apache.spark.broadcast.Broadcast
@@ -893,7 +893,7 @@ class HBaseContext(@transient sc: SparkContext,
     //Add a '_' to the file name because this is a unfinished file.  A rename will happen
     // to remove the '_' when the file is closed.
     new WriterLength(0,
-      new StoreFile.WriterBuilder(conf, new CacheConfig(tempConf), new HFileSystem(fs))
+      new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs))
         .withBloomType(BloomType.valueOf(familyOptions.bloomType))
         .withComparator(CellComparator.COMPARATOR).withFileContext(hFileContext)
         .withFilePath(new Path(familydir, "_" + UUID.randomUUID.toString.replaceAll("-", "")))
@@ -1048,7 +1048,7 @@ class HBaseContext(@transient sc: SparkContext,
    * @param compactionExclude      The exclude compaction metadata flag for the HFile
    */
   private def closeHFileWriter(fs:FileSystem,
-                               w: StoreFile.Writer,
+                               w: StoreFileWriter,
                                regionSplitPartitioner: BulkLoadPartitioner,
                                previousRow: Array[Byte],
                                compactionExclude: Boolean): Unit = {
@@ -1079,13 +1079,13 @@ class HBaseContext(@transient sc: SparkContext,
   }
 
   /**
-   * This is a wrapper class around StoreFile.Writer.  The reason for the
+   * This is a wrapper class around StoreFileWriter.  The reason for the
    * wrapper is to keep the length of the file along side the writer
    *
    * @param written The writer to be wrapped
    * @param writer  The number of bytes written to the writer
    */
-  class WriterLength(var written:Long, val writer:StoreFile.Writer)
+  class WriterLength(var written:Long, val writer:StoreFileWriter)
 }
 
 object LatestHBaseContextCache {
