Repository: hbase
Updated Branches:
  refs/heads/branch-2 1049025e1 -> 381a151d1


HBASE-18002 Investigate why bucket cache filling up in file mode in an
existing file is slower (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/381a151d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/381a151d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/381a151d

Branch: refs/heads/branch-2
Commit: 381a151d14e5fe540f9ac9967115255a85d07b3c
Parents: 1049025
Author: Ramkrishna <ramkrishna.s.vasude...@intel.com>
Authored: Thu Jul 6 11:20:00 2017 +0530
Committer: Ramkrishna <ramkrishna.s.vasude...@intel.com>
Committed: Thu Jul 6 11:22:11 2017 +0530

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/bucket/BucketCache.java    |  7 ++++---
 .../hadoop/hbase/io/hfile/bucket/FileIOEngine.java   | 15 ++++++++++++++-
 .../hbase/io/hfile/bucket/TestFileIOEngine.java      |  2 +-
 3 files changed, 19 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/381a151d/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 3c27f14..489c805 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -227,7 +227,7 @@ public class BucketCache implements BlockCache, HeapSize {
   public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,
       int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration)
       throws FileNotFoundException, IOException {
-    this.ioEngine = getIOEngineFromName(ioEngineName, capacity);
+    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);
     this.writerThreads = new WriterThread[writerThreadNum];
     long blockNumCapacity = capacity / blockSize;
     if (blockNumCapacity >= Integer.MAX_VALUE) {
@@ -309,10 +309,11 @@ public class BucketCache implements BlockCache, HeapSize {
    * Get the IOEngine from the IO engine name
    * @param ioEngineName
    * @param capacity
+   * @param persistencePath
    * @return the IOEngine
    * @throws IOException
    */
-  private IOEngine getIOEngineFromName(String ioEngineName, long capacity)
+  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)
       throws IOException {
     if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {
       // In order to make the usage simple, we only need the prefix 'files:' in
@@ -320,7 +321,7 @@ public class BucketCache implements BlockCache, HeapSize {
       // the compatibility
       String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)
           .split(FileIOEngine.FILE_DELIMITER);
-      return new FileIOEngine(capacity, filePaths);
+      return new FileIOEngine(capacity, persistencePath != null, filePaths);
     } else if (ioEngineName.startsWith("offheap")) {
       return new ByteBufferIOEngine(capacity, true);
     } else if (ioEngineName.startsWith("heap")) {
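
A minimal, self-contained sketch of the selection logic in the hunk above, for readers who want to run it in isolation. IOEngine, FileIOEngine and ByteBufferIOEngine below are simplified stand-ins rather than the real HBase classes, and the literal "," stands in for FileIOEngine.FILE_DELIMITER; only the shape of the new persistencePath handling is taken from the patch.

import java.io.IOException;

public class EngineSelectionSketch {

  interface IOEngine {}

  // Stand-in for the file-backed engine; the real constructor opens and
  // pre-sizes the backing files.
  static class FileIOEngine implements IOEngine {
    FileIOEngine(long capacity, boolean maintainPersistence, String... filePaths) {}
  }

  // Stand-in for the off-heap ByteBuffer engine.
  static class ByteBufferIOEngine implements IOEngine {
    ByteBufferIOEngine(long capacity, boolean direct) {}
  }

  // Mirrors the new signature: a non-null persistencePath means the cache must
  // survive restarts, so the backing files may not be wiped.
  static IOEngine getIOEngineFromName(String ioEngineName, long capacity,
      String persistencePath) throws IOException {
    if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {
      String[] filePaths =
          ioEngineName.substring(ioEngineName.indexOf(":") + 1).split(",");
      return new FileIOEngine(capacity, persistencePath != null, filePaths);
    } else if (ioEngineName.startsWith("offheap")) {
      return new ByteBufferIOEngine(capacity, true);
    }
    throw new IllegalArgumentException("Unsupported io engine name: " + ioEngineName);
  }

  public static void main(String[] args) throws IOException {
    // No persistencePath configured: existing cache files may be deleted.
    IOEngine scratch = getIOEngineFromName("files:/tmp/bucket.cache", 1L << 20, null);
    // With a persistencePath the files must be kept so the cache can be restored.
    IOEngine kept = getIOEngineFromName("files:/tmp/bucket.cache", 1L << 20, "/tmp/bucket.persist");
    System.out.println(scratch + " " + kept);
  }
}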

http://git-wip-us.apache.org/repos/asf/hbase/blob/381a151d/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index 7586d57..a847bfe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -52,11 +52,24 @@ public class FileIOEngine implements IOEngine {
   private FileReadAccessor readAccessor = new FileReadAccessor();
   private FileWriteAccessor writeAccessor = new FileWriteAccessor();
 
-  public FileIOEngine(long capacity, String... filePaths) throws IOException {
+  public FileIOEngine(long capacity, boolean maintainPersistence, String... filePaths)
+      throws IOException {
     this.sizePerFile = capacity / filePaths.length;
     this.capacity = this.sizePerFile * filePaths.length;
     this.filePaths = filePaths;
     this.fileChannels = new FileChannel[filePaths.length];
+    if (!maintainPersistence) {
+      for (String filePath : filePaths) {
+        File file = new File(filePath);
+        if (file.exists()) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("File " + filePath + " already exists. Deleting!!");
+          }
+          file.delete();
+          // Even if the delete fails, the writes can still proceed.
+        }
+      }
+    }
     this.rafs = new RandomAccessFile[filePaths.length];
     for (int i = 0; i < filePaths.length; i++) {
       String filePath = filePaths[i];
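
The commit title gives the motivation for the block above: when the cache is not persisted, refilling a previously written cache file was slower, so a stale file is now deleted up front. Below is a runnable sketch of that pattern with placeholder paths and sizes; the delete-and-tolerate-failure part follows the hunk, while the pre-sizing step is an assumption modelled on the RandomAccessFile handling that comes after this hunk in FileIOEngine.

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

public class CacheFilePrepSketch {
  public static void main(String[] args) throws IOException {
    // Illustrative values; the real engine derives sizePerFile from
    // capacity / filePaths.length.
    long sizePerFile = 16L * 1024 * 1024;
    String[] filePaths = { "/tmp/bucketcache-demo.data" };
    boolean maintainPersistence = false;   // i.e. no persistencePath configured

    if (!maintainPersistence) {
      for (String filePath : filePaths) {
        File file = new File(filePath);
        if (file.exists() && !file.delete()) {
          // As in the patch: a failed delete is tolerated; later writes simply
          // overwrite whatever is left in the file.
          System.out.println("Could not delete " + filePath + ", continuing");
        }
      }
    }

    // Assumed follow-up step: open and pre-size each backing file.
    for (String filePath : filePaths) {
      try (RandomAccessFile raf = new RandomAccessFile(filePath, "rw")) {
        raf.setLength(sizePerFile);
      }
    }
  }
}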

http://git-wip-us.apache.org/repos/asf/hbase/blob/381a151d/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index d1f3dfe..d13022d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -58,7 +58,7 @@ public class TestFileIOEngine {
       boundaryStopPositions.add(sizePerFile * i + 1);
     }
     boundaryStopPositions.add(sizePerFile * filePaths.length - 1);
-    FileIOEngine fileIOEngine = new FileIOEngine(totalCapacity, filePaths);
+    FileIOEngine fileIOEngine = new FileIOEngine(totalCapacity, false, filePaths);
     try {
       for (int i = 0; i < 500; i++) {
         int len = (int) Math.floor(Math.random() * 100);
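
The test keeps its previous behaviour by passing false, i.e. the backing file is treated as scratch space. As a hedged usage sketch of the two modes of the changed constructor (capacity and path are placeholders; shutdown() is the IOEngine cleanup call):

import org.apache.hadoop.hbase.io.hfile.bucket.FileIOEngine;

public class FileIOEngineFlagSketch {
  public static void main(String[] args) throws Exception {
    long capacity = 32L * 1024 * 1024;
    String cacheFile = "/tmp/bucket.cache";

    // maintainPersistence = false: a leftover file at this path is deleted
    // before the engine opens it.
    FileIOEngine scratch = new FileIOEngine(capacity, false, cacheFile);
    scratch.shutdown();

    // maintainPersistence = true: an existing file is kept so a persisted
    // cache can be restored from it after restart.
    FileIOEngine persistent = new FileIOEngine(capacity, true, cacheFile);
    persistent.shutdown();
  }
}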
