Author: suresh
Date: Wed Apr 17 00:23:22 2013
New Revision: 1468698

URL: http://svn.apache.org/r1468698
Log:
HDFS-4635. Move BlockManager#computeCapacity to LightWeightGSet. Contributed by Suresh Srinivas.

Modified:
    hadoop/common/branches/branch-1/CHANGES.txt
    hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java
    hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/util/LightWeightGSet.java
    hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/util/TestGSet.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1468698&r1=1468697&r2=1468698&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Wed Apr 17 00:23:22 2013
@@ -236,6 +236,9 @@ Release 1.2.0 - unreleased
     MAPREDUCE-5129. Allow tags to JobHistory for deeper analytics. (billie via
     acmurthy)
 
+    HDFS-4635. Move BlockManager#computeCapacity to LightWeightGSet.
+    (suresh)
+
   BUG FIXES
 
     HADOOP-9467. Metrics2 record filter should check name as well as tags.

Modified: hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java?rev=1468698&r1=1468697&r2=1468698&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java (original)
+++ hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java Wed Apr 17 00:23:22 2013
@@ -315,37 +315,11 @@ class BlocksMap {
   private GSet<Block, BlockInfo> blocks;
 
   BlocksMap(int initialCapacity, float loadFactor) {
-    this.capacity = computeCapacity();
+    // Use 2% of total memory to size the GSet capacity
+    this.capacity = LightWeightGSet.computeCapacity(2.0, "BlocksMap");
     this.blocks = new LightWeightGSet<Block, BlockInfo>(capacity);
   }
 
-  /**
-   * Let t = 2% of max memory.
-   * Let e = round(log_2 t).
-   * Then, we choose capacity = 2^e/(size of reference),
-   * unless it is outside the close interval [1, 2^30].
-   */
-  private static int computeCapacity() {
-    //VM detection
-    //See http://java.sun.com/docs/hotspot/HotSpotFAQ.html#64bit_detection
-    final String vmBit = System.getProperty("sun.arch.data.model");
-
-    //2% of max memory
-    final double twoPC = Runtime.getRuntime().maxMemory()/50.0;
-
-    //compute capacity
-    final int e1 = (int)(Math.log(twoPC)/Math.log(2.0) + 0.5);
-    final int e2 = e1 - ("32".equals(vmBit)? 2: 3);
-    final int exponent = e2 < 0? 0: e2 > 30? 30: e2;
-    final int c = 1 << exponent;
-
-    LightWeightGSet.LOG.info("VM type       = " + vmBit + "-bit");
-    LightWeightGSet.LOG.info("2% max memory = " + twoPC/(1 << 20) + " MB");
-    LightWeightGSet.LOG.info("capacity      = 2^" + exponent
-        + " = " + c + " entries");
-    return c;
-  }
-
   void close() {
     blocks = null;
   }

Modified: hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/util/LightWeightGSet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/util/LightWeightGSet.java?rev=1468698&r1=1468697&r2=1468698&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/util/LightWeightGSet.java (original)
+++ hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/util/LightWeightGSet.java Wed Apr 17 00:23:22 2013
@@ -280,4 +280,53 @@ public class LightWeightGSet<K, E extend
       throw new UnsupportedOperationException("Remove is not supported.");
     }
   }
+  
+  /**
+   * Let t = the given percentage of max memory.
+   * Let e = round(log_2 t).
+   * Then, we choose capacity = 2^e/(size of reference),
+   * unless it is outside the closed interval [1, 2^30].
+   */
+  public static int computeCapacity(double percentage, String mapName) {
+    return computeCapacity(Runtime.getRuntime().maxMemory(), percentage,
+        mapName);
+  }
+  
+  /** Visible for testing */
+  static int computeCapacity(long maxMemory, double percentage,
+      String mapName) {
+    if (percentage > 100.0 || percentage < 0.0) {
+      throw new IllegalArgumentException("Percentage " + percentage
+          + " must be greater than or equal to 0 "
+          + " and less than or equal to 100");
+    }
+    if (maxMemory < 0) {
+      throw new IllegalArgumentException("Memory " + maxMemory
+          + " must be greater than or equal to 0");
+    }
+    if (percentage == 0.0 || maxMemory == 0) {
+      return 0;
+    }
+    //VM detection
+    //See http://java.sun.com/docs/hotspot/HotSpotFAQ.html#64bit_detection
+    final String vmBit = System.getProperty("sun.arch.data.model");
+
+    //Percentage of max memory
+    final double percentDivisor = 100.0/percentage;
+    final double percentMemory = maxMemory/percentDivisor;
+    
+    //compute capacity
+    final int e1 = (int)(Math.log(percentMemory)/Math.log(2.0) + 0.5);
+    final int e2 = e1 - ("32".equals(vmBit)? 2: 3);
+    final int exponent = e2 < 0? 0: e2 > 30? 30: e2;
+    final int c = 1 << exponent;
+
+    if (LightWeightGSet.LOG.isDebugEnabled()) {
+      LOG.debug("Computing capacity for map " + mapName);
+      LOG.debug("VM type       = " + vmBit + "-bit");
+      LOG.debug(percentage + "% of max memory " + maxMemory + " bytes = "
+          + percentMemory + " bytes");
+      LOG.debug("capacity      = 2^" + exponent + " = " + c + " entries");
+    }
+    return c;
+  }
 }
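
For orientation (not part of the patch), the following standalone sketch walks the new computeCapacity arithmetic through by hand for a 1 GB heap at 2.0% on a 64-bit VM; the class name ComputeCapacityExample is hypothetical:

public class ComputeCapacityExample {
  public static void main(String[] args) {
    final long maxMemory = 1L << 30;                        // 1 GB heap
    final double percentMemory = maxMemory / (100.0 / 2.0); // 2% = ~20.5 MB

    // e1 = round(log_2(percentMemory)) = round(24.36) = 24
    final int e1 = (int) (Math.log(percentMemory) / Math.log(2.0) + 0.5);

    // 64-bit VM: references are 8 bytes = 2^3, so subtract 3; the result
    // (21) is already inside [0, 30], so no clamping applies
    final int e2 = e1 - 3;

    final int capacity = 1 << e2; // 2^21 = 2,097,152 entries
    System.out.println("capacity = " + capacity);
    // 2,097,152 references * 8 bytes = 16 MB, the power of two whose
    // exponent is nearest to log_2 of 2% of the 1 GB heap
  }
}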

Modified: hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/util/TestGSet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/util/TestGSet.java?rev=1468698&r1=1468697&r2=1468698&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/util/TestGSet.java (original)
+++ hadoop/common/branches/branch-1/src/test/org/apache/hadoop/hdfs/util/TestGSet.java Wed Apr 17 00:23:22 2013
@@ -451,4 +451,81 @@ public class TestGSet {
       next = e;
     }
   }
+  
+  /** 
+   * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
+   * with an invalid percentage less than 0.
+   */
+  @Test(expected=IllegalArgumentException.class)
+  public void testComputeCapacityNegativePercent() {
+    LightWeightGSet.computeCapacity(1024, -1.0, "testMap");
+  }
+  
+  /** 
+   * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
+   * with an invalid percentage greater than 100.
+   */
+  @Test(expected=IllegalArgumentException.class)
+  public void testComputeCapacityInvalidPercent() {
+    LightWeightGSet.computeCapacity(1024, 101.0, "testMap");
+  }
+  
+  /** 
+   * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
+   * with an invalid negative max memory.
+   */
+  @Test(expected=IllegalArgumentException.class)
+  public void testComputeCapacityInvalidMemory() {
+    LightWeightGSet.computeCapacity(-1, 50.0, "testMap");
+  }
+  
+  private static boolean isPowerOfTwo(int num) {
+    return num == 0 || (num > 0 && Integer.bitCount(num) == 1);
+  }
+  
+  /** Return capacity as percentage of total memory */
+  private static int getPercent(long total, int capacity) {
+    // Reference size in bytes
+    double referenceSize = 
+        System.getProperty("sun.arch.data.model").equals("32") ? 4.0 : 8.0;
+    return (int)(((capacity * referenceSize)/total) * 100.0);
+  }
+  
+  /** Compute capacity for the given memory and percentage, and validate the result */
+  private static void testCapacity(long maxMemory, double percent) {
+    int capacity = LightWeightGSet.computeCapacity(maxMemory, percent, "map");
+    LightWeightGSet.LOG.info("Validating - total memory " + maxMemory
+        + " percent " + percent + " returned capacity " + capacity);
+    // Returned capacity is zero or power of two
+    Assert.assertTrue(isPowerOfTwo(capacity));
+
+    // Ensure the returned capacity is the nearest to the requested percentage
+    int capacityPercent = getPercent(maxMemory, capacity);
+    if (capacityPercent == percent) {
+      return;
+    } else if (capacityPercent > percent) {
+      // Capacity overshoots; the next lower power of two must undershoot
+      Assert.assertTrue(getPercent(maxMemory, capacity / 2) < percent);
+    } else {
+      // Capacity undershoots; the next higher power of two must overshoot
+      Assert.assertTrue(getPercent(maxMemory, capacity * 2) > percent);
+    }
+  }
+  
+  /** 
+   * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
+   */
+  @Test
+  public void testComputeCapacity() {
+    // Tests for boundary conditions where percent or memory are zero
+    testCapacity(0, 0.0);
+    testCapacity(100, 0.0);
+    testCapacity(0, 100.0);
+    
+    // Compute capacity for 100 random max memory and percentage combinations
+    Random r = new Random();
+    for (int i = 0; i < 100; i++) {
+      long maxMemory = r.nextInt(Integer.MAX_VALUE);
+      double percent = r.nextInt(101);
+      testCapacity(maxMemory, percent);
+    }
+  }
 }
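
A side note on the power-of-two constraint (an observation, not part of the patch): keeping the capacity a power of two lets LightWeightGSet select a bucket with a cheap bitmask instead of a modulo. A minimal hypothetical sketch:

  // Hypothetical helper illustrating power-of-two bucket selection;
  // with capacity = 2^n, capacity - 1 is a mask of n one-bits, so the
  // result is always a valid index in [0, capacity)
  static int getIndex(Object key, int capacity) {
    return key.hashCode() & (capacity - 1);
  }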

