hadoop git commit: HDFS-6945. BlockManager should remove a block from excessReplicateMap and decrement ExcessBlocks metric when the block is removed. (aajisaka)

2015-07-20 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 63e4ada51 -> 3e793224f


HDFS-6945. BlockManager should remove a block from excessReplicateMap and 
decrement ExcessBlocks metric when the block is removed. (aajisaka)

(cherry picked from commit 18a91fe4df0448d9f7de91602646ecf5a51c52e4)
(cherry picked from commit b85bbca74565b18dfa6689c9545d07bff5d31f83)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3e793224
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3e793224
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3e793224

Branch: refs/heads/branch-2.7
Commit: 3e793224f929bfc272dbfdb0f580208c8703d31f
Parents: 63e4ada
Author: Akira Ajisaka aajis...@apache.org
Authored: Wed Apr 1 09:07:28 2015 +0900
Committer: Akira Ajisaka aajis...@apache.org
Committed: Tue Jul 21 11:42:21 2015 +0900

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../server/blockmanagement/BlockManager.java| 22 ++--
 .../namenode/metrics/TestNameNodeMetrics.java   |  9 ++--
 3 files changed, 30 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3e793224/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 5cfb9a5..8aab98a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -16,6 +16,9 @@ Release 2.7.2 - UNRELEASED
 
   BUG FIXES
 
+    HDFS-6945. BlockManager should remove a block from excessReplicateMap and
+    decrement ExcessBlocks metric when the block is removed. (aajisaka)
+
 Release 2.7.1 - 2015-07-06
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3e793224/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index 09e5748..d770346 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -3382,8 +3382,7 @@ public class BlockManager {
     // file already removes them from the block map below.
     block.setNumBytes(BlockCommand.NO_ACK);
     addToInvalidates(block);
-    corruptReplicas.removeFromCorruptReplicasMap(block);
-    blocksMap.removeBlock(block);
+    removeBlockFromMap(block);
     // Remove the block from pendingReplications and neededReplications
     pendingReplications.remove(block);
     neededReplications.remove(block, UnderReplicatedBlocks.LEVEL);
@@ -3559,11 +3558,30 @@
   }
 
   public void removeBlockFromMap(Block block) {
+    removeFromExcessReplicateMap(block);
     blocksMap.removeBlock(block);
     // If block is removed from blocksMap remove it from corruptReplicasMap
     corruptReplicas.removeFromCorruptReplicasMap(block);
   }
 
+  /**
+   * If a block is removed from blocksMap, remove it from excessReplicateMap.
+   */
+  private void removeFromExcessReplicateMap(Block block) {
+    for (DatanodeStorageInfo info : blocksMap.getStorages(block)) {
+      String uuid = info.getDatanodeDescriptor().getDatanodeUuid();
+      LightWeightLinkedSet<Block> excessReplicas = excessReplicateMap.get(uuid);
+      if (excessReplicas != null) {
+        if (excessReplicas.remove(block)) {
+          excessBlocksCount.decrementAndGet();
+          if (excessReplicas.isEmpty()) {
+            excessReplicateMap.remove(uuid);
+          }
+        }
+      }
+    }
+  }
+
   public int getCapacity() {
     return blocksMap.getCapacity();
   }
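The added helper iterates over every storage that still holds the block, removes the block from that datanode's excess-replica set, decrements the counter behind the ExcessBlocks metric, and drops the set once it is empty so excessReplicateMap no longer leaks entries. A minimal, self-contained sketch of the same bookkeeping, using plain java.util collections and a hypothetical ExcessReplicaTracker class in place of the HDFS-internal LightWeightLinkedSet and DatanodeStorageInfo types:

import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical stand-in for the BlockManager bookkeeping: one set of excess
// block IDs per datanode UUID, plus the counter behind the ExcessBlocks metric.
public class ExcessReplicaTracker {
  private final Map<String, Set<Long>> excessReplicateMap = new HashMap<>();
  private final AtomicLong excessBlocksCount = new AtomicLong();

  // Flag one replica on the given datanode as excess.
  public void markExcess(String datanodeUuid, long blockId) {
    Set<Long> excess =
        excessReplicateMap.computeIfAbsent(datanodeUuid, k -> new LinkedHashSet<>());
    if (excess.add(blockId)) {
      excessBlocksCount.incrementAndGet();
    }
  }

  // Mirrors the pattern of removeFromExcessReplicateMap(): when a block is
  // removed outright, drop it from every datanode's excess set, keep the
  // counter in sync, and discard sets that become empty.
  public void removeBlock(long blockId, Collection<String> datanodeUuids) {
    for (String uuid : datanodeUuids) {
      Set<Long> excess = excessReplicateMap.get(uuid);
      if (excess != null && excess.remove(blockId)) {
        excessBlocksCount.decrementAndGet();
        if (excess.isEmpty()) {
          excessReplicateMap.remove(uuid);
        }
      }
    }
  }

  public long getExcessBlocks() {
    return excessBlocksCount.get();
  }
}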

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3e793224/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
index 63ab395..1fbb62c 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
+++ 

[02/50] [abbrv] hadoop git commit: HDFS-6945. BlockManager should remove a block from excessReplicateMap and decrement ExcessBlocks metric when the block is removed. (aajisaka)

2015-04-06 Thread zjshen
HDFS-6945. BlockManager should remove a block from excessReplicateMap and 
decrement ExcessBlocks metric when the block is removed. (aajisaka)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/71b43741
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/71b43741
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/71b43741

Branch: refs/heads/YARN-2928
Commit: 71b43741dde686725ac188ae91b14185989944a1
Parents: 40a1282
Author: Akira Ajisaka aajis...@apache.org
Authored: Wed Apr 1 09:07:28 2015 +0900
Committer: Zhijie Shen zjs...@apache.org
Committed: Mon Apr 6 12:08:09 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../server/blockmanagement/BlockManager.java| 22 ++--
 .../namenode/metrics/TestNameNodeMetrics.java   |  9 ++--
 3 files changed, 30 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/71b43741/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f3537b0..4247ea6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -398,6 +398,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-7997. The first non-existing xattr should also throw IOException.
     (zhouyingchao via yliu)
 
+    HDFS-6945. BlockManager should remove a block from excessReplicateMap and
+    decrement ExcessBlocks metric when the block is removed. (aajisaka)
+
 Release 2.7.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/71b43741/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index f6e15a3..d9aee62 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -3351,8 +3351,7 @@ public class BlockManager {
     // file already removes them from the block map below.
     block.setNumBytes(BlockCommand.NO_ACK);
     addToInvalidates(block);
-    corruptReplicas.removeFromCorruptReplicasMap(block);
-    blocksMap.removeBlock(block);
+    removeBlockFromMap(block);
     // Remove the block from pendingReplications and neededReplications
     pendingReplications.remove(block);
     neededReplications.remove(block, UnderReplicatedBlocks.LEVEL);
@@ -3528,11 +3527,30 @@
   }
 
   public void removeBlockFromMap(Block block) {
+    removeFromExcessReplicateMap(block);
     blocksMap.removeBlock(block);
     // If block is removed from blocksMap remove it from corruptReplicasMap
     corruptReplicas.removeFromCorruptReplicasMap(block);
   }
 
+  /**
+   * If a block is removed from blocksMap, remove it from excessReplicateMap.
+   */
+  private void removeFromExcessReplicateMap(Block block) {
+    for (DatanodeStorageInfo info : blocksMap.getStorages(block)) {
+      String uuid = info.getDatanodeDescriptor().getDatanodeUuid();
+      LightWeightLinkedSet<Block> excessReplicas = excessReplicateMap.get(uuid);
+      if (excessReplicas != null) {
+        if (excessReplicas.remove(block)) {
+          excessBlocksCount.decrementAndGet();
+          if (excessReplicas.isEmpty()) {
+            excessReplicateMap.remove(uuid);
+          }
+        }
+      }
+    }
+  }
+
   public int getCapacity() {
     return blocksMap.getCapacity();
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/71b43741/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
index 64ea1e4..b390391 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
@@ -280,11 +280,16 @@ public class TestNameNodeMetrics {
   public void testExcessBlocks() throws Exception {
   

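The intent of the test change is that once the over-replicated file is deleted, the ExcessBlocks gauge falls back to zero and the per-datanode entry disappears. A rough usage sketch of that invariant against the hypothetical ExcessReplicaTracker from the earlier sketch (illustrative only, not the actual TestNameNodeMetrics hunk):

import java.util.Collections;

public class ExcessReplicaTrackerDemo {
  public static void main(String[] args) {
    ExcessReplicaTracker tracker = new ExcessReplicaTracker();

    // Lowering a file's replication factor leaves one excess replica on dn-1.
    tracker.markExcess("dn-1", 1001L);
    System.out.println(tracker.getExcessBlocks());  // prints 1

    // Deleting the file removes the block; the metric must return to zero
    // and the dn-1 entry must be dropped, which is the behaviour HDFS-6945
    // restores in BlockManager.removeBlockFromMap().
    tracker.removeBlock(1001L, Collections.singleton("dn-1"));
    System.out.println(tracker.getExcessBlocks());  // prints 0
  }
}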
hadoop git commit: HDFS-6945. BlockManager should remove a block from excessReplicateMap and decrement ExcessBlocks metric when the block is removed. (aajisaka)

2015-03-31 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 4cf44bef5 -> b85bbca74


HDFS-6945. BlockManager should remove a block from excessReplicateMap and 
decrement ExcessBlocks metric when the block is removed. (aajisaka)

(cherry picked from commit 18a91fe4df0448d9f7de91602646ecf5a51c52e4)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b85bbca7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b85bbca7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b85bbca7

Branch: refs/heads/branch-2
Commit: b85bbca74565b18dfa6689c9545d07bff5d31f83
Parents: 4cf44be
Author: Akira Ajisaka aajis...@apache.org
Authored: Wed Apr 1 09:07:28 2015 +0900
Committer: Akira Ajisaka aajis...@apache.org
Committed: Wed Apr 1 09:08:22 2015 +0900

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../server/blockmanagement/BlockManager.java| 22 ++--
 .../namenode/metrics/TestNameNodeMetrics.java   |  9 ++--
 3 files changed, 30 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b85bbca7/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 3bfc550..1d733a0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -83,6 +83,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-7997. The first non-existing xattr should also throw IOException.
     (zhouyingchao via yliu)
 
+    HDFS-6945. BlockManager should remove a block from excessReplicateMap and
+    decrement ExcessBlocks metric when the block is removed. (aajisaka)
+
 Release 2.7.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b85bbca7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index 11965c1..acb5c44 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -3354,8 +3354,7 @@ public class BlockManager {
     // file already removes them from the block map below.
     block.setNumBytes(BlockCommand.NO_ACK);
     addToInvalidates(block);
-    corruptReplicas.removeFromCorruptReplicasMap(block);
-    blocksMap.removeBlock(block);
+    removeBlockFromMap(block);
     // Remove the block from pendingReplications and neededReplications
     pendingReplications.remove(block);
     neededReplications.remove(block, UnderReplicatedBlocks.LEVEL);
@@ -3531,11 +3530,30 @@
   }
 
   public void removeBlockFromMap(Block block) {
+    removeFromExcessReplicateMap(block);
     blocksMap.removeBlock(block);
     // If block is removed from blocksMap remove it from corruptReplicasMap
     corruptReplicas.removeFromCorruptReplicasMap(block);
   }
 
+  /**
+   * If a block is removed from blocksMap, remove it from excessReplicateMap.
+   */
+  private void removeFromExcessReplicateMap(Block block) {
+    for (DatanodeStorageInfo info : blocksMap.getStorages(block)) {
+      String uuid = info.getDatanodeDescriptor().getDatanodeUuid();
+      LightWeightLinkedSet<Block> excessReplicas = excessReplicateMap.get(uuid);
+      if (excessReplicas != null) {
+        if (excessReplicas.remove(block)) {
+          excessBlocksCount.decrementAndGet();
+          if (excessReplicas.isEmpty()) {
+            excessReplicateMap.remove(uuid);
+          }
+        }
+      }
+    }
+  }
+
   public int getCapacity() {
     return blocksMap.getCapacity();
  }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b85bbca7/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
index 2ba609d..438c2d7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
+++ 

hadoop git commit: HDFS-6945. BlockManager should remove a block from excessReplicateMap and decrement ExcessBlocks metric when the block is removed. (aajisaka)

2015-03-31 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/trunk e428fea73 -> 18a91fe4d


HDFS-6945. BlockManager should remove a block from excessReplicateMap and 
decrement ExcessBlocks metric when the block is removed. (aajisaka)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/18a91fe4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/18a91fe4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/18a91fe4

Branch: refs/heads/trunk
Commit: 18a91fe4df0448d9f7de91602646ecf5a51c52e4
Parents: e428fea
Author: Akira Ajisaka aajis...@apache.org
Authored: Wed Apr 1 09:07:28 2015 +0900
Committer: Akira Ajisaka aajis...@apache.org
Committed: Wed Apr 1 09:07:28 2015 +0900

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../server/blockmanagement/BlockManager.java| 22 ++--
 .../namenode/metrics/TestNameNodeMetrics.java   |  9 ++--
 3 files changed, 30 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/18a91fe4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f3537b0..4247ea6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -398,6 +398,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-7997. The first non-existing xattr should also throw IOException.
     (zhouyingchao via yliu)
 
+    HDFS-6945. BlockManager should remove a block from excessReplicateMap and
+    decrement ExcessBlocks metric when the block is removed. (aajisaka)
+
 Release 2.7.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/18a91fe4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index f6e15a3..d9aee62 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -3351,8 +3351,7 @@ public class BlockManager {
     // file already removes them from the block map below.
     block.setNumBytes(BlockCommand.NO_ACK);
     addToInvalidates(block);
-    corruptReplicas.removeFromCorruptReplicasMap(block);
-    blocksMap.removeBlock(block);
+    removeBlockFromMap(block);
     // Remove the block from pendingReplications and neededReplications
     pendingReplications.remove(block);
     neededReplications.remove(block, UnderReplicatedBlocks.LEVEL);
@@ -3528,11 +3527,30 @@
   }
 
   public void removeBlockFromMap(Block block) {
+    removeFromExcessReplicateMap(block);
     blocksMap.removeBlock(block);
     // If block is removed from blocksMap remove it from corruptReplicasMap
     corruptReplicas.removeFromCorruptReplicasMap(block);
   }
 
+  /**
+   * If a block is removed from blocksMap, remove it from excessReplicateMap.
+   */
+  private void removeFromExcessReplicateMap(Block block) {
+    for (DatanodeStorageInfo info : blocksMap.getStorages(block)) {
+      String uuid = info.getDatanodeDescriptor().getDatanodeUuid();
+      LightWeightLinkedSet<Block> excessReplicas = excessReplicateMap.get(uuid);
+      if (excessReplicas != null) {
+        if (excessReplicas.remove(block)) {
+          excessBlocksCount.decrementAndGet();
+          if (excessReplicas.isEmpty()) {
+            excessReplicateMap.remove(uuid);
+          }
+        }
+      }
+    }
+  }
+
   public int getCapacity() {
     return blocksMap.getCapacity();
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/18a91fe4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
index 64ea1e4..b390391 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
@@ -280,11 +280,16 @@ public