Repository: hadoop
Updated Branches:
  refs/heads/trunk 39537b7c8 -> 9714fc1dd


HDFS-336. dfsadmin -report should report number of blocks from datanode. 
Contributed by Bharat Viswanadham.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9714fc1d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9714fc1d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9714fc1d

Branch: refs/heads/trunk
Commit: 9714fc1dd48edb1c40d96d69ae82ed3b0fab7748
Parents: 39537b7
Author: Arpit Agarwal <a...@apache.org>
Authored: Tue Mar 13 16:39:17 2018 -0700
Committer: Arpit Agarwal <a...@apache.org>
Committed: Tue Mar 13 16:39:17 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hdfs/protocol/DatanodeInfo.java      | 31 ++++++++++++++--
 .../hadoop/hdfs/protocolPB/PBHelperClient.java  |  5 ++-
 .../src/main/proto/hdfs.proto                   |  1 +
 .../hdfs/server/namenode/FSNamesystem.java      |  1 +
 .../apache/hadoop/hdfs/tools/TestDFSAdmin.java  | 38 ++++++++++++++++++++
 .../src/test/resources/testHDFSConf.xml         |  4 +++
 6 files changed, 77 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9714fc1d/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
index 0a8c915..c140d06 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
@@ -56,6 +56,7 @@ public class DatanodeInfo extends DatanodeID implements Node {
   private List<String> dependentHostNames = new LinkedList<>();
   private String upgradeDomain;
   public static final DatanodeInfo[] EMPTY_ARRAY = {};
+  private int numBlocks;
 
   // Datanode administrative states
   public enum AdminStates {
@@ -106,6 +107,7 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
     this.upgradeDomain = from.getUpgradeDomain();
     this.lastBlockReportTime = from.getLastBlockReportTime();
     this.lastBlockReportMonotonic = from.getLastBlockReportMonotonic();
+    this.numBlocks = from.getNumBlocks();
   }
 
   protected DatanodeInfo(DatanodeID nodeID) {
@@ -123,6 +125,7 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
     this.adminState = null;
     this.lastBlockReportTime = 0L;
     this.lastBlockReportMonotonic = 0L;
+    this.numBlocks = 0;
   }
 
   protected DatanodeInfo(DatanodeID nodeID, String location) {
@@ -139,7 +142,8 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
       final long lastUpdate, final long lastUpdateMonotonic,
       final int xceiverCount, final String networkLocation,
       final AdminStates adminState, final String upgradeDomain,
-      final long lastBlockReportTime, final long lastBlockReportMonotonic) {
+      final long lastBlockReportTime, final long lastBlockReportMonotonic,
+      final int blockCount) {
     super(ipAddr, hostName, datanodeUuid, xferPort, infoPort, infoSecurePort,
         ipcPort);
     this.capacity = capacity;
@@ -157,6 +161,7 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
     this.upgradeDomain = upgradeDomain;
     this.lastBlockReportTime = lastBlockReportTime;
     this.lastBlockReportMonotonic = lastBlockReportMonotonic;
+    this.numBlocks = blockCount;
   }
 
   /** Network location name. */
@@ -247,6 +252,13 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
   public long getLastUpdateMonotonic() { return lastUpdateMonotonic;}
 
   /**
+   * @return Num of Blocks
+   */
+  public int getNumBlocks() {
+    return numBlocks;
+  }
+
+  /**
    * Set lastUpdate monotonic time
    */
   public void setLastUpdateMonotonic(long lastUpdateMonotonic) {
@@ -301,6 +313,11 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
     this.xceiverCount = xceiverCount;
   }
 
+  /** Sets number of blocks. */
+  public void setNumBlocks(int blockCount) {
+    this.numBlocks = blockCount;
+  }
+
   /** network location */
   @Override
   public String getNetworkLocation() {return location;}
@@ -351,6 +368,7 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
     float cacheUsedPercent = getCacheUsedPercent();
     float cacheRemainingPercent = getCacheRemainingPercent();
     String lookupName = NetUtils.getHostNameOfIP(getName());
+    int blockCount = getNumBlocks();
 
     buffer.append("Name: ").append(getName());
     if (lookupName != null) {
@@ -406,6 +424,7 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
         .append(
             lastBlockReportTime != 0 ? new Date(lastBlockReportTime) : "Never")
         .append("\n");
+    buffer.append("Num of Blocks: ").append(blockCount).append("\n");
     return buffer.toString();
   }
 
@@ -680,6 +699,8 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
     private long nonDfsUsed = 0L;
     private long lastBlockReportTime = 0L;
     private long lastBlockReportMonotonic = 0L;
+    private int numBlocks;
+
 
     public DatanodeInfoBuilder setFrom(DatanodeInfo from) {
       this.capacity = from.getCapacity();
@@ -697,6 +718,7 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
       this.upgradeDomain = from.getUpgradeDomain();
       this.lastBlockReportTime = from.getLastBlockReportTime();
       this.lastBlockReportMonotonic = from.getLastBlockReportMonotonic();
+      this.numBlocks = from.getNumBlocks();
       setNodeID(from);
       return this;
     }
@@ -823,13 +845,18 @@ public class DatanodeInfo extends DatanodeID implements 
Node {
       this.lastBlockReportMonotonic = time;
       return this;
     }
+    public DatanodeInfoBuilder setNumBlocks(int blockCount) {
+      this.numBlocks = blockCount;
+      return this;
+    }
 
     public DatanodeInfo build() {
       return new DatanodeInfo(ipAddr, hostName, datanodeUuid, xferPort,
           infoPort, infoSecurePort, ipcPort, capacity, dfsUsed, nonDfsUsed,
           remaining, blockPoolUsed, cacheCapacity, cacheUsed, lastUpdate,
           lastUpdateMonotonic, xceiverCount, location, adminState,
-          upgradeDomain, lastBlockReportTime, lastBlockReportMonotonic);
+          upgradeDomain, lastBlockReportTime, lastBlockReportMonotonic,
+          numBlocks);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9714fc1d/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
index 3180f70..d9e7aa0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
@@ -333,6 +333,7 @@ public class PBHelperClient {
         .setAdminState(convert(info.getAdminState()))
         .setLastBlockReportTime(info.getLastBlockReportTime())
         .setLastBlockReportMonotonic(info.getLastBlockReportMonotonic())
+        .setNumBlocks(info.getNumBlocks())
         .build();
     return builder.build();
   }
@@ -704,7 +705,9 @@ public class PBHelperClient {
             .setLastBlockReportTime(di.hasLastBlockReportTime() ?
                 di.getLastBlockReportTime() : 0)
             .setLastBlockReportMonotonic(di.hasLastBlockReportMonotonic() ?
-                di.getLastBlockReportMonotonic() : 0);
+                di.getLastBlockReportMonotonic() : 0)
+            .setNumBlocks(di.getNumBlocks());
+
     if (di.hasNonDfsUsed()) {
       dinfo.setNonDfsUsed(di.getNonDfsUsed());
     } else {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9714fc1d/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
index 405495f..29d0b4e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
@@ -132,6 +132,7 @@ message DatanodeInfoProto {
   optional string upgradeDomain = 14;
   optional uint64 lastBlockReportTime = 15 [default = 0];
   optional uint64 lastBlockReportMonotonic = 16 [default = 0];
+  optional uint32 numBlocks = 17 [default = 0];
 }
 
 /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9714fc1d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index e0ece35..868f669 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -4338,6 +4338,7 @@ public class FSNamesystem implements Namesystem, 
FSNamesystemMBean,
       for (int i=0; i<arr.length; i++) {
         arr[i] = new DatanodeInfoBuilder().setFrom(results.get(i))
             .build();
+        arr[i].setNumBlocks(results.get(i).numBlocks());
       }
     } finally {
       readUnlock(operationName);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9714fc1d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
index 7237c88..1365b1a 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
@@ -882,4 +882,42 @@ public class TestDFSAdmin {
     assertEquals(-1, ToolRunner.run(dfsAdmin,
         new String[]{"-setBalancerBandwidth", "-10m"}));
   }
+
+  @Test(timeout = 300000L)
+  public void testCheckNumOfBlocksInReportCommand() throws Exception {
+    Configuration config = new Configuration();
+    config.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 512);
+    config.set(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, "3s");
+
+    int numOfDatanodes = 1;
+    MiniDFSCluster miniDFSCluster = new MiniDFSCluster.Builder(config)
+        .numDataNodes(numOfDatanodes).build();
+    try {
+      miniDFSCluster.waitActive();
+      DistributedFileSystem dfs = miniDFSCluster.getFileSystem();
+      Path path = new Path("/tmp.txt");
+
+      DatanodeInfo[] dn = dfs.getDataNodeStats();
+      assertEquals(numOfDatanodes, dn.length);
+      //Block count should be 0, as no files are created
+      assertEquals(0, dn[0].getNumBlocks());
+
+
+      //Create a file with 2 blocks
+      DFSTestUtil.createFile(dfs, path, 1024, (short) 1, 0);
+      int expectedBlockCount = 2;
+
+      //Wait for One Heartbeat
+      Thread.sleep(3 * 1000);
+
+      dn = dfs.getDataNodeStats();
+      assertEquals(numOfDatanodes, dn.length);
+
+      //Block count should be 2, as file is created with block count 2
+      assertEquals(expectedBlockCount, dn[0].getNumBlocks());
+
+    } finally {
+      miniDFSCluster.shutdown();
+    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9714fc1d/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
index 5094183..a13c441 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
@@ -16551,6 +16551,10 @@
           <expected-output>Last contact: [a-zA-Z]+ [a-zA-Z]+ [0-9]+ [0-9:]+ 
[A-Z\-\+\:0-9]+ [0-9]+</expected-output>
         </comparator>
         <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>Num of Blocks: [0-9]+</expected-output>
+        </comparator>
+        <comparator>
           <type>TokenComparator</type>
           <expected-output>Live datanodes</expected-output>
         </comparator>


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to