hadoop git commit: HDFS-10440. Improve DataNode web UI (Contributed by Weiwei Yang)

2016-06-28 Thread vinayakumarb
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 47c13c4f7 -> f44d7782f


HDFS-10440. Improve DataNode web UI (Contributed by Weiwei Yang)

(cherry picked from commit 2a0082c51da7cbe2770eddb5f72cd7f8d72fa5f6)
(cherry picked from commit 1e347631817d882353bfb91d68f109cb8232e8c4)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f44d7782
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f44d7782
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f44d7782

Branch: refs/heads/branch-2.8
Commit: f44d7782f605cfc1ee4ddae6d3a4ce877c9f2fd6
Parents: 47c13c4
Author: Vinayakumar B 
Authored: Tue Jun 28 16:49:39 2016 +0530
Committer: Vinayakumar B 
Committed: Tue Jun 28 16:55:57 2016 +0530

--
 .../hdfs/server/datanode/BPServiceActor.java|  46 +++
 .../hadoop/hdfs/server/datanode/DataNode.java   |  26 
 .../hdfs/server/datanode/DataNodeMXBean.java|  20 ++-
 .../src/main/webapps/datanode/datanode.html | 129 +++
 .../hadoop-hdfs/src/main/webapps/datanode/dn.js |  70 ++
 .../src/main/webapps/datanode/index.html|  48 +--
 .../server/datanode/TestDataNodeMXBean.java |   4 +
 7 files changed, 297 insertions(+), 46 deletions(-)
--
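
For readers following the diff below: the per-actor map built by getActorInfoMap() feeds the DataNode MXBean (registered as DataNodeInfo), which the new datanode.html/dn.js page reads over HTTP. A minimal sketch of pulling the same JSON from the DataNode's /jmx servlet follows; the host and port (50075 is the default DataNode HTTP port in 2.x) are assumptions about a local test setup, and the exact attribute name carrying the actor info is not shown in this excerpt, so the raw bean JSON is simply printed.

// Sketch only: dump the DataNodeInfo bean that backs the new DataNode web UI.
// Assumes a DataNode HTTP server reachable at localhost:50075 (2.x default);
// the attribute with the per-actor info is not visible in this excerpt, so the
// whole bean is printed and left for the reader (or dn.js) to pick apart.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class DataNodeJmxProbe {
  public static void main(String[] args) throws Exception {
    URL url = new URL(
        "http://localhost:50075/jmx?qry=Hadoop:service=DataNode,name=DataNodeInfo");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setConnectTimeout(5000);
    conn.setReadTimeout(5000);
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line); // raw JMX JSON; the web UI extracts the fields it renders
      }
    } finally {
      conn.disconnect();
    }
  }
}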


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f44d7782/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
index 99874dd..70004e0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
@@ -26,6 +26,7 @@ import java.net.InetSocketAddress;
 import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -58,6 +59,7 @@ import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.util.VersionUtil;
@@ -138,6 +140,10 @@ class BPServiceActor implements Runnable {
 || runningState == BPServiceActor.RunningState.CONNECTING;
   }
 
+  String getRunningState() {
+return runningState.toString();
+  }
+
   @Override
   public String toString() {
 return bpos.toString() + " service to " + nnAddr;
@@ -147,6 +153,22 @@ class BPServiceActor implements Runnable {
 return nnAddr;
   }
 
+  private String getNameNodeAddress() {
+    return NetUtils.getHostPortString(getNNSocketAddress());
+  }
+
+  Map<String, String> getActorInfoMap() {
+    final Map<String, String> info = new HashMap<String, String>();
+    info.put("NamenodeAddress", getNameNodeAddress());
+    info.put("BlockPoolID", bpos.getBlockPoolId());
+    info.put("ActorState", getRunningState());
+    info.put("LastHeartbeat",
+        String.valueOf(getScheduler().getLastHearbeatTime()));
+    info.put("LastBlockReport",
+        String.valueOf(getScheduler().getLastBlockReportTime()));
+    return info;
+  }
+
   private final CountDownLatch initialRegistrationComplete;
   private final LifelineSender lifelineSender;
 
@@ -379,6 +401,7 @@ class BPServiceActor implements Runnable {
   (nCmds + " commands: " + Joiner.on("; ").join(cmds +
   ".");
 }
+scheduler.updateLastBlockReportTime(monotonicNow());
 scheduler.scheduleNextBlockReport();
 return cmds.size() == 0 ? null : cmds;
   }
@@ -425,6 +448,7 @@ class BPServiceActor implements Runnable {
 " storage reports from service actor: " + this);
 }
 
+scheduler.updateLastHeartbeatTime(monotonicNow());
 VolumeFailureSummary volumeFailureSummary = dn.getFSDataset()
 .getVolumeFailureSummary();
 int numFailedVolumes = volumeFailureSummary != null ?
@@ -996,6 +1020,12 @@ class BPServiceActor implements Runnable {
 volatile long nextLifelineTime = monotonicNow();
 
 @VisibleForTesting
+volatile long lastBlockReportTime = monotonicNow();
+
+@VisibleForTesting
+volatile long lastHeartbeatTime = monotonicNow();
+
+@VisibleForTesting
 boolean resetBlockReportTime = true;
 
 private 

[2/2] hadoop git commit: HDFS-10440. Improve DataNode web UI (Contributed by Weiwei Yang)

2016-06-28 Thread vinayakumarb
HDFS-10440. Improve DataNode web UI (Contributed by Weiwei Yang)

(cherry picked from commit 2a0082c51da7cbe2770eddb5f72cd7f8d72fa5f6)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1e347631
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1e347631
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1e347631

Branch: refs/heads/branch-2
Commit: 1e347631817d882353bfb91d68f109cb8232e8c4
Parents: 0a1a2ce
Author: Vinayakumar B 
Authored: Tue Jun 28 16:49:39 2016 +0530
Committer: Vinayakumar B 
Committed: Tue Jun 28 16:51:00 2016 +0530

--
 .../hdfs/server/datanode/BPServiceActor.java|  46 +++
 .../hadoop/hdfs/server/datanode/DataNode.java   |  26 
 .../hdfs/server/datanode/DataNodeMXBean.java|  20 ++-
 .../src/main/webapps/datanode/datanode.html | 129 +++
 .../hadoop-hdfs/src/main/webapps/datanode/dn.js |  70 ++
 .../src/main/webapps/datanode/index.html|  48 +--
 .../server/datanode/TestDataNodeMXBean.java |   4 +
 7 files changed, 297 insertions(+), 46 deletions(-)
--
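
The excerpt below is cut off before the Scheduler changes, but the call sites that are visible in its hunks (scheduler.updateLastBlockReportTime(monotonicNow()), scheduler.updateLastHeartbeatTime(monotonicNow()), getScheduler().getLastHearbeatTime(), getScheduler().getLastBlockReportTime()) imply accessors roughly like the sketch here. This is a reconstruction from those call sites, not the literal hunk; in particular, whether the getters return the raw monotonic timestamp or a value converted for display is not visible in this mail.

// Reconstruction sketch inferred from the call sites in the visible hunks;
// not the literal HDFS-10440 Scheduler change (that hunk is truncated below).
import static org.apache.hadoop.util.Time.monotonicNow;

class SchedulerTimestampsSketch {
  // Mirrors the @VisibleForTesting fields added in the visible hunk.
  volatile long lastBlockReportTime = monotonicNow();
  volatile long lastHeartbeatTime = monotonicNow();

  void updateLastBlockReportTime(long blockReportTime) {
    lastBlockReportTime = blockReportTime;
  }

  void updateLastHeartbeatTime(long heartbeatTime) {
    lastHeartbeatTime = heartbeatTime;
  }

  // Spelling follows the call site in getActorInfoMap(); the real getter may
  // convert to elapsed time for the UI rather than return the raw timestamp.
  long getLastHearbeatTime() {
    return lastHeartbeatTime;
  }

  long getLastBlockReportTime() {
    return lastBlockReportTime;
  }
}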


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1e347631/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
index 99874dd..70004e0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
@@ -26,6 +26,7 @@ import java.net.InetSocketAddress;
 import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -58,6 +59,7 @@ import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.util.VersionUtil;
@@ -138,6 +140,10 @@ class BPServiceActor implements Runnable {
 || runningState == BPServiceActor.RunningState.CONNECTING;
   }
 
+  String getRunningState() {
+return runningState.toString();
+  }
+
   @Override
   public String toString() {
 return bpos.toString() + " service to " + nnAddr;
@@ -147,6 +153,22 @@ class BPServiceActor implements Runnable {
 return nnAddr;
   }
 
+  private String getNameNodeAddress() {
+    return NetUtils.getHostPortString(getNNSocketAddress());
+  }
+
+  Map<String, String> getActorInfoMap() {
+    final Map<String, String> info = new HashMap<String, String>();
+    info.put("NamenodeAddress", getNameNodeAddress());
+    info.put("BlockPoolID", bpos.getBlockPoolId());
+    info.put("ActorState", getRunningState());
+    info.put("LastHeartbeat",
+        String.valueOf(getScheduler().getLastHearbeatTime()));
+    info.put("LastBlockReport",
+        String.valueOf(getScheduler().getLastBlockReportTime()));
+    return info;
+  }
+
   private final CountDownLatch initialRegistrationComplete;
   private final LifelineSender lifelineSender;
 
@@ -379,6 +401,7 @@ class BPServiceActor implements Runnable {
   (nCmds + " commands: " + Joiner.on("; ").join(cmds +
   ".");
 }
+scheduler.updateLastBlockReportTime(monotonicNow());
 scheduler.scheduleNextBlockReport();
 return cmds.size() == 0 ? null : cmds;
   }
@@ -425,6 +448,7 @@ class BPServiceActor implements Runnable {
 " storage reports from service actor: " + this);
 }
 
+scheduler.updateLastHeartbeatTime(monotonicNow());
 VolumeFailureSummary volumeFailureSummary = dn.getFSDataset()
 .getVolumeFailureSummary();
 int numFailedVolumes = volumeFailureSummary != null ?
@@ -996,6 +1020,12 @@ class BPServiceActor implements Runnable {
 volatile long nextLifelineTime = monotonicNow();
 
 @VisibleForTesting
+volatile long lastBlockReportTime = monotonicNow();
+
+@VisibleForTesting
+volatile long lastHeartbeatTime = monotonicNow();
+
+@VisibleForTesting
 boolean resetBlockReportTime = true;
 
 private final AtomicBoolean forceFullBlockReport =
@@ -1033,6 +1063,22 @@ class BPServiceActor implements Runnable {
   return nextHeartbeatTime;
 }
 
+

[1/2] hadoop git commit: HDFS-10440. Improve DataNode web UI (Contributed by Weiwei Yang)

2016-06-28 Thread vinayakumarb
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 0a1a2ce07 -> 1e3476318
  refs/heads/trunk 4fd37eed9 -> 2a0082c51


HDFS-10440. Improve DataNode web UI (Contributed by Weiwei Yang)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2a0082c5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2a0082c5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2a0082c5

Branch: refs/heads/trunk
Commit: 2a0082c51da7cbe2770eddb5f72cd7f8d72fa5f6
Parents: 4fd37ee
Author: Vinayakumar B 
Authored: Tue Jun 28 16:49:39 2016 +0530
Committer: Vinayakumar B 
Committed: Tue Jun 28 16:49:39 2016 +0530

--
 .../hdfs/server/datanode/BPServiceActor.java|  46 +++
 .../hadoop/hdfs/server/datanode/DataNode.java   |  26 
 .../hdfs/server/datanode/DataNodeMXBean.java|  20 ++-
 .../src/main/webapps/datanode/datanode.html | 129 +++
 .../hadoop-hdfs/src/main/webapps/datanode/dn.js |  70 ++
 .../src/main/webapps/datanode/index.html|  48 +--
 .../server/datanode/TestDataNodeMXBean.java |   4 +
 7 files changed, 297 insertions(+), 46 deletions(-)
--
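
The DataNode.java and DataNodeMXBean.java hunks listed in the stat above are not included in this excerpt. One plausible shape for the MXBean side is to collect each actor's getActorInfoMap() into a list and serialize it as JSON for the web UI; the sketch below only illustrates that idea, and the class and method names in it are assumptions rather than the patch's actual code.

// Illustrative sketch of the aggregation step (names are assumptions; the
// real DataNode/DataNodeMXBean hunks are not shown in this mail).
import java.util.List;
import java.util.Map;
import org.mortbay.util.ajax.JSON; // JSON helper available on the Hadoop 2.x classpath

class ActorInfoJsonSketch {
  /** Serialize one info map per BPServiceActor into a JMX-friendly JSON string. */
  static String toJson(List<Map<String, String>> actorInfoMaps) {
    // JSON.toString handles lists and maps of strings directly.
    return JSON.toString(actorInfoMaps);
  }
}

Serving a plain JSON string through the MXBean keeps the new datanode.html/dn.js page a thin JavaScript client over /jmx, the same pattern the existing NameNode web UI uses.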


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2a0082c5/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
index 39f8219..1b61b4b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
@@ -26,6 +26,7 @@ import java.net.InetSocketAddress;
 import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -58,6 +59,7 @@ import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.util.VersionUtil;
@@ -138,6 +140,10 @@ class BPServiceActor implements Runnable {
 || runningState == BPServiceActor.RunningState.CONNECTING;
   }
 
+  String getRunningState() {
+return runningState.toString();
+  }
+
   @Override
   public String toString() {
 return bpos.toString() + " service to " + nnAddr;
@@ -147,6 +153,22 @@ class BPServiceActor implements Runnable {
 return nnAddr;
   }
 
+  private String getNameNodeAddress() {
+    return NetUtils.getHostPortString(getNNSocketAddress());
+  }
+
+  Map<String, String> getActorInfoMap() {
+    final Map<String, String> info = new HashMap<String, String>();
+    info.put("NamenodeAddress", getNameNodeAddress());
+    info.put("BlockPoolID", bpos.getBlockPoolId());
+    info.put("ActorState", getRunningState());
+    info.put("LastHeartbeat",
+        String.valueOf(getScheduler().getLastHearbeatTime()));
+    info.put("LastBlockReport",
+        String.valueOf(getScheduler().getLastBlockReportTime()));
+    return info;
+  }
+
   private final CountDownLatch initialRegistrationComplete;
   private final LifelineSender lifelineSender;
 
@@ -379,6 +401,7 @@ class BPServiceActor implements Runnable {
   (nCmds + " commands: " + Joiner.on("; ").join(cmds +
   ".");
 }
+scheduler.updateLastBlockReportTime(monotonicNow());
 scheduler.scheduleNextBlockReport();
 return cmds.size() == 0 ? null : cmds;
   }
@@ -425,6 +448,7 @@ class BPServiceActor implements Runnable {
 " storage reports from service actor: " + this);
 }
 
+scheduler.updateLastHeartbeatTime(monotonicNow());
 VolumeFailureSummary volumeFailureSummary = dn.getFSDataset()
 .getVolumeFailureSummary();
 int numFailedVolumes = volumeFailureSummary != null ?
@@ -996,6 +1020,12 @@ class BPServiceActor implements Runnable {
 volatile long nextLifelineTime = monotonicNow();
 
 @VisibleForTesting
+volatile long lastBlockReportTime = monotonicNow();
+
+@VisibleForTesting
+volatile long lastHeartbeatTime = monotonicNow();
+
+@VisibleForTesting
 boolean resetBlockReportTime = true;
 
 private final AtomicBoolean forceFullBlockReport =
@@ -1033,6 +1063,22 @@ class BPServiceActor implements