This is an automated email from the ASF dual-hosted git repository.
surendralilhore pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
new 92b53c4  HDFS-14216. NullPointerException happens in NamenodeWebHdfs. Contributed by lujie.
92b53c4 is described below
commit 92b53c40f070bbfe65c736f6f3eca721b9d227f5
Author: Surendra Singh Lilhore <[email protected]>
AuthorDate: Thu Feb 21 20:36:34 2019 +0530
    HDFS-14216. NullPointerException happens in NamenodeWebHdfs. Contributed by lujie.
---
.../web/resources/NamenodeWebHdfsMethods.java | 16 +++++++++++----
.../web/resources/TestWebHdfsDataLocality.java | 23 ++++++++++++++++++++++
2 files changed, 35 insertions(+), 4 deletions(-)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index 0dea48a..1e8d1a5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -273,11 +273,19 @@ public class NamenodeWebHdfsMethods {
       for (String host : StringUtils
           .getTrimmedStringCollection(excludeDatanodes)) {
         int idx = host.indexOf(":");
-        if (idx != -1) {
-          excludes.add(bm.getDatanodeManager().getDatanodeByXferAddr(
-              host.substring(0, idx), Integer.parseInt(host.substring(idx + 1))));
+        Node excludeNode = null;
+        if (idx != -1) {
+          excludeNode = bm.getDatanodeManager().getDatanodeByXferAddr(
+              host.substring(0, idx), Integer.parseInt(host.substring(idx + 1)));
         } else {
-          excludes.add(bm.getDatanodeManager().getDatanodeByHost(host));
+          excludeNode = bm.getDatanodeManager().getDatanodeByHost(host);
+        }
+
+        if (excludeNode != null) {
+          excludes.add(excludeNode);
+        } else {
+          LOG.debug("DataNode {} was requested to be excluded, "
+              + "but it was not found.", host);
         }
       }
     }
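The change above applies a simple defensive pattern: resolve each excluded host first, add it to the exclude set only when the lookup actually returns a node, and otherwise log and skip it so a null entry never reaches the placement logic. Below is a minimal, self-contained sketch of that pattern; the ExcludeFilterSketch class, the registeredNodes map, and the host names are hypothetical stand-ins for the DatanodeManager lookups, not the real HDFS API.

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    // Hypothetical illustration of the null-guard pattern used by the patch:
    // only hosts that resolve to a registered node end up in the exclude set,
    // so later consumers of the set never see a null entry.
    public class ExcludeFilterSketch {
      public static void main(String[] args) {
        // Stand-in for DatanodeManager's host lookup (made-up data).
        Map<String, String> registeredNodes = new HashMap<>();
        registeredNodes.put("DataNode1", "node-1");

        Set<String> excludes = new HashSet<>();
        String[] requested = {"DataNode1", "DataNode2"}; // DataNode2 is unknown

        for (String host : requested) {
          String node = registeredNodes.get(host); // may be null for unknown hosts
          if (node != null) {
            excludes.add(node);
          } else {
            // Mirrors the LOG.debug(...) branch in the patch: report and skip
            // instead of adding null to the exclude set.
            System.out.println("DataNode " + host
                + " was requested to be excluded, but it was not found.");
          }
        }
        System.out.println("excludes = " + excludes); // prints [node-1]
      }
    }

Running the sketch excludes only the known node and merely reports the unknown one, which is the behaviour the guarded excludes.add(...) above restores.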
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java
index 028e18c..e009bc6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java
@@ -240,6 +240,29 @@ public class TestWebHdfsDataLocality {
   }
 
   @Test
+  public void testExcludeWrongDataNode() throws Exception {
+    final Configuration conf = WebHdfsTestUtil.createConf();
+    final String[] racks = {RACK0};
+    final String[] hosts = {"DataNode1"};
+    final int nDataNodes = hosts.length;
+
+    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+        .hosts(hosts).numDataNodes(nDataNodes).racks(racks).build();
+    try {
+      cluster.waitActive();
+      final NameNode namenode = cluster.getNameNode();
+      NamenodeWebHdfsMethods.chooseDatanode(
+          namenode, "/path", PutOpParam.Op.CREATE, 0,
+          DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT,
+          "DataNode2", LOCALHOST, null);
+    } catch (Exception e) {
+      Assert.fail("Failed to exclude DataNode2" + e.getMessage());
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
   public void testChooseDatanodeBeforeNamesystemInit() throws Exception {
     NameNode nn = mock(NameNode.class);
     when(nn.getNamesystem()).thenReturn(null);
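For context, the new test builds a one-DataNode MiniDFSCluster whose only DataNode is registered as "DataNode1" and then asks NamenodeWebHdfsMethods.chooseDatanode to exclude "DataNode2", a host that was never registered. The test passes as long as the call completes without throwing, which is the scenario that, per HDFS-14216, previously surfaced the NullPointerException.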