Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 67d8301e5 -> 5a39a7560
HDFS-10972. Add unit test for HDFS command 'dfsadmin -getDatanodeInfo'. Contributed by Xiaobing Zhou

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5a39a756
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5a39a756
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5a39a756

Branch: refs/heads/branch-2.8
Commit: 5a39a7560a9401b02f6ad99f0da38eec4886b7f4
Parents: 67d8301
Author: Mingliang Liu <[email protected]>
Authored: Mon Oct 10 17:46:31 2016 -0700
Committer: Mingliang Liu <[email protected]>
Committed: Mon Oct 10 17:46:31 2016 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hdfs/tools/TestDFSAdmin.java | 122 +++++++++++++++++--
 1 file changed, 109 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5a39a756/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
index 3a30ccf..b28917c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
@@ -23,10 +23,12 @@ import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.ReconfigurationUtil;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.util.ToolRunner;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -56,6 +58,10 @@ public class TestDFSAdmin {
   private MiniDFSCluster cluster;
   private DFSAdmin admin;
   private DataNode datanode;
+  private final ByteArrayOutputStream out = new ByteArrayOutputStream();
+  private final ByteArrayOutputStream err = new ByteArrayOutputStream();
+  private static final PrintStream OLD_OUT = System.out;
+  private static final PrintStream OLD_ERR = System.err;
 
   @Before
   public void setUp() throws Exception {
@@ -65,12 +71,32 @@ public class TestDFSAdmin {
     admin = new DFSAdmin();
   }
 
+  private void redirectStream() {
+    System.setOut(new PrintStream(out));
+    System.setErr(new PrintStream(err));
+  }
+
+  private void resetStream() {
+    out.reset();
+    err.reset();
+  }
+
   @After
   public void tearDown() throws Exception {
+    try {
+      System.out.flush();
+      System.err.flush();
+    } finally {
+      System.setOut(OLD_OUT);
+      System.setErr(OLD_ERR);
+    }
+
     if (cluster != null) {
       cluster.shutdown();
       cluster = null;
     }
+
+    resetStream();
   }
 
   private void restartCluster() throws IOException {
@@ -85,16 +111,89 @@ public class TestDFSAdmin {
   private List<String> getReconfigureStatus(String nodeType, String address)
       throws IOException {
     ByteArrayOutputStream bufOut = new ByteArrayOutputStream();
-    PrintStream out = new PrintStream(bufOut);
+    PrintStream outStream = new PrintStream(bufOut);
     ByteArrayOutputStream bufErr = new ByteArrayOutputStream();
-    PrintStream err = new PrintStream(bufErr);
-    admin.getReconfigurationStatus(nodeType, address, out, err);
-    Scanner scanner = new Scanner(bufOut.toString());
+    PrintStream errStream = new PrintStream(bufErr);
+    admin.getReconfigurationStatus(nodeType, address, outStream, errStream);
     List<String> outputs = Lists.newArrayList();
+    scanIntoList(bufOut, outputs);
+    return outputs;
+  }
+
+  private static void scanIntoList(
+      final ByteArrayOutputStream baos,
+      final List<String> list) {
+    final Scanner scanner = new Scanner(baos.toString());
     while (scanner.hasNextLine()) {
-      outputs.add(scanner.nextLine());
+      list.add(scanner.nextLine());
+    }
+    scanner.close();
+  }
+
+  @Test(timeout = 30000)
+  public void testGetDatanodeInfo() throws Exception {
+    redirectStream();
+    final Configuration dfsConf = new HdfsConfiguration();
+    final int numDn = 2;
+
+    /* init cluster */
+    try (MiniDFSCluster miniCluster = new MiniDFSCluster.Builder(dfsConf)
+        .numDataNodes(numDn).build()) {
+
+      miniCluster.waitActive();
+      assertEquals(numDn, miniCluster.getDataNodes().size());
+      final DFSAdmin dfsAdmin = new DFSAdmin(dfsConf);
+
+      /* init reused vars */
+      List<String> outs = null;
+      int ret;
+
+      /**
+       * test erroneous run
+       */
+      resetStream();
+      outs = Lists.newArrayList();
+
+      /* invoke getDatanodeInfo */
+      ret = ToolRunner.run(dfsAdmin,
+          new String[] {"-getDatanodeInfo", "128.0.0.1:1234"});
+
+      /* collect outputs */
+      scanIntoList(out, outs);
+
+      /* verify results */
+      assertEquals(-1, ret);
+      assertTrue("Unexpected getDatanodeInfo stdout", outs.isEmpty());
+
+      /**
+       * test normal run
+       */
+      for (int i = 0; i < numDn; i++) {
+        resetStream();
+        final DataNode dn = miniCluster.getDataNodes().get(i);
+
+        /* invoke getDatanodeInfo */
+        final String addr = String.format("%s:%d",
+            dn.getXferAddress().getHostString(), dn.getIpcPort());
+        ret = ToolRunner.run(dfsAdmin,
+            new String[] {"-getDatanodeInfo", addr});
+
+        /* collect outputs */
+        outs = Lists.newArrayList();
+        scanIntoList(out, outs);
+
+        /* verify results */
+        assertEquals(0, ret);
+        assertEquals(
+            "One line per DataNode like: Uptime: XXX, Software version: x.y.z,"
+                + " Config version: core-x.y.z,hdfs-x",
+            1, outs.size());
+        assertThat(outs.get(0),
+            is(allOf(containsString("Uptime:"),
+                containsString("Software version"),
+                containsString("Config version"))));
+      }
     }
-    return outputs;
   }
 
   /**
@@ -189,15 +288,12 @@ public class TestDFSAdmin {
       String nodeType, String address) throws IOException {
     ByteArrayOutputStream bufOut = new ByteArrayOutputStream();
-    PrintStream out = new PrintStream(bufOut);
+    PrintStream outStream = new PrintStream(bufOut);
     ByteArrayOutputStream bufErr = new ByteArrayOutputStream();
-    PrintStream err = new PrintStream(bufErr);
-    admin.getReconfigurableProperties(nodeType, address, out, err);
-    Scanner scanner = new Scanner(bufOut.toString());
+    PrintStream errStream = new PrintStream(bufErr);
+    admin.getReconfigurableProperties(nodeType, address, outStream, errStream);
     List<String> outputs = Lists.newArrayList();
-    while (scanner.hasNextLine()) {
-      outputs.add(scanner.nextLine());
-    }
+    scanIntoList(bufOut, outputs);
     return outputs;
   }
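For reference, a minimal sketch of driving the same code path outside the MiniDFSCluster harness: DFSAdmin run through ToolRunner with the -getDatanodeInfo argument, exactly as the new test does. The class name and the DataNode address below are placeholders, not part of the patch; 50020 is only the usual 2.x default for dfs.datanode.ipc.address and must match the target cluster.

  import org.apache.hadoop.hdfs.HdfsConfiguration;
  import org.apache.hadoop.hdfs.tools.DFSAdmin;
  import org.apache.hadoop.util.ToolRunner;

  public class GetDatanodeInfoExample {
    public static void main(String[] args) throws Exception {
      // Same invocation style as the test: DFSAdmin driven via ToolRunner.
      DFSAdmin dfsAdmin = new DFSAdmin(new HdfsConfiguration());
      // host:ipcPort of a running DataNode (placeholder address).
      int ret = ToolRunner.run(dfsAdmin,
          new String[] {"-getDatanodeInfo", "localhost:50020"});
      // 0 means the DataNode reported its info; a non-zero value mirrors
      // the erroneous-run case asserted in the test.
      System.exit(ret);
    }
  }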
