[35/50] hadoop git commit: HDFS-8216. TestDFSStripedOutputStream should use BlockReaderTestUtil to create BlockReader. Contributed by Tsz Wo Nicholas Sze.
HDFS-8216. TestDFSStripedOutputStream should use BlockReaderTestUtil to create BlockReader. Contributed by Tsz Wo Nicholas Sze. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a0971b9b Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a0971b9b Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a0971b9b Branch: refs/heads/HDFS-7285 Commit: a0971b9b5318685921642d2da1a27169f5589cb3 Parents: e875f1d Author: Zhe Zhang z...@apache.org Authored: Tue Apr 21 20:56:39 2015 -0700 Committer: Zhe Zhang z...@apache.org Committed: Mon May 4 10:13:28 2015 -0700 -- .../hadoop-hdfs/CHANGES-HDFS-EC-7285.txt| 3 + .../apache/hadoop/hdfs/BlockReaderTestUtil.java | 7 +-- .../hadoop/hdfs/TestBlockReaderFactory.java | 16 +++--- .../hadoop/hdfs/TestDFSStripedOutputStream.java | 58 ++-- 4 files changed, 20 insertions(+), 64 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0971b9b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt index 8f28285..d8f2e9d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt @@ -107,3 +107,6 @@ HDFS-8190. StripedBlockUtil.getInternalBlockLength may have overflow error. (szetszwo) + +HDFS-8216. TestDFSStripedOutputStream should use BlockReaderTestUtil to +create BlockReader. 
(szetszwo via Zhe Zhang) http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0971b9b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java index 88b7f37..829cf03 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java @@ -165,20 +165,19 @@ public class BlockReaderTestUtil { */ public BlockReader getBlockReader(LocatedBlock testBlock, int offset, int lenToRead) throws IOException { -return getBlockReader(cluster, testBlock, offset, lenToRead); +return getBlockReader(cluster.getFileSystem(), testBlock, offset, lenToRead); } /** * Get a BlockReader for the given block. */ - public static BlockReader getBlockReader(MiniDFSCluster cluster, - LocatedBlock testBlock, int offset, int lenToRead) throws IOException { + public static BlockReader getBlockReader(final DistributedFileSystem fs, + LocatedBlock testBlock, int offset, long lenToRead) throws IOException { InetSocketAddress targetAddr = null; ExtendedBlock block = testBlock.getBlock(); DatanodeInfo[] nodes = testBlock.getLocations(); targetAddr = NetUtils.createSocketAddr(nodes[0].getXferAddr()); -final DistributedFileSystem fs = cluster.getFileSystem(); return new BlockReaderFactory(fs.getClient().getConf()). setInetSocketAddress(targetAddr). setBlock(block). 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0971b9b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java index d8aceff..1a767c3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java @@ -250,8 +250,8 @@ public class TestBlockReaderFactory { LocatedBlock lblock = locatedBlocks.get(0); // first block BlockReader blockReader = null; try { -blockReader = BlockReaderTestUtil. -getBlockReader(cluster, lblock, 0, TEST_FILE_LEN); +blockReader = BlockReaderTestUtil.getBlockReader( +cluster.getFileSystem(), lblock, 0, TEST_FILE_LEN); Assert.fail("expected getBlockReader to fail the first time."); } catch (Throwable t) { Assert.assertTrue("expected to see 'TCP reads were disabled" + @@ -265,8 +265,8 @@ public class TestBlockReaderFactory { // Second time should succeed.
[35/50] hadoop git commit: HDFS-8216. TestDFSStripedOutputStream should use BlockReaderTestUtil to create BlockReader. Contributed by Tsz Wo Nicholas Sze.
HDFS-8216. TestDFSStripedOutputStream should use BlockReaderTestUtil to create BlockReader. Contributed by Tsz Wo Nicholas Sze. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/dbeb2c99 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/dbeb2c99 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/dbeb2c99 Branch: refs/heads/HDFS-7285 Commit: dbeb2c9981a3339a2cc3a620e9bd55151b9a6804 Parents: 4b0ad40 Author: Zhe Zhang z...@apache.org Authored: Tue Apr 21 20:56:39 2015 -0700 Committer: Jing Zhao ji...@apache.org Committed: Wed Apr 29 11:16:55 2015 -0700 -- .../hadoop-hdfs/CHANGES-HDFS-EC-7285.txt| 3 + .../apache/hadoop/hdfs/BlockReaderTestUtil.java | 7 +-- .../hadoop/hdfs/TestBlockReaderFactory.java | 16 +++--- .../hadoop/hdfs/TestDFSStripedOutputStream.java | 58 ++-- 4 files changed, 20 insertions(+), 64 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/dbeb2c99/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt index 8f28285..d8f2e9d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt @@ -107,3 +107,6 @@ HDFS-8190. StripedBlockUtil.getInternalBlockLength may have overflow error. (szetszwo) + +HDFS-8216. TestDFSStripedOutputStream should use BlockReaderTestUtil to +create BlockReader. 
(szetszwo via Zhe Zhang) http://git-wip-us.apache.org/repos/asf/hadoop/blob/dbeb2c99/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java index 88b7f37..829cf03 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java @@ -165,20 +165,19 @@ public class BlockReaderTestUtil { */ public BlockReader getBlockReader(LocatedBlock testBlock, int offset, int lenToRead) throws IOException { -return getBlockReader(cluster, testBlock, offset, lenToRead); +return getBlockReader(cluster.getFileSystem(), testBlock, offset, lenToRead); } /** * Get a BlockReader for the given block. */ - public static BlockReader getBlockReader(MiniDFSCluster cluster, - LocatedBlock testBlock, int offset, int lenToRead) throws IOException { + public static BlockReader getBlockReader(final DistributedFileSystem fs, + LocatedBlock testBlock, int offset, long lenToRead) throws IOException { InetSocketAddress targetAddr = null; ExtendedBlock block = testBlock.getBlock(); DatanodeInfo[] nodes = testBlock.getLocations(); targetAddr = NetUtils.createSocketAddr(nodes[0].getXferAddr()); -final DistributedFileSystem fs = cluster.getFileSystem(); return new BlockReaderFactory(fs.getClient().getConf()). setInetSocketAddress(targetAddr). setBlock(block). 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/dbeb2c99/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java index d8aceff..1a767c3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockReaderFactory.java @@ -250,8 +250,8 @@ public class TestBlockReaderFactory { LocatedBlock lblock = locatedBlocks.get(0); // first block BlockReader blockReader = null; try { -blockReader = BlockReaderTestUtil.
-getBlockReader(cluster, lblock, 0, TEST_FILE_LEN); +blockReader = BlockReaderTestUtil.getBlockReader( +cluster.getFileSystem(), lblock, 0, TEST_FILE_LEN); Assert.fail("expected getBlockReader to fail the first time."); } catch (Throwable t) { Assert.assertTrue("expected to see 'TCP reads were disabled" + @@ -265,8 +265,8 @@ public class TestBlockReaderFactory { // Second time should succeed.