Repository: bigtop
Updated Branches:
  refs/heads/master 96ecf29a1 -> d4ada6133
BIGTOP-1981: Added tests for -test, -text and -count functionality

Signed-off-by: YoungWoo Kim <[email protected]>

Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/d4ada613
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/d4ada613
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/d4ada613

Branch: refs/heads/master
Commit: d4ada6133aac03eb27c5b129f213f21054350842
Parents: 96ecf29
Author: srinivas-altiscale <[email protected]>
Authored: Sun Sep 6 13:47:12 2015 +0530
Committer: YoungWoo Kim <[email protected]>
Committed: Sun Sep 6 20:18:37 2015 +0900

----------------------------------------------------------------------
 .../bigtop/itest/hadoop/hdfs/TestCmdTest.groovy | 321 +++++++++++++++++++
 .../bigtop/itest/hadoop/hdfs/TestCmdText.groovy | 195 +++++++++++
 .../bigtop/itest/hadoop/hdfs/TestCount.groovy   | 230 +++++++++++++
 3 files changed, 746 insertions(+)
----------------------------------------------------------------------
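All three new suites follow the same pattern: they drive the HDFS shell through
Bigtop's Shell helper and then assert on the exit code and the captured
stdout/stderr. A minimal sketch of that pattern (the path below is a
placeholder, not part of this commit):

    import org.apache.bigtop.itest.shell.Shell

    Shell sh = new Shell("/bin/bash -s")
    sh.exec("hdfs dfs -ls /tmp")        // placeholder path, any hdfs shell command works
    int rc = sh.getRet()                // exit code of the last command
    def out = sh.getOut()               // captured stdout, one list entry per line
    def err = sh.getErr()               // captured stderr, one list entry per line
    assert rc == 0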
http://git-wip-us.apache.org/repos/asf/bigtop/blob/d4ada613/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCmdTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCmdTest.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCmdTest.groovy
new file mode 100644
index 0000000..38aeea6
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCmdTest.groovy
@@ -0,0 +1,321 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestCmdTest {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  // extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+      replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testCmdTestInputDir = "testCmdTestInputDir" + date;
+  private static String testCmdTestInputs = "test_data_TestCmdTest"
+  private static String testCmdTestOut = "testCmdTestOut" + date;
+  private static String testCmdTestOutCmp = "testCmdTestOutCmp" + date;
+  private static String user_testinputdir = USERNAME+"/"+testCmdTestInputDir+
+      "/"+testCmdTestInputs;
+  private static String TESTDIR = "/user/$USERNAME/$testCmdTestInputDir";
+
+  private String teststr = "test: Too many arguments: expected 1 but got 2";
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestCmdTest.class, "." , null);
+
+    sh.exec("cp -r test_data test_data_TestCmdTest");
+    assertTrue("Could not copy data into test_data_TestCmdTest",
+        sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testCmdTestInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    println("Running cmdtest:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCmdTestOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCmdTestOut"); // remove from local disk
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCmdTestOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCmdTestOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testCmdTestInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCmdTestInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testForDirectory() {
+    // test whether the given path is a directory or not
+    sh.exec("hdfs dfs -test -d $TESTDIR/$testCmdTestInputs");
+    assertTrue("test command for directory on HDFS failed", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testDirectoryExists() {
+    println("testDirectoryExists");
+    sh.exec("hdfs dfs -test -e $TESTDIR/$testCmdTestInputs");
+    assertTrue("test command for directory on HDFS failed",
+        sh.getRet() == 0);
+  }
+
+  @Test
+  public void testIfFileExists() {
+    println("testIfFileExists");
+    sh.exec("hdfs dfs -test -e $TESTDIR/$testCmdTestInputs/test_1.txt ");
+    assertTrue("test command for file on HDFS failed", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testWithFile() {
+    println("testWithFile");
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCmdTestInputs/test_2.txt ");
+    assertTrue("test command for file on HDFS failed", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testForNonEmptyPath() {
+    println("testForNonEmptyPath");
+    sh.exec("hdfs dfs -test -s $TESTDIR/$testCmdTestInputs/test_2.txt ");
+    assertTrue("test command for non-empty file on HDFS failed",
+        sh.getRet() == 0);
+  }
+
+  @Test
+  public void testForEmptyPath() {
+    println("testForEmptyPath");
+    // create empty file
+    sh.exec("touch test_3.txt");
+    assertTrue("touch command failed", sh.getRet() == 0);
+
+    // move the file to hdfs
+    sh.exec("hdfs dfs -put test_3.txt $TESTDIR/$testCmdTestInputs/. ");
+    assertTrue("could not copy input to HDFS", sh.getRet() == 0);
+
+    // now check with -test -z
+    sh.exec("hdfs dfs -test -z $TESTDIR/$testCmdTestInputs/test_3.txt ");
+    assertTrue("test command for empty file failed on HDFS", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testdoptionWithFile() {
+    println("testdoptionWithFile");
+    sh.exec("hdfs dfs -test -d $TESTDIR/$testCmdTestInputs/test_1.txt ");
+    assertTrue("test command for directory worked on a file",
+        sh.getRet() == 1);
+  }
+
+  @Test
+  public void testdoptionWithNonExistentDirectory() {
+    println("testdoptionWithNonExistentDirectory");
+    sh.exec("hdfs dfs -test -d $TESTDIR/test_dir ");
+    assertTrue("test command for non-existing directory executed " +
+        "successfully on HDFS", sh.getRet() == 1);
+  }
+
+  @Test
+  public void testfoptionWithDirectory() {
+    println("testfoptionWithDirectory");
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCmdTestInputs");
+    assertTrue("test -f command succeeded on a directory",
+        sh.getRet() == 1);
+  }
+
+  @Test
+  public void testsoptionWithDirectory() {
+    println("testsoptionWithDirectory");
+    sh.exec("hdfs dfs -test -s $TESTDIR/$testCmdTestInputs");
+    assertTrue("test -s command succeeded on a directory",
+        sh.getRet() == 1);
+  }
+
+  @Test
+  public void testzoptionWithDirectory() {
+    println("testzoptionWithDirectory");
+    sh.exec("hdfs dfs -test -z $TESTDIR/$testCmdTestInputs ");
+    assertTrue("test -z command failed on a directory", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testeoptionWithNonExistentFile() {
+    println("testeoptionWithNonExistentFile");
+    sh.exec("hdfs dfs -test -e $TESTDIR/$testCmdTestInputs/test_4.txt");
+    assertTrue("test command for non existing file worked", sh.getRet() == 1);
+  }
+
+  @Test
+  public void testfoptionWithNonExistentFile() {
+    println("testfoptionWithNonExistentFile");
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCmdTestInputs/test_4.txt ");
+    assertTrue("test command for non existing file worked", sh.getRet() == 1);
+  }
+
+  @Test
+  public void testsoptionWithNonExistentFile() {
+    println("testsoptionWithNonExistentFile");
+    sh.exec("hdfs dfs -test -s $TESTDIR/$testCmdTestInputs/test_4.txt ");
+    assertTrue("test command for non existing file worked", sh.getRet() == 1);
+  }
+
+  @Test
+  public void testzoptionWithNonExistentFile() {
+    println("testzoptionWithNonExistentFile");
+    sh.exec("hdfs dfs -test -z $TESTDIR/$testCmdTestInputs/test_4.txt ");
+    assertTrue("test command for non existing file worked", sh.getRet() == 1);
+  }
+
+  @Test
+  public void testzoptionWithNonEmptyFile() {
+    println("testzoptionWithNonEmptyFile");
+    sh.exec("hdfs dfs -test -z $TESTDIR/$testCmdTestInputs/test_1.txt ");
+    assertTrue("test -z command on non-empty file worked", sh.getRet() == 1);
+  }
+
+  @Test
+  public void testdoptionWithMultipleDirectories() {
+    println("testForMultipleDirectories");
+    // invoke test command with multiple directory names
+    sh.exec("hdfs dfs -test -d $TESTDIR $TESTDIR/$testCmdTestInputs");
+    assertTrue("test command accepted multiple directory paths",
+        sh.getRet() == 255);
+
+    assertTrue("expected error message not found for -test -d with multiple directories",
+        lookForGivenString(sh.getErr(), teststr) == true);
+  }
+
+  @Test
+  public void testeoptionWithMultipleDirectories() {
+    println("testDirectoryExistsForMultipleDirectories");
+    // check if test command works with multiple directories with -e option
+    sh.exec("hdfs dfs -test -e $TESTDIR $TESTDIR/$testCmdTestInputs");
+    assertTrue("test command accepted multiple directory paths",
+        sh.getRet() == 255);
+
+    assertTrue("expected error message not found for -test -e with multiple directories",
+        lookForGivenString(sh.getErr(), teststr) == true);
+  }
+
+  @Test
+  public void testeoptionWithMultipleFilesExists() {
+    println("testForMultipleFilesExists");
+    sh.exec("hdfs dfs -test -e $TESTDIR/$testCmdTestInputs/test_1.txt "+
+        "$TESTDIR/$testCmdTestInputs/test_2.txt ");
+    assertTrue("test command accepted multiple file paths",
+        sh.getRet() == 255);
+
+    assertTrue("expected error message not found for -test -e with multiple files",
+        lookForGivenString(sh.getErr(), teststr) == true);
+  }
+
+  @Test
+  public void testfoptionWithMultipleFiles() {
+    println("testForMultipleFiles");
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCmdTestInputs/test_1.txt "+
+        "$TESTDIR/$testCmdTestInputs/test_2.txt ");
+    assertTrue("test command accepted multiple file paths", sh.getRet() == 255);
+
+    assertTrue("expected error message not found for -test -f with multiple files",
+        lookForGivenString(sh.getErr(), teststr) == true);
+  }
+
+  @Test
+  public void testsoptionWithMultipleFiles() {
+    println("testForNonEmptyWithMultipleFiles");
+    sh.exec("hdfs dfs -test -s $TESTDIR/$testCmdTestInputs/test_1.txt "+
+        "$TESTDIR/$testCmdTestInputs/test_2.txt ");
+    assertTrue("test command accepted multiple file paths", sh.getRet() == 255);
+
+    assertTrue("expected pattern not found in the output file",
+        lookForGivenString(sh.getErr(), teststr) == true);
+  }
+
+  @Test
+  public void testzoptionWithMultipleFiles() {
+    println("testForEmptyWithMultipleFiles");
+    sh.exec("hdfs dfs -test -z $TESTDIR/$testCmdTestInputs/test_1.txt " +
+        "$TESTDIR/$testCmdTestInputs/test_3.txt ");
+    assertTrue("test command accepted multiple directory paths",
+        sh.getRet() == 255);
+
+    assertTrue("expected error message not found for -test -z with multiple files",
+        lookForGivenString(sh.getErr(), teststr) == true);
+  }
+
+  /**
+   * lookForGivenString checks whether the given search string is present
+   * in the provided data list.
+   */
+  private boolean lookForGivenString(List<String> data,
+      String searchString) {
+    boolean result = false;
+    for( String output_String : data) {
+      if(output_String.contains(searchString)) {
+        result = true;
+        break;
+      }
+    }
+    return result;
+  }
+}
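TestCmdTest leans entirely on the exit-code contract of `hdfs dfs -test` that
the assertions above encode: 0 when the check holds, 1 when it does not, and
255 (with a "Too many arguments" message on stderr) when more than one path is
supplied. A minimal sketch of that contract, with placeholder paths:

    import org.apache.bigtop.itest.shell.Shell

    def sh = new Shell("/bin/bash -s")
    sh.exec("hdfs dfs -test -d /tmp")            // 0: path exists and is a directory
    assert sh.getRet() == 0
    sh.exec("hdfs dfs -test -d /no/such/dir")    // 1: the check fails
    assert sh.getRet() == 1
    sh.exec("hdfs dfs -test -d /tmp /user")      // 255: -test accepts exactly one path
    assert sh.getRet() == 255
    assert sh.getErr().any { it.contains("Too many arguments") }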
http://git-wip-us.apache.org/repos/asf/bigtop/blob/d4ada613/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCmdText.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCmdText.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCmdText.groovy
new file mode 100644
index 0000000..91f7d22
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCmdText.groovy
@@ -0,0 +1,195 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestCmdText {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  // extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+      replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testCmdTextInputDir = "testCmdTextInputDir" + date;
+  private static String testCmdTextInputs = "test_data_TestCmdText"
+  private static String testCmdTextOut = "testCmdTextOut" + date;
+  private static String testCmdTextOutCmp = "testCmdTextOutCmp" + date;
+  private static int repfactor = 2;
+  private static String TESTDIR = "/user/$USERNAME/$testCmdTextInputDir";
+  private static String user_testinputdir = USERNAME+"/"+testCmdTextInputDir+
+      "/"+testCmdTextInputs
+
+  @BeforeClass
+  public static void setUp() {
+
+    // unpack resource
+    JarContent.unpackJarContainer(TestCmdText.class, "." , null);
+
+    sh.exec("cp -r test_data test_data_TestCmdText");
+    assertTrue("Could not copy data into test_data_TestCmdText",
+        sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    println("hdfs dfs -put $testCmdTextInputs $TESTDIR");
+    sh.exec("hdfs dfs -put $testCmdTextInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    println("Running cmdtext:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      // println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCmdTextOut");
+    if (sh.getRet() == 0) {
+      // println("rm -rf $testCmdTextOut")
+      sh.exec("rm -rf $testCmdTextOut"); // remove from local disk
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCmdTextOutCmp");
+    if (sh.getRet() == 0) {
+      // println("rm -rf $testCmdTextOutCmp")
+      sh.exec("rm -rf $testCmdTextOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testCmdTextInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCmdTextInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testCmdTextSingleFile() {
+    println("testCmdTextSingleFile");
+    // invoke text with single file
+    sh.exec("hdfs dfs -text $TESTDIR/$testCmdTextInputs/test_1.txt | " +
+        "sed '/INFO/'d &> $testCmdTextOut");
+    assertTrue("text command on HDFS failed", sh.getRet() == 0);
+    sh.exec("diff $testCmdTextInputs/test_1.txt $testCmdTextOut");
+    assertTrue("Input provided and output returned are different",
+        sh.getRet() == 0);
+  }
+
+  @Test
+  public void testCmdTextMultipleFiles() {
+    println("testCmdTextMultipleFiles");
+    // invoke text with 2 files
+    sh.exec("hdfs dfs -text $TESTDIR/$testCmdTextInputs/test_1.txt " +
+        "$TESTDIR/$testCmdTextInputs/test_3 | " +
+        "sed '/INFO/'d &> $testCmdTextOut");
+    assertTrue("text command for multiple sources on HDFS failed",
+        sh.getRet() == 0);
+
+    sh.exec("cat $testCmdTextInputs/test_1.txt " +
+        "$testCmdTextInputs/test_3 &> " +
+        "$testCmdTextOutCmp");
+    assertTrue("cannot cat both the files", sh.getRet() == 0);
+    sh.exec("diff $testCmdTextOutCmp $testCmdTextOut");
+    assertTrue("Input provided and output returned are different",
+        sh.getRet() == 0);
+  }
+
+  @Test
+  public void testCmdWithZipFile() {
+    println("testCmdWithZipFile");
+    // invoke -text with zip file
+    sh.exec("hdfs dfs -text $TESTDIR/$testCmdTextInputs/test.zip | " +
+        "sed '/INFO/'d &> $testCmdTextOut");
+    assertTrue("text command for zip on HDFS failed", sh.getRet() == 0);
+
+    sh.exec("diff $testCmdTextInputs/test.zip $testCmdTextOut");
+    assertTrue("Input provided and output returned are different",
+        sh.getRet() == 0);
+  }
+
+  @Test
+  public void testCmdWithNonExistentZipFile() {
+    println("testCmdWithNonExistentZipFile");
+    sh.exec("hdfs dfs -text $TESTDIR/$testCmdTextInputs/test1.zip");
+    assertTrue("text command for non-existent zip on HDFS executed successfully",
+        sh.getRet() == 1);
+
+    String searchStr = "text: `$TESTDIR/$testCmdTextInputs/test1.zip': No such file or directory";
+    assertTrue("expected pattern not found in the output file",
+        lookForGivenString(sh.getErr(), searchStr) == true);
+  }
+
+  @Test
+  public void testCmdWithDirectory() {
+    println("testCmdWithDirectory");
+    sh.exec("hdfs dfs -text $TESTDIR/$testCmdTextInputs");
+    assertTrue("text command for existing directory on HDFS executed successfully",
+        sh.getRet() == 1);
+
+    String searchStr = "text: `/user/"+user_testinputdir+".: Is a directory";
+    assertTrue("expected pattern not found in the output file",
+        lookForGivenString(sh.getErr(), searchStr) == false);
+  }
+
+  /**
+   * lookForGivenString checks whether the given search string is present in data.
+   */
+  private boolean lookForGivenString(List<String> data,
+      String searchString) {
+    boolean result = false;
+    for( String output_String : data) {
+      if(output_String.contains(searchString)) {
+        result = true;
+        break;
+      }
+    }
+    return result;
+  }
+}
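TestCmdText relies on `-text` streaming a file back in readable form (unlike
`-cat`, it also decodes supported formats such as zip, which the zip-file case
above exercises); each test pipes the output through `sed '/INFO/'d` to drop
stray log lines before diffing against the local copy. A minimal sketch of
that round trip, with placeholder file names and paths not taken from the
commit:

    import org.apache.bigtop.itest.shell.Shell

    def sh = new Shell("/bin/bash -s")
    sh.exec("echo 'hello hdfs' > sample.txt")            // local fixture (placeholder)
    sh.exec("hdfs dfs -put sample.txt /tmp/sample.txt")  // stage it on HDFS (placeholder path)
    sh.exec("hdfs dfs -text /tmp/sample.txt | sed '/INFO/'d &> sample_out.txt")
    assert sh.getRet() == 0
    sh.exec("diff sample.txt sample_out.txt")
    assert sh.getRet() == 0   // identical content after the -text round trip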
http://git-wip-us.apache.org/repos/asf/bigtop/blob/d4ada613/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCount.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCount.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCount.groovy
new file mode 100644
index 0000000..5e655f5
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCount.groovy
@@ -0,0 +1,230 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+
+public class TestCount {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  // extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+      replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testCountInputDir = "testCountInputDir" + date;
+  private static String testCountInputs = "test_data_TestCount"
+  private static String testCountOut = "testCountOut" + date;
+  private static String testCountOutCmp = "testCountOutCmp" + date;
+  private static String TESTDIR = "/user/$USERNAME/$testCountInputDir";
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestCount.class, "." , null);
+
+    sh.exec("cp -r test_data test_data_TestCount");
+    assertTrue("Could not copy data into test_data_TestCount",
+        sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testCountInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // verify the input file exists on HDFS
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCountInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+
+    println("Running count:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCountOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCountOut"); // remove from local disk
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCountOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCountOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testCountInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCountInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testCountWithqOption() {
+    println("testCountWithqOption");
+    sh.exec("hdfs dfs -count -q $TESTDIR");
+    assertTrue("count command failed on HDFS", sh.getRet() == 0);
+
+    List out_msgs = sh.getOut();
+    String file_name = "$TESTDIR";
+    String output = out_msgs.get(0).toString();
+    // Check the output contents
+    if (!(output.contains("none") && output.contains("inf") &&
+        output.contains("2") && output.contains("4") &&
+        output.contains(file_name))) {
+      assertTrue("count output does not contain the expected data", false);
+    }
+    // now check the size displayed
+    String[] count_data = output.split("\\s+");
+    sh.exec("hdfs dfs -du -s $TESTDIR");
+    assertTrue("du command failed on HDFS", sh.getRet() == 0);
+
+    List du_out_msgs = sh.getOut();
+    String[] du_data = du_out_msgs.get(0).toString().split(" ");
+    if (!(count_data[7].equals(du_data[0]))) {
+      assertTrue("Size value in count output does not match du output", false);
+    }
+
+    // now check count option with inner directory
+    sh.exec("hdfs dfs -count -q $TESTDIR/$testCountInputs");
+    assertTrue("count command failed on HDFS", sh.getRet() == 0);
+    out_msgs = sh.getOut();
+    file_name = "$TESTDIR/$testCountInputs";
+    output = out_msgs.get(0).toString();
+    // Check the output contents
+    if (!(output.contains("none") && output.contains("inf") &&
+        output.contains("1") && output.contains("4") &&
+        output.contains(file_name))) {
+      assertTrue("count output does not contain the expected data", false);
+    }
+    // now check the size displayed
+    count_data = output.split("\\s+");
+    sh.exec("hdfs dfs -du -s $TESTDIR/$testCountInputs");
+    assertTrue("du command failed on HDFS", sh.getRet() == 0);
+
+    du_out_msgs = sh.getOut();
+    du_data = du_out_msgs.get(0).toString().split(" ");
+    if (!( count_data[7].equals(du_data[0]))) {
+      assertTrue("Size value in count output does not match du output", false);
+    }
+  }
+
+  @Test
+  public void testCountWithEmptyDirectory() {
+    println("testCountWithEmptyDirectory");
+    sh.exec("hdfs dfs -mkdir $TESTDIR/$testCountInputs/empty_dir");
+    sh.exec("hdfs dfs -count -q $TESTDIR/$testCountInputs/empty_dir");
+    assertTrue("count command failed on HDFS", sh.getRet() == 0);
+    List out_msgs = sh.getOut();
+    String file_name = "$TESTDIR/$testCountInputs/empty_dir";
+    String output = out_msgs.get(0).toString();
+    // Check the output contents
+    if (!(output.contains("none") && output.contains("inf") &&
+        output.contains("1") && output.contains("0") &&
+        output.contains(file_name))) {
+      assertTrue("count output does not contain the expected data", false);
+    }
+
+    String[] count_data = output.split("\\s+");
+    sh.exec("hdfs dfs -du -s $TESTDIR/$testCountInputs/empty_dir");
+    assertTrue("du command failed on HDFS", sh.getRet() == 0);
+
+    List du_out_msgs = sh.getOut();
+    String[] du_data = du_out_msgs.get(0).toString().split(" ");
+    println(count_data[7]);
+    if (!( count_data[7].equals(du_data[0]))) {
+      assertTrue("count command failed for empty dir", false);
+    }
+  }
+
+  @Test
+  public void testCountWithNonExistentDirectory() {
+    println("testCountWithNonExistentDirectory");
+    sh.exec("hdfs dfs -count -q $TESTDIR/$testCountInputs/test");
+    assertTrue("count command executed successfully with non existing dir " +
+        "on HDFS", sh.getRet() == 1);
+    List err_msgs = sh.getErr();
+    Boolean failure = false;
+    String failure_msg = "count: `$TESTDIR/$testCountInputs/test\': " +
+        "No such file or directory";
+    String file_name = "$TESTDIR/$testCountInputs/test";
+    // quota remaining quota directory count file count file name
+    if (err_msgs.get(0).toString().contains(failure_msg)) {
+      failure = true;
+    }
+    assertTrue("expected error message not found for non existing dir",
+        failure == true);
+  }
+
+  @Test
+  public void testCountOnDirectory() {
+    println("testCountOnDirectory");
+    sh.exec("hdfs dfs -count $TESTDIR");
+    assertTrue("count command failed on HDFS", sh.getRet() == 0);
+    List out_msgs = sh.getOut();
+    String file_name = "$TESTDIR";
+    String output = out_msgs.get(0).toString();
+    // Check the output contents
+    if (!(output.contains("3") && output.contains("4") &&
+        output.contains(file_name))) {
+      assertTrue("count output does not contain the expected data", false);
+    }
+
+    String[] count_data = output.split("\\s+");
+    sh.exec("hdfs dfs -du -s $TESTDIR");
+    assertTrue("du command failed on HDFS", sh.getRet() == 0);
+
+    List du_out_msgs = sh.getOut();
+    String[] du_data = du_out_msgs.get(0).toString().split(" ");
+    if (!( count_data[3].equals(du_data[0]))) {
+      assertTrue("Size value in count output does not match du output", false);
+    }
+  }
+}
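The index arithmetic in TestCount (count_data[7] with -q, count_data[3] without)
follows from the column layout of `hdfs dfs -count`: with -q the columns are
QUOTA, REM_QUOTA, SPACE_QUOTA, REM_SPACE_QUOTA, DIR_COUNT, FILE_COUNT,
CONTENT_SIZE, PATHNAME; without -q they are DIR_COUNT, FILE_COUNT,
CONTENT_SIZE, PATHNAME. Because the output is right-aligned, split("\\s+") on a
line that begins with padding yields an empty first element, shifting every
column one slot to the right. A small sketch of that parsing (the values shown
are illustrative, not taken from the commit):

    // A typical `hdfs dfs -count -q` line for a directory with no quotas set
    String line = "   none   inf   none   inf   2   4   1049 /user/someuser/dir"
    String[] cols = line.split("\\s+")
    assert cols[0] == ""        // leading whitespace produces an empty first element
    assert cols[7] == "1049"    // CONTENT_SIZE, the value compared against `hdfs dfs -du -s`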
