BIGTOP-2009: added new tests for chgrp, cp, ls, mv, du, put, get, mkdir, stat and touchz

Signed-off-by: YoungWoo Kim <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/96ecf29a
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/96ecf29a
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/96ecf29a

Branch: refs/heads/master
Commit: 96ecf29a1aa4f6109639411cd9287245e2ed0387
Parents: 49705da
Author: srinivas-altiscale <[email protected]>
Authored: Sun Sep 6 01:18:34 2015 +0530
Committer: YoungWoo Kim <[email protected]>
Committed: Sun Sep 6 14:44:06 2015 +0900

----------------------------------------------------------------------
 .../itest/hadoop/hdfs/CommonFunctions.groovy    |   37 +
 .../bigtop/itest/hadoop/hdfs/TestChgrp.groovy   |  249 ++
 .../bigtop/itest/hadoop/hdfs/TestCp.groovy      |  300 ++
 .../bigtop/itest/hadoop/hdfs/TestDu.groovy      |  323 ++
 .../bigtop/itest/hadoop/hdfs/TestGet.groovy     |  245 ++
 .../bigtop/itest/hadoop/hdfs/TestLs.groovy      |  240 ++
 .../bigtop/itest/hadoop/hdfs/TestMkdir.groovy   |  170 +
 .../bigtop/itest/hadoop/hdfs/TestMv.groovy      |  288 ++
 .../bigtop/itest/hadoop/hdfs/TestPut.groovy     |  192 +
 .../bigtop/itest/hadoop/hdfs/TestStat.groovy    |  222 ++
 .../bigtop/itest/hadoop/hdfs/TestTouchz.groovy  |  173 +
 .../src/main/resources/test_data/test.zip       |  Bin 0 -> 346 bytes
 .../src/main/resources/test_data/test_1.txt     |    4 +
 .../src/main/resources/test_data/test_2.txt     |    4 +
 .../hadoop/src/main/resources/test_data/test_3  | 3321 ++++++++++++++++++
 15 files changed, 5768 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/CommonFunctions.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/CommonFunctions.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/CommonFunctions.groovy
new file mode 100644
index 0000000..1844778
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/CommonFunctions.groovy
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+import java.util.ArrayList;
+import java.util.List;
+
+public class CommonFunctions {
+  /**
+   * lookForGivenString checks whether the given search string is present
+   * in the list data
+   */
+  public boolean lookForGivenString(List<String> data,
+                                     String searchString) {
+    boolean result = false;
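+    // scan each output line; stop at the first line containing the string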
+    for( String output_String : data) {
+      if(output_String.contains(searchString)) {
+        result = true;
+        break;
+      }
+    }
+    return result;
+  }
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestChgrp.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestChgrp.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestChgrp.groovy
new file mode 100644
index 0000000..1569e0a
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestChgrp.groovy
@@ -0,0 +1,249 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestChgrp {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  private static Shell shHDFS = new Shell("/bin/bash -s","hdfs");
+  private static Shell shOOZIE = new Shell("/bin/bash -s","oozie");
+  // extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static final String USERDIR = System.getProperty("user.dir");
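+  // date-based suffix (whitespace/colons stripped) keeps test dirs unique per run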
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testChgrpInputDir = "testChgrpInputDir" + date;
+  private static String testChgrpInputs = "test_data_TestChgrp"
+  private static String testChgrpOut = "testChgrpOut" + date;
+  private static String testChgrpOutCmp = "testChgrpOutCmp" + date;
+  private static String user_testinputdir = USERNAME+"/"+testChgrpInputDir+
+                                             "/"+testChgrpInputs;
+  private static String TESTDIR = "/user/$USERNAME/$testChgrpInputDir";
+  static List<String> TestChgrp_output = new ArrayList<String>();
+  static List<String> TestChgrp_error = new ArrayList<String>();
+  static boolean result = false;
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestChgrp.class, "." , null);
+
+    sh.exec("cp -r test_data test_data_TestChgrp");
+    assertTrue("Could not copy data into test_data_TestChgrp", sh.getRet() == 
0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS",
+               sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testChgrpInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // set the replication if file exists
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testChgrpInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -chmod -R o+w $TESTDIR/$testChgrpInputs");
+    logError(sh);
+    assertTrue("Could not change permissions", sh.getRet() == 0);
+
+    println("Running chgrp:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory",
+                 sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testChgrpOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testChgrpOut from local disk");
+      assertTrue("Could not remove output directory/file",
+                 sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testChgrpOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testChgrpOutCmp");
+      assertTrue("Could not remove output directory/file",
+                 sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testChgrpInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testChgrpInputs");
+      assertTrue("Could not remove output directory/file",
+                 sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testChgrpWithoutGroupName() {
+    println("testChgrpWithoutGroupName");
+    sh.exec("hdfs dfs -chgrp $TESTDIR/$testChgrpInputs ");
+    assertTrue("chgrp command executed successfully without group name",
+               sh.getRet() == 255);
+
+    String searchStr = "chgrp: Not enough arguments: expected 2 but got 1";
+    assertTrue("expected pattern not found in the output file ",
+               lookForGivenString(sh.getErr(),searchStr) == true);
+  }
+
+  @Test
+  public void testChgrpWithInvalidPath() {
+    println("testChgrpWithInvalidPath");
+    sh.exec("hdfs dfs -chgrp $TESTDIR/$testChgrpInputs random");
+    assertTrue("Did chgrp work with a wrong path?", sh.getRet() == 1);
+
+    String searchStr = "chgrp: `random': No such file or directory";
+    assertTrue("expected pattern not found in the output file ",
+               lookForGivenString(sh.getErr(),searchStr) == true);
+  }
+
+  @Test
+  public void testChgrpWithInvalidGroupName() {
+    sh.exec("hdfs dfs -chgrp random $TESTDIR/$testChgrpInputs ");
+    assertTrue("Did chgrp work with a wrong group?", sh.getRet() == 1);
+
+    String searchStr = "chgrp: changing ownership of '/user/"+
+                       user_testinputdir+
+                       "': User does not belong to random";
+    assertTrue("expected pattern not found in the output file ",
+               lookForGivenString(sh.getErr(),searchStr) == true);
+  }
+
+  @Test
+  public void testChgrp() {
+    println("testChgrp");
+    sh.exec("id | awk \'{print \$3}\' | awk -F\'[()]\' \'{print \$2}\'");
+    List out_grp = sh.getOut();
+    String group_name = out_grp.get(0);
+
+    // first make sure that all the files in the directory belong to a group
+    sh.exec("hdfs dfs -chgrp -R $group_name $TESTDIR");
+    assertTrue("chgrp command failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls $TESTDIR");
+    assertTrue("Able to list files?", sh.getRet() == 0);
+
+    String searchString = "$USERNAME $group_name";
+    assertTrue("chgrp failed for a proper group name",
+                lookForGivenString(sh.getOut(), searchString) == true);
+ 
+    shHDFS.exec("hdfs dfs -chgrp hdfs $TESTDIR/$testChgrpInputs");
+    assertTrue("chgrp command failed with hdfs user on HDFS",
+               shHDFS.getRet() == 0);
+
+    shHDFS.exec("hdfs dfs -ls $TESTDIR");
+    assertTrue("Able to list files?", shHDFS.getRet() == 0);
+
+    // check that the group name changed only on the parent directory
+
+    searchString = "$USERNAME hdfs";
+    assertTrue("chgrp applied only to parent directory?",
+               lookForGivenString(shHDFS.getOut(), searchString) == true);
+
+    // check that chgrp was not applied to files inside the directory
+    shHDFS.exec("hdfs dfs -ls -R $TESTDIR/$testChgrpInputs");
+    searchString = "$USERNAME hdfs";
+    assertTrue("chgrp was not applied to files inside the directory?",
+               lookForGivenString(shHDFS.getOut(), searchString) == false);
+
+    // now change the group recursively
+    shHDFS.exec("hdfs dfs -chgrp -R hdfs $TESTDIR/$testChgrpInputs");
+    assertTrue("chgrp command with hdfs user failed on HDFS",
+               shHDFS.getRet() == 0);
+
+    shHDFS.exec("hdfs dfs -ls -R $TESTDIR");
+    assertTrue("listing directories failed", shHDFS.getRet() == 0);
+
+    searchString = "$USERNAME $group_name";
+    assertTrue("chgrp failed to execute recursively on directory",
+               lookForGivenString(shHDFS.getOut(), searchString) == false);
+  }
+
+  @Test
+  public void testChgrpWithUnauthorizedUser() {
+    println("testChgrpWithUnauthorizedUser");
+    // remove write permission for others
+    sh.exec("hdfs dfs -chmod -R o-w $TESTDIR/$testChgrpInputs");
+    assertTrue("Could not change permissions", sh.getRet() == 0);
+
+    // now try to change group as oozie user
+    shOOZIE.exec("hdfs dfs -chgrp oozie $TESTDIR/$testChgrpInputs");
+    assertTrue("chgrp command with oozie user failed on HDFS",
+               shOOZIE.getRet() == 1);
+
+    List err_msgs = shOOZIE.getErr();
+    String failure_msg = "chgrp: changing ownership of " +
+                         "\'$TESTDIR/$testChgrpInputs\': Permission denied";
+    Boolean failure = false;
+    if (err_msgs.get(0).toString().contains(failure_msg)){
+      failure = true;
+    }
+    assertTrue("chgrp command with oozie user failed on HDFS",
+               failure == true);
+  }
+
+  /**
+   * lookForGivenString checks whether the given search string is present
+   * in the list data
+   */
+  private boolean lookForGivenString(List<String> data,
+                                     String searchString) {
+    boolean result = false;
+    for( String output_String : data) {
+      if(output_String.contains(searchString)) {
+        result = true;
+        break;
+      }
+    }
+    return result;
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCp.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCp.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCp.groovy
new file mode 100644
index 0000000..ac96e6d
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCp.groovy
@@ -0,0 +1,300 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import org.junit.runners.MethodSorters;
+import org.junit.FixMethodOrder;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestCp {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static final String USERDIR = System.getProperty("user.dir");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testCpInputDir = "testCpInputDir" + date;
+  private static String testCpInputs = "test_data_TestCp"
+  private static String testCpOut = "testCpOut" + date;
+  private static String testCpOutCmp = "testCpOutCmp" + date;
+  private static String TESTDIR  = "/user/$USERNAME/$testCpInputDir";
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestCp.class, "." , null);
+    sh.exec("cp -r test_data test_data_TestCp");
+    logError(sh);
+    assertTrue("Could not copy data into test_data_TestCp.", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    sh.exec("test -d temp_testcp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testcp");
+    }
+    sh.exec("mkdir temp_testcp");
+    assertTrue("could not create a dir", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testCpInputs $TESTDIR");
+    logError(sh);
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // set the replication if file exists
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCpInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+
+    println("Running cp:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCpOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCpOut from local disk");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCpOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCpOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d temp_testcp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testcp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testCpInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCpInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testCpForFiles() {
+    println("testCpForFiles");
+    // first delete the test_3 file
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCpInputs/test_3.txt");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm $TESTDIR/$testCpInputs/test_3.txt");
+      assertTrue("failed to cleanup file from destination", sh.getRet() == 0);
+    }
+    // copy test_1.txt file to test_3.txt on hdfs
+    sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs/test_1.txt 
$TESTDIR/$testCpInputs/test_3.txt");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_1.txt 
temp_testcp/test_1.txt");
+    assertTrue("get command for 1st file failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_3.txt 
temp_testcp/test_3.txt");
+    assertTrue("get command for 2nd file failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("diff temp_testcp/test_1.txt temp_testcp/test_3.txt");
+    logError(sh);
+    assertTrue("files differ in content", sh.getRet() == 0);
+
+    sh.exec("rm -f temp_testcp/test_1.txt temp_testcp/test_3.txt");
+    assertTrue("could not remove the files", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testCpForDirectories() {
+    sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs $TESTDIR/test_temp");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls -R $TESTDIR/test_temp");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+
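+    // walk the recursive listing and verify both copied files are present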
+    List out_msgs = sh.getOut();
+    Boolean success_1= false;
+    Boolean success_2= false;
+    Iterator out_iter = out_msgs.iterator();
+    while (out_iter.hasNext()) {
+      String next_val = out_iter.next();
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_temp/test_2.txt")) {
+        success_1 = true;
+        continue;
+      }
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_temp/test_1.txt")) {
+        success_2 = true;
+        continue;
+      }
+    }
+    assertTrue("Copied files do not match",
+               success_1 == true && success_2 == true);
+  }
+
+  @Test
+  public void testCopyExistingFile() {
+    println("testCopyExistingFile");
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCpInputs/test_3.txt");
+    if (sh.getRet() == 1) {
+      sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs/test_1.txt 
$TESTDIR/$testCpInputs/test_3.txt");
+      assertTrue("failed to copy a file to HDFS", sh.getRet() == 0);
+    }
+
+    //copy test_2.txt file to test_3.txt on hdfs, see if it gets overwritten
+    sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs/test_2.txt 
$TESTDIR/$testCpInputs/test_3.txt");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 1);
+    List err_msgs = sh.getErr();
+    Boolean failure= false;
+    String failure_msg = "cp: `$TESTDIR/$testCpInputs/test_3.txt': File 
exists";
+    if (err_msgs.get(0).toString().contains(failure_msg)){
+      failure = true;
+    }
+    assertTrue("copy command failed", failure == true);
+  }
+ 
+  @Test
+  public void testCopyOverwriteFile() {
+    println("testCopyOverwriteFile");
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCpInputs/test_3.txt");
+    if (sh.getRet() == 1) {
+      sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs/test_1.txt 
$TESTDIR/$testCpInputs/test_3.txt");
+      assertTrue("failed to copy a file to HDFS", sh.getRet() == 0);
+    }
+
+    //copy test_2.txt file to test_3.txt on hdfs, with overwrite flag
+    sh.exec("hdfs dfs -cp -f $TESTDIR/$testCpInputs/test_2.txt 
$TESTDIR/$testCpInputs/test_3.txt");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_2.txt 
temp_testcp/test_2.txt");
+    assertTrue("get command for 1st file failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_3.txt 
temp_testcp/test_3.txt");
+    assertTrue("get command for 2nd file failed on HDFS", sh.getRet() == 0);
+    sh.exec("diff temp_testcp/test_2.txt temp_testcp/test_3.txt");
+    assertTrue("files differ in content", sh.getRet() == 0);
+    sh.exec("rm -f temp_testcp/test_2.txt temp_testcp/test_3.txt");
+    assertTrue("could not remove the files", sh.getRet() == 0);
+  }
+ 
+  @Test
+  public void testCopyOverwriteFileInNewDirectory() {
+    println("testCopyOverwriteFileInNewDirectory");
+    //copy test_2.txt file to a newly created directory on hdfs
+    sh.exec("hdfs dfs -mkdir $TESTDIR/temp_testcp");
+    assertTrue("could not create directory on hdfs", sh.getRet() == 0);
+    sh.exec("hdfs dfs -cp -f $TESTDIR/$testCpInputs/test_2.txt 
$TESTDIR/temp_testcp");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_2.txt 
temp_testcp/test_2.txt");
+    assertTrue("get command for 1st file failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/temp_testcp/test_2.txt 
temp_testcp/test_3.txt");
+    assertTrue("get command for 2nd file failed on HDFS", sh.getRet() == 0);
+    sh.exec("diff temp_testcp/test_2.txt temp_testcp/test_3.txt");
+    assertTrue("files differ in content", sh.getRet() == 0);
+    sh.exec("rm -f temp_testcp/test_2.txt temp_testcp/test_3.txt");
+    assertTrue("could not remove the files", sh.getRet() == 0);
+  }
+ 
+  @Test
+  public void testCopyNonExistingFile() {
+    println("testCopyNonExistingFile");
+    //copy test_4.txt (non existing file) to another location on hdfs
+    sh.exec("hdfs dfs -cp -f $TESTDIR/$testCpInputs/test_4.txt 
$TESTDIR/temp_testcp");
+    assertTrue("copy command should not get executed for a non existing file 
on HDFS", sh.getRet() == 1);
+    List err_msgs = sh.getErr();
+    boolean failure= false;
+    String failure_msg = "cp: `$TESTDIR/$testCpInputs/test_4.txt': No such 
file or directory";
+    if (err_msgs.get(0).toString().contains(failure_msg)){
+      failure = true;
+    }
+    assertTrue("copy command failed", failure == true);
+  }
+
+  @Test
+  public void TestCpFileProtocolWithFile() {
+    println("TestCpFileProtocolWithFile");
+    //copy test_1.txt from local to a newly created dir on hdfs
+    sh.exec("hdfs dfs -mkdir $TESTDIR/temp_testcp_1");
+    sh.exec("hdfs dfs -cp file:///$USERDIR/$testCpInputs/test_1.txt " +
+            "$TESTDIR/temp_testcp_1");
+    assertTrue("copy command from local to hdfs failed", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/temp_testcp_1/test_1.txt " +
+            "temp_testcp/test_1.txt");
+    assertTrue("get command for 1st file failed on HDFS", sh.getRet() == 0);
+    sh.exec("diff temp_testcp/test_1.txt $testCpInputs/test_1.txt");
+    assertTrue("files differ in content", sh.getRet() == 0);
+    sh.exec("rm -f temp_testcp/test_1.txt");
+    assertTrue("could not remove the files", sh.getRet() == 0);
+    sh.exec("hdfs dfs -rm -skipTrash $TESTDIR/temp_testcp_1/test_1.txt");
+    assertTrue("could not remove file from hdfs", sh.getRet() == 0);
+  }
+ 
+  @Test
+  public void TestCpFileProtocolWithDirectory() {
+    println("TestCpFileProtocolWithDirectory");
+    //copy a dir from local to hdfs
+    sh.exec("hdfs dfs -cp file:///$USERDIR/$testCpInputs 
$TESTDIR/test_temp_1");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -ls -R $TESTDIR/test_temp_1");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+    List out_msgs_fp = sh.getOut();
+    Boolean success_fp_1= false;
+    Boolean success_fp_2= false;
+    Iterator out_iter_fp = out_msgs_fp.iterator();
+    while (out_iter_fp.hasNext()) {
+      String next_val = out_iter_fp.next();
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_temp_1/test_2.txt")) {
+        success_fp_1 = true;
+      }
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_temp_1/test_3"))  {
+        success_fp_2 = true;
+      }
+    }
+    assertTrue("Copied files do not match",
+               success_fp_1 == true && success_fp_2 == true);
+  }
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDu.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDu.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDu.groovy
new file mode 100644
index 0000000..bb92cbc
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDu.groovy
@@ -0,0 +1,323 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestDu {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testDuInputDir = "testDuInputDir" + date;
+  private static String testDuInputs = "test_data_TestDu"
+  private static String testDuOut = "testDuOut" + date;
+  private static int repfactor = 2;
+  private static String user_testinputdir = USERNAME+"/"+testDuInputDir+
+                                            "/"+testDuInputs;
+  private static String TESTDIR  = "/user/$USERNAME/$testDuInputDir";
+  static List<String> TestDu_output = new ArrayList<String>();
+  static boolean result = false;
+  static boolean result_2 = false;
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestDu.class, "." , null);
+    sh.exec("cp -r test_data test_data_TestDu");
+    assertTrue("Could not copy data into test_data_TestDu", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testDuInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+    println("Running du:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testDuOut");
+    if (sh.getRet() == 0) {
+      // println("rm -rf $testDuOut")
+      sh.exec("rm -rf $testDuOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testDuInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testDuInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+  }
+
+  @Test
+  public void testDuBasics() {
+    println("TestDuBasics");
+    result = false;
+    sh.exec("hdfs dfs -du $TESTDIR");
+    assertTrue("du command on HDFS failed", sh.getRet() == 0);
+    TestDu_output=sh.getOut();
+    int size = TestDu_output.size();
+
+    assertTrue("more number of lines than expected, expected only 1 line",
+               size == 1);
+
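+    // du output format: "<size-in-bytes> <path>"; the bounds bracket the test data size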
+    String[] output_split = TestDu_output.get(0).split(" ");
+    if (Integer.parseInt(output_split[0]) > 119999 &&
+        Integer.parseInt(output_split[0]) < 140000 &&
+        output_split[1].contains("/user/"+user_testinputdir)) {
+      result = true;
+    }
+    assertTrue(" command failed", result == false);
+  }
+
+  @Test
+  public void testDuSummaryOptions() {
+    println("testDuSummaryOptions");
+    result = false;
+    result_2 = false;
+    sh.exec("hdfs dfs -du -s $TESTDIR/$testDuInputs/*");
+    assertTrue("du -s command on HDFS failed", sh.getRet() == 0);
+
+    TestDu_output=sh.getOut();
+    int size = TestDu_output.size();
+    assertTrue("more number of lines than expected; expected only 4 line",
+               size == 4);
+
+    for (String string : TestDu_output) {
+      if (string.contains("/user/"+user_testinputdir+"/test_3")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 119999 &&
+           Integer.parseInt(output_split[0]) < 140000) {
+          result = true;
+        }
+        continue;
+      }
+
+      if (string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+           Integer.parseInt(output_split[0]) < 20) {
+          result_2= true;
+        }
+        continue;
+      }
+    }
+    assertTrue("Does the -du -s output contains proper data?", result == true 
&& result_2 == true);
+  }
+
+  @Test
+  public void testDuhOptions() {
+    println("testDuSummaryOptions");
+    result = false;
+    sh.exec("hdfs dfs -du -h $TESTDIR ");
+    assertTrue("du -h command on HDFS failed", sh.getRet() == 0);
+    TestDu_output=sh.getOut();
+    assertTrue("Does -du -h generated more number of lines than expected; " +
+               "expected only 1 line", TestDu_output.size() == 1 );
+
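+    // the test assumes -h emits size and unit as separate tokens, e.g. "126.4 K"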
+    String[] output_split = TestDu_output.get(0).split("\\s+");
+    if (output_split[0].matches("^1[2-3][0-9]..") &&
+       output_split[1].equals("K") &&
+       output_split[2].contains("/user/"+user_testinputdir)) {
+      result = true;
+    }
+    assertTrue("Does the du -h output is proper?", result == true);
+  }
+
+  @Test
+  public void testDuMultipleOptions() {
+    println("TestDuMultipleOptions");
+    result = false;
+    result_2 = false;
+    sh.exec("hdfs dfs -du -s -h $TESTDIR/$testDuInputs/* ");
+    assertTrue("du with multiple options failed on HDFS", sh.getRet() == 0);
+
+    TestDu_output = sh.getOut();
+    assertTrue("Does -du -s -h generated more number of lines than expected?"+
+               " expected only 4 lines", TestDu_output.size() ==4 );
+
+    for(String string :TestDu_output) {
+      if (string.contains("/user/"+user_testinputdir+"/test_3")) {
+        String[] output_split = string.split(" ");
+        if (Float.parseFloat(output_split[0]) > 119 &&
+           Float.parseFloat(output_split[0]) < 140 &&
+           output_split[1].equals("K")) {
+          result =true;
+        }
+      }
+
+      if (string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20) {
+          result_2=true;
+        }
+      }
+    }
+    assertTrue("Does the du -s -h output contains correct data about files?",
+               result == true && result_2 == true);
+  }
+
+  @Test
+  public void testDuMultipleOptionsForFiles() {
+    println("testDuMultipleOptionsForFiles");
+    result = false;
+    result_2 = false;
+
+    sh.exec("hdfs dfs -du -s -h $TESTDIR/$testDuInputs/test_1.txt  " +
+            "$TESTDIR/$testDuInputs/test_2.txt ");
+    assertTrue("du with multiple options failed on HDFS", sh.getRet() == 0);
+    TestDu_output = sh.getOut();
+    assertTrue("more number of lines than expected; expected only 2 lines",
+               TestDu_output.size() == 2);
+
+    for (String string : TestDu_output) {
+
+      if (string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+           Integer.parseInt(output_split[0]) < 20) {
+          result=true;
+        }
+      }
+
+      if (string.contains("/user/"+user_testinputdir+"/test_2.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20) {
+          result_2=true;
+        }
+      }
+    }
+    assertTrue("Does the du -s -h output contains correct data about 2 files?",
+               result == true && result_2 == true);
+  }
+
+  @Test
+  public void testDuHdfsProtocolForMultipleFiles() {
+    println("testDuHdfsProtocolForMultipleFiles");
+    result = false;
+    result_2 = false;
+    sh.exec("hdfs dfs -du -s -h hdfs://$TESTDIR/$testDuInputs/test_1.txt " +
+            "$TESTDIR/$testDuInputs/test_2.txt ");
+    assertTrue("du command with hdfs protocol failed on HDFS",
+               sh.getRet() == 0);
+
+    TestDu_output = sh.getOut();
+    assertTrue("more number of lines than expected; expected only 2 line",
+               TestDu_output.size() == 2);
+
+    for (String string :TestDu_output) {
+      if (string.contains("hdfs:/") &&
+          string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20 ) {
+          result=true;
+        }
+      }
+
+      if (string.contains(" /user/"+user_testinputdir+"/test_2.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20 ) {
+          result_2=true;
+        }
+      }
+    }
+    assertTrue("Does du -s -h output contains valid data with hdfs protocol?",
+               result == true && result_2 == true);
+  }
+
+  @Test
+  public void testDuHdfsProtocolForDirectory() {
+    println("testDuHdfsProtocolForDirectory");
+    result = false;
+    result_2 = false;
+    sh.exec("hdfs dfs -du -s hdfs://$TESTDIR/$testDuInputs/* ");
+    assertTrue("dus command with hdfs protocol failed on HDFS",
+               sh.getRet() == 0);
+
+    TestDu_output = sh.getOut();
+    assertTrue("more number of lines than expected; expected only 4 lines",
+               TestDu_output.size() == 4);
+
+    for (String string :TestDu_output) {
+      if (string.contains("hdfs:/") &&
+          string.contains("/user/"+user_testinputdir+"/test_3")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 119999 &&
+            Integer.parseInt(output_split[0]) < 140000) {
+          result =true;
+        }
+      }
+
+      if (string.contains("hdfs:/") &&
+          string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20) {
+          result_2=true;
+        }
+      }
+    }
+    assertTrue("Does du -s -h output contains valid data with hdfs protocol?",
+               result == true && result_2 == true);
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestGet.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestGet.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestGet.groovy
new file mode 100644
index 0000000..1f04784
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestGet.groovy
@@ -0,0 +1,245 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+
+public class TestGet {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static final String USERDIR = System.getProperty("user.dir");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testGetInputDir = "testGetInputDir" + date;
+  private static String testGetInputs = "test_data_TestGet"
+  private static String TESTDIR  = "/user/$USERNAME/$testGetInputDir";
+  private CommonFunctions scripts = new CommonFunctions();
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestGet.class, "." , null);
+
+    sh.exec("cp -r test_data $testGetInputs");
+    logError(sh);
+    assertTrue("Could not copy data into $testGetInputs .", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testGetInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    sh.exec("test -d temp_testget");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testget");
+    }
+    sh.exec("mkdir temp_testget");
+    assertTrue("could not create a dir", sh.getRet() == 0);
+
+    println("Running get:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d temp_testget");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testget");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testGetInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testGetInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d temp_testget/test_optionscrc");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testget/test_optionscrc");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testGetFile() {
+    println("testGetFile");
+
+    //get test_1.txt file from hdfs to local
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs/test_1.txt temp_testget");
+    assertTrue("get command failed", sh.getRet() == 0);
+    sh.exec("diff temp_testget/test_1.txt $testGetInputs/test_1.txt");
+    logError(sh);
+    assertTrue("files differ in content", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testGetDirectory() {
+    println("testGetDirectory");
+    // get a dir from hdfs to local
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs temp_testget");
+    assertTrue("get command failed", sh.getRet() == 0);
+    sh.exec("ls -l temp_testget/$testGetInputs");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+    List out_msgs = sh.getOut();
+    Boolean success_1= false;
+    Boolean success_2= false;
+    Iterator out_iter = out_msgs.iterator();
+    while (out_iter.hasNext()) {
+      String next_val = out_iter.next();
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("test_2.txt")) {
+        success_1 = true;
+        continue;
+      }
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("test_3"))  {
+        success_2 = true;
+        continue;
+      }
+    }
+    assertTrue("Able to find Downloaded files?",
+               success_1 == true && success_2 == true);
+  }
+
+  @Test
+  public void testGetFileWhenFileExistsAtLocal() {
+    println("testGetFileWhenFileExistsAtLocal");
+    //get test_2.txt file from hdfs to a location where the file already exists
+    sh.exec("test -f temp_testget/$testGetInputs/test_2.txt");
+    if (sh.getRet() == 1) {
+      sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs temp_testget");
+      assertTrue("get command failed for directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs/test_2.txt " +
+            "temp_testget/$testGetInputs/test_2.txt");
+    assertTrue("get command failed on HDFS", sh.getRet() == 1);
+
+    String failure_msg = "get: `temp_testget/$testGetInputs/test_2.txt': " +
+                         "File exists";
+    assertTrue("Does get command properly failed to download an existing 
file?",
+               scripts.lookForGivenString(sh.getErr(),failure_msg) == true);
+  }
+
+  @Test
+  public void testGetFileNonExistingFile() {
+    println("testGetFileNonExistingFile");
+    //get test_4.txt (non-existing) from hdfs to local
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs/test_4.txt temp_testget");
+    assertTrue("get command failed on HDFS", sh.getRet() == 1);
+
+    String failure_msg = "get: `$TESTDIR/$testGetInputs/test_4.txt': " +
+                         "No such file or directory";
+    assertTrue("Does get command failed to download non existing file?",
+               scripts.lookForGivenString(sh.getErr(),failure_msg) == true);
+  }
+
+  @Test
+  public void testGetFileWithoutSpecifyingDestination() {
+    println("testGetFileWithoutSpecifyingDestination");
+    //get test_2.txt from hdfs to local, without specifying any destination
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs/test_2.txt");
+    assertTrue("Did get command work when no destination is specified?",
+               sh.getRet() == 0);
+
+    sh.exec("diff test_2.txt $testGetInputs/test_2.txt");
+    assertTrue("files differ in content", sh.getRet() == 0);
+
+    sh.exec("rm -f test_2.txt");
+    assertTrue("could not remove a file", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testGetWithCrc() {
+    println("testGetWithCrc");
+    sh.exec("mkdir -p temp_testget/test_optionscrc");
+    //get a text file with crc
+    assertTrue("Able to create directory?", sh.getRet() == 0);
+
+    //get test_2.txt file from hdfs a location where the file already exists
+    sh.exec("hdfs dfs -get -crc $TESTDIR/$testGetInputs/test_2.txt " +
+            "temp_testget/test_optionscrc/test_4.txt");
+    assertTrue("Does get command worked properly with crc option?",
+               sh.getRet() == 0);
+
+    sh.exec("ls -la temp_testget/test_optionscrc");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does get command download file with crc?",
+                scripts.lookForGivenString(sh.getOut(),
+                                          ".test_4.txt.crc") == true);
+    // now compare the contents
+    sh.exec("diff temp_testget/test_optionscrc/test_4.txt 
$testGetInputs/test_2.txt");
+    logError(sh);
+    assertTrue("files differ in content", sh.getRet() == 0);
+  }
+ 
+  @Test
+  public void testGetWithoutCrc() {
+    println("testGetWithoutCrc");
+
+    //get a text file without crc
+    sh.exec("hdfs dfs -get -ignoreCrc $TESTDIR/$testGetInputs/test_1.txt " +
+            "temp_testget/test_5.txt");
+    assertTrue("get command failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("ls -la temp_testget");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does get command skipped crc file properly?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          ".test_5.txt.crc") == false);
+    // now compare the contents
+    sh.exec("diff $testGetInputs/test_1.txt temp_testget/test_5.txt");
+    logError(sh);
+    assertTrue("files differ in content", sh.getRet() == 0);
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestLs.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestLs.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestLs.groovy
new file mode 100644
index 0000000..d6742ac
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestLs.groovy
@@ -0,0 +1,240 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestLs {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testLsInputDir = "testLsInputDir" + date;
+  private static String testLsInputs = "test_data_TestLs"
+  private static String user_testinputdir  = USERNAME+"/"+testLsInputDir+"/"+
+                                             testLsInputs;
+  static List<String> TestLs_output = new ArrayList<String>();
+  static boolean result = false;
+  private CommonFunctions scripts = new CommonFunctions();
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestLs.class, "." , null);
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("cp -r test_data test_data_TestLs");
+    logError(sh);
+    assertTrue("Could not copy data into test_data_TestLs", sh.getRet() == 0);
+
+    sh.exec("hadoop fs -test -d /user/$USERNAME/$testLsInputDir");
+    if (sh.getRet() == 0) {
+      println("hadoop fs -rm -r -skipTrash /user/$USERNAME/$testLsInputDir")
+      sh.exec("hadoop fs -rm -r -skipTrash /user/$USERNAME/$testLsInputDir");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hadoop fs -mkdir $testLsInputDir");
+    assertTrue("Could not create input directory", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hadoop fs -put $testLsInputs /user/$USERNAME/$testLsInputDir");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // set the replication if file exists
+    sh.exec("hdfs dfs -test -f /user/$USERNAME/$testLsInputDir/" +
+            "$testLsInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+
+    println("Running ls:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hadoop fs -test -d /user/$USERNAME/$testLsInputDir");
+    if (sh.getRet() == 0) {
+      sh.exec("hadoop fs -rm -r -skipTrash /user/$USERNAME/$testLsInputDir");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testLsInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testLsInputs");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testLs() {
+    println("TestLs");
+    // test whether root listing of ls command works
+    sh.exec("hdfs dfs -ls / ");
+    assertTrue("Able to list /user contents?",
+               scripts.lookForGivenString(sh.getOut(), "/user") == true);
+
+    // test whether a directory exists with the user name under '/user'
+    sh.exec("hdfs dfs -ls /user/$USERNAME ");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+    assertTrue("Able to list /user contents?",
+                scripts.lookForGivenString(sh.getOut(),
+                                          "/user/"+USERNAME) == true);
+  }
+
+  @Test
+  public void testLsWithRegularExpressions() {
+    println("testLsWithRegularExpressions");
+    //test whether one can list the files with a regular expression
+    sh.exec("hdfs dfs -ls /user/$USERNAME/$testLsInputDir/*");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+    assertTrue("Able to list contents with regular expressions?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          "/user/"+user_testinputdir) == true);
+  }
+
+  @Test
+  public void testLsVerifyOutputStructureForDirectory() {
+    println("TestLsVerifyOutputStructure");
+    result = false;
+    //verify the structure of the output of ls command for a directory
+    sh.exec("hdfs dfs -ls -R /user/$USERNAME/$testLsInputDir");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+    TestLs_output=sh.getOut();
+
+    // verify that default permissions are listed in ls output for directory
+    assertTrue("Does ls outputs directory permissons properly?",
+               scripts.lookForGivenString(TestLs_output,
+               "drwxr-xr-x") == true);
+
+    result = false;
+    String searchDir = "/user/"+USERNAME+"/"+testLsInputDir;
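+    // after split("\\s+"), an ls -R row yields: [0]=permissions,
+    // [1]=replication ('-' for directories), [2]=owner, [3]=group,
+    // [4]=size (0 for a directory)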
+    for (String output_String : TestLs_output) {
+      String[] string = output_String.split("\\s+");
+      if(string[1].equals("-") && string[2].equals(USERNAME) &&
+         string[4].equals("0") && output_String.contains(searchDir)) {
+        result = true;
+        break;
+      }
+    }
+    // verify the directory row: replication '-', correct owner, and size 0
+    assertTrue("Does ls output contain proper directory data?", result == true);
+  }
+
+  @Test
+  public void testLsVerifyOutputStructureForFile() {
+    println("testLsVerifyOutputStructureForFile");
+    result = false;
+    // verify the structure of the output of ls command for a file
+    sh.exec("hdfs dfs -ls /user/$USERNAME/$testLsInputDir/" +
+            "$testLsInputs/test_2.txt");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+    String fileName = "/user/"+user_testinputdir+"/test_2.txt";
+    TestLs_output = sh.getOut();
+    for (String output_String : TestLs_output) {
+      String[] string = output_String.split("\\s+");
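+      // for a file row, [1] is the replication factor; the check below
+      // assumes the default replication of 3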
+      if(output_String.contains("-rw-r--r--") && string[1].equals("3") &&
+         string[2].equals(USERNAME) && output_String.contains(fileName)) {
+        result = true;
+        break;
+      }
+    }
+    assertTrue("Does the file listing happened properly?", result == true);
+    result = false;
+  }
+
+  @Test
+  public void testLsWithInvalidOption() {
+    println("testLsWithInvalidOption");
+    // verify that the command fails when an invalid option is passed
+    sh.exec("hdfs dfs -ls -r /user/$USERNAME/$testLsInputDir/$testLsInputs");
+    assertTrue("ls with the invalid option -r unexpectedly succeeded on HDFS",
+               sh.getRet() == 255);
+  }
+
+  @Test
+  public void testLsWithdOption() {
+    println("testLsWithdOption");
+    // verify that with '-d' the directory itself is listed, not its contents
+    String dirName = "/user/$USERNAME/$testLsInputDir/$testLsInputs";
+    sh.exec("hdfs dfs -ls -d $dirName");
+ 
+    assertTrue("listing directory failed on HDFS", sh.getRet() == 0);
+    TestLs_output = sh.getOut();
+    assertTrue("Does the output contain only one line?",
+               TestLs_output.size() == 1);
+    assertTrue("Does the output contain only the directory name?",
+               scripts.lookForGivenString(TestLs_output, dirName) == true);
+  }
+
+  @Test
+  public void testLsForRecursiveListing() {
+    println("testLsForRecursiveListing");
+    //verify that when '-R' is used the files are listed recursively
+    sh.exec("hdfs dfs -ls -R /user/$USERNAME/$testLsInputDir/$testLsInputs");
+    assertTrue("listing recursive directory structure failed on HDFS",
+                sh.getRet() == 0);
+
+    TestLs_output = sh.getOut();
+    String fileName = "/user/"+user_testinputdir+"/test_1.txt";
+    assertTrue("Does ls output contains file " + fileName + "?",
+               scripts.lookForGivenString(TestLs_output, fileName) == true);
+
+    fileName = "/user/"+user_testinputdir+"/test_2.txt";
+    assertTrue("Does ls output contains file " + fileName + "?",
+               scripts.lookForGivenString(TestLs_output, fileName) == true);
+
+    fileName = "/user/"+user_testinputdir+"/test.zip";
+    assertTrue("Does ls output contains file " + fileName + "?",
+                scripts.lookForGivenString(TestLs_output, fileName) == true);
+  }
+
+  @Test
+  public void testLsForHumanReadableFormat() {
+    println("testLsForHumanReadableFormat");
+    // verify that '-h' lists the file size in human-readable format
+    sh.exec("hdfs dfs -ls -h /user/$USERNAME/$testLsInputDir/" +
+            "$testLsInputs/test_3 ");
+    assertTrue("Able to list file size in human readable format?",
+               sh.getRet() == 0);
+    TestLs_output = sh.getOut();
+
+    String fileName = "/user/$USERNAME/$testLsInputDir/$testLsInputs/test_3";
+    assertTrue("Does output contains proper size value for " + fileName + "?",
+               scripts.lookForGivenString(sh.getOut(), "131.8 K") == true);
+  }
+}
+
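For reference, a minimal Groovy sketch of how these ls tests parse an output
row; the sample line, user name, and size are illustrative only, not part of
the patch:

    // `hdfs dfs -ls` prints: permissions, replication, owner, group,
    // size, date, time, path -- split on whitespace to index the fields
    String line = "-rw-r--r--   3 alice supergroup 134963 2015-09-06 01:18 " +
                  "/user/alice/testLsInputDir/test_data_TestLs/test_3"
    String[] f = line.split("\\s+")
    assert f[0] == "-rw-r--r--"   // permissions
    assert f[1] == "3"            // replication ('-' for a directory)
    assert f[2] == "alice"        // owner
    assert f[4] == "134963"       // size in bytes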

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMkdir.groovy
----------------------------------------------------------------------
diff --git 
a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMkdir.groovy
 
b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMkdir.groovy
new file mode 100644
index 0000000..604cc82
--- /dev/null
+++ 
b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMkdir.groovy
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestMkdir {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for mkdir absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testMkdirInputDir = "testMkdirInputDir" + date;
+  private static String testMkdirOut = "testMkdirOut" + date;
+  private static int repfactor = 2;
+  private static String user_testinputdir  = USERNAME+"/"+testMkdirInputDir;
+  static List<String> TestMkdir_output = new ArrayList<String>();
+  static List<String> TestMkdir_error = new ArrayList<String>();
+  private static String TESTDIR  = "/user/$USERNAME/$testMkdirInputDir";
+  private CommonFunctions scripts = new CommonFunctions();
+  static boolean result = false;
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestMkdir.class, "." , null);
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testMkdirOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testMkdirOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testMkdirBasics() {
+    println("TestMkdirBasics");
+    result = false;
+    // test whether basic mkdir command works
+    sh.exec("hdfs dfs -mkdir $TESTDIR/test10");
+    assertTrue("Able to create directory?", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls $TESTDIR ");
+    assertTrue("Directory found on HDFS?", sh.getRet() == 0);
+
+    TestMkdir_output = sh.getOut();
+    for (String output_String : TestMkdir_output) {
+      String[] string = output_String.split(" ");
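+      // split(" ") on single spaces still leaves the permissions
+      // string in string[0], which is all this check needs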
+      if (output_String.contains("test10") &&
+          string[0].contains("drwxr-xr-x")) {
+        result = true;
+        break;
+      }
+    }
+    assertTrue("Does Directory created properly on hdfs?", result == true);
+  }
+
+  @Test
+  public void testMkdirWithpOption() {
+    println("testMkdirWithpOption");
+    // test whether mkdir command works with -p option
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR/test1/test2");
+    assertTrue("Could not create directory on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls $TESTDIR/test1 ");
+    assertTrue("directory not found on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does $TESTDIR/test1/test2 folder got created with -p option?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          "$TESTDIR/test1/test2") == true);
+  }
+
+  @Test
+  public void testMkdirWithOutpOption() {
+    println("testMkdirWithOutpOption");
+    /*
+     * test that creating a directory fails when its parent does not
+     * exist and '-p' is not used
+     */
+    sh.exec("hdfs dfs -mkdir $TESTDIR/test2/test2");
+    assertTrue("mkdir without -p unexpectedly succeeded on HDFS",
+               sh.getRet() == 1);
+
+    String errMsg = "mkdir: `$TESTDIR/test2/test2': No such file or directory";
+    assertTrue("Does $TESTDIR/test2/test2 folder created without -p option?",
+               scripts.lookForGivenString(sh.getErr(), errMsg) == true);
+  }
+
+  @Test
+  public void testMkdirWithExistingDir() {
+    println("testMkdirWithExistingDir");
+    // test whether an error message is printed if the directory already exists
+    sh.exec("hdfs dfs -mkdir $TESTDIR");
+    assertTrue("Able to create existing directory?", sh.getRet() == 1);
+
+    String errMsg = "mkdir: `$TESTDIR': File exists";
+    assertTrue("Does $TESTDIR folder created",
+               scripts.lookForGivenString(sh.getErr(), errMsg) == true);
+  }
+
+  @Test
+  public void testMkdirWithMultipleDirectories() {
+    //test creating multiple directories
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR/test2 $TESTDIR/test3");
+    assertTrue("Able to Create multiple directories on HDFS?",
+               sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls -d $TESTDIR/test2");
+    assertTrue("ls command failed on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does $TESTDIR/test2 folder created?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          "$TESTDIR/test2") == true);
+
+    sh.exec("hdfs dfs -ls -d $TESTDIR/test3");
+    assertTrue("ls command failed on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does $TESTDIR/test3 folder created?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          "$TESTDIR/test3") == true);
+  }
+}
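
A minimal sketch of the remove-if-exists guard these setUp()/tearDown()
methods repeat, factored into a helper; the helper name is hypothetical and
not part of the patch, while Shell is the Bigtop test helper the tests
already import:

    static void removeHdfsDirIfExists(Shell sh, String dir) {
      sh.exec("hdfs dfs -test -d ${dir}")   // exit code 0 iff dir exists
      if (sh.getRet() == 0) {
        sh.exec("hdfs dfs -rm -r -skipTrash ${dir}")
        assert sh.getRet() == 0 : "Could not remove ${dir}"
      }
    }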

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMv.groovy
----------------------------------------------------------------------
diff --git 
a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMv.groovy
 
b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMv.groovy
new file mode 100644
index 0000000..5cc741a
--- /dev/null
+++ 
b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMv.groovy
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import org.junit.runners.MethodSorters;
+
+public class TestMv {
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for mv absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static final String USERDIR = System.getProperty("user.dir");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testMvInputDir = "testMvInputDir" + date;
+  private static String testMvInputs = "test_data_TestMv"
+  private static String testMvOut = "testMvOut" + date;
+  private static String testMvOutCmp = "testMvOutCmp" + date;
+  private static String TESTDIR  = "/user/$USERNAME/$testMvInputDir";
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestMv.class, "." , null);
+    sh.exec("cp -r test_data test_data_TestMv");
+    assertTrue("Could not copy data into test_data_TestMv", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testMvInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // verify the input file exists on HDFS
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testMvInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+
+    sh.exec("test -d temp_testmv");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testmv");
+    }
+    sh.exec("mkdir temp_testmv");
+    assertTrue("could not create a dir", sh.getRet() == 0);
+
+    println("Running mv:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testMvOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testMvOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+    sh.exec("test -f $testMvOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testMvOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+    sh.exec("test -d temp_testmv");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testmv");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testMvInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testMvInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+  }
+
+  @Test
+  public void testMvFile() {
+    println("testMvFile");
+    //mv file from one hdfs location to other
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_1.txt $TESTDIR");
+    assertTrue("mv command failed", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls $TESTDIR/$testMvInputs/test_1.txt");
+    assertTrue("Able to find original file?", sh.getRet() == 1);
+
+    sh.exec("hdfs dfs -ls $TESTDIR/test_1.txt");
+    assertTrue("Able to find file in moved location?", sh.getRet() == 0);
+
+    // Now verify the moved file data
+    sh.exec("hdfs dfs -get $TESTDIR/test_1.txt temp_testmv/test_1.txt");
+    assertTrue("get command failed", sh.getRet() == 0);
+    sh.exec("diff temp_testmv/test_1.txt test_data/test_1.txt");
+    assertTrue("files differ", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testMvDirectory() {
+    println("testMvDirectory");
+    //mv dir from one hdfs location to other
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs $TESTDIR/test_moved");
+    assertTrue("mv command failed", sh.getRet() == 0);
+
+    // check that the original contents no longer exist
+    sh.exec("hdfs dfs -ls $TESTDIR/$testMvInputs");
+    assertTrue("Able to list deleted files?", sh.getRet() == 1);
+
+    // check the moved location
+    sh.exec("hdfs dfs -ls -R $TESTDIR/test_moved");
+    assertTrue("Able to list moved files?", sh.getRet() == 0);
+
+    List out_msgs = sh.getOut();
+    Boolean success_1 = false;
+    Boolean success_2 = false;
+    for (String next_val : out_msgs) {
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_moved/test_2.txt")) {
+        success_1 = true;
+        continue;
+      }
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_moved/test.zip")) {
+        success_2 = true;
+      }
+    }
+    assertTrue("Are the moved files' details correct?",
+               success_1 == true && success_2 == true);
+
+    // now move back the files
+    sh.exec("hdfs dfs -mv $TESTDIR/test_moved $TESTDIR/$testMvInputs");
+    assertTrue("mv command failed", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testMvMultipleFiles() {
+    println("testMvMultipleFiles");
+    sh.exec("hdfs dfs -test -d $TESTDIR/test_moved");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r $TESTDIR/test_moved");
+      assertTrue("Able to clear contents of dir $TESTDIR/test_moved?",
+                 sh.getRet() == 0);
+    }
+    //mv multiple files from one hdfs location to other
+    sh.exec("hdfs dfs -mkdir $TESTDIR/test_moved");
+    assertTrue("could not create a dir", sh.getRet() == 0);
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_2.txt " +
+            "$TESTDIR/$testMvInputs/test.zip $TESTDIR/test_moved");
+    assertTrue("mv command failed", sh.getRet() == 0);
+
+    // check that the files were removed from the old location
+    sh.exec("hdfs dfs -ls $TESTDIR/$testMvInputs/test_2.txt");
+    assertTrue("Was $TESTDIR/$testMvInputs/test_2.txt moved out of the source?",
+               sh.getRet() == 1);
+    sh.exec("hdfs dfs -ls $TESTDIR/$testMvInputs/test.zip");
+    assertTrue("Was $TESTDIR/$testMvInputs/test.zip moved out of the source?",
+               sh.getRet() == 1);
+
+    // check that the files are present in the new location
+    sh.exec("hdfs dfs -ls $TESTDIR/test_moved/test_2.txt");
+    assertTrue("Is $TESTDIR/test_moved/test_2.txt present?",
+               sh.getRet() == 0);
+    sh.exec("hdfs dfs -ls $TESTDIR/test_moved/test.zip");
+    assertTrue("Is $TESTDIR/test_moved/test.zip present?",
+               sh.getRet() == 0);
+
+    // verify the moved files data
+    sh.exec("hdfs dfs -get $TESTDIR/test_moved/test.zip temp_testmv");
+    assertTrue("get command failed", sh.getRet() == 0);
+    sh.exec("diff temp_testmv/test.zip test_data/test.zip");
+    assertTrue("files differ", sh.getRet() == 0);
+
+    // Now move back files
+    sh.exec("hdfs dfs -mv $TESTDIR/test_moved/test_2.txt " +
+            "$TESTDIR/test_moved/test.zip $TESTDIR/$testMvInputs");
+    assertTrue("mv command failed", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testMvWithOutProperInputs() {
+    println("testMvWithOutProperInputs");
+    // mv a file on HDFS without providing a destination
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_3");
+    assertTrue("mv without a destination unexpectedly succeeded",
+               sh.getRet() == 255);
+
+    String failure_msg = "-mv: Not enough arguments: expected 2 but got 1";
+    assertTrue("Did mv return the expected error message?",
+               sh.getErr().get(0).toString().contains(failure_msg));
+  }
+
+  @Test
+  public void testMvNonexistingFile() {
+    println("testMvNonexistingFile");
+    // mv a non-existing file on HDFS
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_13.txt " +
+            "$TESTDIR/temp_moved");
+    assertTrue("mv of a non-existing file unexpectedly succeeded",
+               sh.getRet() == 1);
+
+    String failure_msg = "mv: `$TESTDIR/$testMvInputs/test_13.txt': " +
+                         "No such file or directory";
+    assertTrue("Did mv return the proper error message for a " +
+               "non-existing file?",
+               sh.getErr().get(0).toString().contains(failure_msg));
+  }
+
+  @Test
+  public void testMvProtocol() {
+    println("testMvProtocol");
+    // mv a file from HDFS to a local (file://) destination
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_2.txt file://$USERDIR");
+    assertTrue("mv across filesystems unexpectedly succeeded",
+               sh.getRet() == 1);
+
+    String failure_msg = "mv: `$TESTDIR/$testMvInputs/test_2.txt': " +
+                         "Does not match target filesystem";
+    assertTrue("Did mv return the expected error message?",
+               sh.getErr().get(0).toString().contains(failure_msg));
+
+    // mv a local directory to another local path using the file:// scheme
+    sh.exec("hdfs dfs -mv file://$USERDIR/$testMvInputs/ " +
+            "file://$USERDIR/temp_testmv_1");
+    assertTrue("mv command failed", sh.getRet() == 0);
+
+    sh.exec("ls $testMvInputs");
+    assertTrue("Does mv source files still present?", sh.getRet() == 2);
+
+    sh.exec("ls temp_testmv_1");
+    assertTrue("listing file failed", sh.getRet() == 0);
+
+    //revert the changes
+    sh.exec("hdfs dfs -mv file://$USERDIR/temp_testmv_1 " +
+            "file://$USERDIR/$testMvInputs");
+    assertTrue("mv command failed", sh.getRet() == 0);
+  }
+}
+
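A minimal sketch of the stderr check testMvWithOutProperInputs performs,
shown with the lookForGivenString helper from CommonFunctions that other
tests in this patch use; the path is illustrative only:

    def scripts = new CommonFunctions()
    sh.exec("hdfs dfs -mv /tmp/only_one_argument")   // no destination given
    assert sh.getRet() == 255                        // usage error
    assert scripts.lookForGivenString(sh.getErr(),
        "-mv: Not enough arguments: expected 2 but got 1")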

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestPut.groovy
----------------------------------------------------------------------
diff --git 
a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestPut.groovy
 
b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestPut.groovy
new file mode 100644
index 0000000..6f34363
--- /dev/null
+++ 
b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestPut.groovy
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError;
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestPut {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for put absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testPutInputDir = "testPutInputDir" + date;
+  private static String testPutInputs = "test_data_TestPut"
+  private static String testPutOut = "testPutOut" + date;
+  private static String testPutOutCmp = "testPutOutCmp" + date;
+  private static String user_testinputdir = USERNAME+"/"+testPutInputDir+"/"+
+                                             testPutInputs;
+  static List<String> TestPut_output = new ArrayList<String>();
+  static List<String> TestPut_error = new ArrayList<String>();
+  private static String TESTDIR  = "/user/$USERNAME/$testPutInputDir";
+  static boolean result = false;
+  private CommonFunctions scripts = new CommonFunctions();
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestPut.class, "." , null);
+    sh.exec("cp -r test_data test_data_TestPut");
+    assertTrue("Could not copy data into test_data_TestPut", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    println("Running Put:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testPutOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testPutOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+    sh.exec("test -f $testPutOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testPutOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testPutInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testPutInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testPutDirectory() {
+    println("testPutDirectory");
+    // upload directory to hdfs
+    sh.exec("hdfs dfs -put $testPutInputs $TESTDIR ");
+    assertTrue("Could not put files to HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -ls -R $TESTDIR/$testPutInputs ");
+    assertTrue("could not find the copied directory on hdfs",
+               sh.getRet() == 0);
+
+    assertTrue("Able to find uploaded files on hdfs?",
+               scripts.lookForGivenString(sh.getOut(),"test_3") == true);
+  }
+
+  @Test
+  public void testPutFile() {
+    println("testPutFile");
+    // upload single files
+    sh.exec("hdfs dfs -put $testPutInputs/test_1.txt $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -cat $TESTDIR/test_1.txt &> $testPutOut");
+    assertTrue("Able to cat data from $TESTDIR/test_1.txt from hdfs?",
+               sh.getRet() == 0);
+
+    sh.exec("cat $testPutInputs/test_1.txt &> $testPutOutCmp");
+    assertTrue("Able to cat data from $testPutInputs/test_1.txt from local?",
+               sh.getRet() == 0);
+
+    sh.exec("diff $testPutOutCmp $testPutOut");
+    assertTrue("Uploaded file data differs with local file?",
+               sh.getRet() == 0);
+  }
+
+  @Test
+  public void testPutMultipleFiles() {
+    println("testPutMultipleFiles");
+    // copy multiple input files to hdfs
+    sh.exec("hdfs dfs -put $testPutInputs/test_2.txt $testPutInputs/test.zip "+
+            "$testPutInputs/test_3 $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls -R $TESTDIR ");
+    assertTrue("could not find the copied directory on hdfs",
+               sh.getRet() == 0);
+
+    assertTrue("Does test_2.txt uploaded properly?",
+               scripts.lookForGivenString(sh.getOut(),"test_2.txt") == true);
+
+    assertTrue("Does test.zip uploaded properly?",
+               scripts.lookForGivenString(sh.getOut(),"test.zip") == true);
+
+    assertTrue("Does test_3 uploaded properly?",
+               scripts.lookForGivenString(sh.getOut(),"test_3") == true);
+  }
+
+  @Test
+  public void testPutNonExistingFile() {
+    println("testPutNonExistingFile");
+    sh.exec("hdfs dfs -put $testPutInputs/test_3.txt $TESTDIR");
+    assertTrue("A non existing file got copied to hdfs", sh.getRet() == 1);
+
+    String searchToken = "put: `"+testPutInputs+"/test_3.txt': " +
+                         "No such file or directory";
+    println(searchToken);
+    assertTrue("Able to Upload non-existing file?",
+               scripts.lookForGivenString(sh.getErr(), searchToken) == true);
+  }
+
+  @Test
+  public void testPutToOverWriteFile() {
+    println("testPutNonExistingFile");
+    // copy a file which is already present on HDFS at the destination
+    sh.exec("hdfs dfs -test -f $TESTDIR/test_1.txt");
+    if (sh.getRet() == 1){
+      sh.exec("hdfs dfs -put $testPutInputs/test_1.txt $TESTDIR");
+      assertTrue("Able to upload file?", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -put $testPutInputs/test_1.txt $TESTDIR ");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 1);
+
+    String searchToken = "put: `/user/"+USERNAME+"/"+
+                          testPutInputDir+"/test_1.txt': File exists";
+    assertTrue("Able to Upload non-existing file?",
+               scripts.lookForGivenString(sh.getErr(), searchToken) == true);
+  }
+}
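
A minimal sketch of the put-then-verify round trip these tests build on;
the paths and file names are illustrative, not part of the patch, and sh
and USERNAME are assumed to be defined as in the tests above:

    // upload a local file, read it back, and compare byte-for-byte
    sh.exec("hdfs dfs -put local_file.txt /user/$USERNAME/scratch/")
    assert sh.getRet() == 0
    sh.exec("hdfs dfs -cat /user/$USERNAME/scratch/local_file.txt > copy.txt")
    assert sh.getRet() == 0
    sh.exec("diff local_file.txt copy.txt")   // exit 0 iff identical
    assert sh.getRet() == 0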
