Repository: bigtop
Updated Branches:
  refs/heads/master 06a0b005e -> 01ac55b5f


BIGTOP-1246. Fix the shell-object bug in HttpFs smoke tests


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/01ac55b5
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/01ac55b5
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/01ac55b5

Branch: refs/heads/master
Commit: 01ac55b5ff06118119113b8b27c448598650a13e
Parents: 06a0b00
Author: Adam Kawa <[email protected]>
Authored: Thu Mar 13 11:07:55 2014 -0700
Committer: Konstantin Boudnik <[email protected]>
Committed: Thu Mar 13 11:07:55 2014 -0700

----------------------------------------------------------------------
 .../bigtop/itest/httpfs/TestHttpFs.groovy       | 32 ++++++++++----------
 1 file changed, 16 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/01ac55b5/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
----------------------------------------------------------------------
diff --git 
a/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
 
b/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
index 0aa9a45..9cd40eb 100644
--- 
a/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
+++ 
b/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
@@ -46,9 +46,9 @@ public class TestHttpFs {
     private static String testHttpFsFolder =  "/tmp/httpfssmoke-" + (new 
Date().getTime());
     private static String testHttpFsFolderRenamed = 
"$testHttpFsFolder-renamed";
     
-    private static Shell shHDFS = new Shell("/bin/bash", USERNAME);
     private static Shell sh = new Shell("/bin/bash");
-    
+    // it will be used to clean up directories, as they are created via curl with user.name=$USERNAME
+    private static Shell shUSERNAME = new Shell("/bin/bash", USERNAME);
 
     @BeforeClass
     public static void setUp() {
@@ -56,18 +56,18 @@ public class TestHttpFs {
 
     @AfterClass
     public static void tearDown() {
-        // clean up of existing folders
-        shHDFS.exec("hadoop fs -test -e $testHttpFsFolder");
-        if (shHDFS.getRet() == 0) {
-            shHDFS.exec("hadoop fs -rmr -skipTrash $testHttpFsFolder");
+        // clean up existing folders using the USERNAME of the user who created them via curl
+        shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolder");
+        if (shUSERNAME.getRet() == 0) {
+            shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolder");
             assertTrue("Deletion of previous testHttpFsFolder from HDFS 
failed",
-                shHDFS.getRet() == 0);
+                shUSERNAME.getRet() == 0);
         }
-        shHDFS.exec("hadoop fs -test -e $testHttpFsFolderRenamed");
-        if (shHDFS.getRet() == 0) {
-            shHDFS.exec("hadoop fs -rmr -skipTrash $testHttpFsFolderRenamed");
+        shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolderRenamed");
+        if (shUSERNAME.getRet() == 0) {
+            shUSERNAME.exec("hadoop fs -rmr -skipTrash 
$testHttpFsFolderRenamed");
             assertTrue("Deletion of previous testHttpFsFolderRenamed from HDFS 
failed",
-                shHDFS.getRet() == 0);
+                shUSERNAME.getRet() == 0);
         }
     }
 
@@ -98,7 +98,7 @@ public class TestHttpFs {
         assertTrue("curl command to create a dir failed", sh.getRet() == 0);
         assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
         sh.exec("curl -i -X PUT 
'$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=RENAME&destination=$testHttpFsFolderRenamed'");
-        assertTrue("curl command to rename a dir failed", shHDFS.getRet() == 
0);
+        assertTrue("curl command to rename a dir failed", sh.getRet() == 0);
         assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
     }
 
@@ -108,7 +108,7 @@ public class TestHttpFs {
         assertTrue("curl command to create a dir failed", sh.getRet() == 0);
         assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
         sh.exec("curl -i -X DELETE 
'$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=DELETE'");
-        assertTrue("curl command to delete a dir failed", shHDFS.getRet() == 
0);
+        assertTrue("curl command to delete a dir failed", sh.getRet() == 0);
         assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
     }
     
@@ -131,7 +131,7 @@ public class TestHttpFs {
         createDir(testHttpFsFolder);
         assertTrue("curl command to create a dir failed", sh.getRet() == 0);
         sh.exec("curl -i -X PUT 
'$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=CREATE'");
-        assertTrue("curl command to create a file failed", shHDFS.getRet() == 
0);
+        assertTrue("curl command to create a file failed", sh.getRet() == 0);
         String datanodeLocation = null;
         sh.getOut().each {
             if (it.startsWith("Location:")) {
@@ -142,10 +142,10 @@ public class TestHttpFs {
         LOG.debug("Datanode location: $datanodeLocation");
         assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
         sh.exec("curl -i -T $DATA_DIR/$filename '$datanodeLocation' --header 
'Content-Type:application/octet-stream'");
-        assertTrue("curl command to create a file failed", shHDFS.getRet() == 
0);
+        assertTrue("curl command to create a file failed", sh.getRet() == 0);
         assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
         sh.exec("curl -i -L 
'$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=OPEN'");
-        assertTrue("curl command to create a file failed", shHDFS.getRet() == 
0);
+        assertTrue("curl command to create a file failed", sh.getRet() == 0);
         assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
         assertValueExists(sh.getOut(), filenameContent);
     }

Reply via email to