steveloughran commented on a change in pull request #743:
URL: https://github.com/apache/hadoop/pull/743#discussion_r479677745



##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
##########
@@ -195,99 +214,82 @@ public void testWDAbsolute() throws IOException {
     Path absoluteDir = new Path(fSys.getUri() + "/test/existingDir");
     fSys.mkdirs(absoluteDir);
     fSys.setWorkingDirectory(absoluteDir);
-    Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());
+    assertEquals(absoluteDir, fSys.getWorkingDirectory());
   }
   
   @Test
   public void testMkdirs() throws Exception {
     Path testDir = getTestRootPath(fSys, "test/hadoop");
-    Assert.assertFalse(exists(fSys, testDir));
-    Assert.assertFalse(isFile(fSys, testDir));
+    assertFalse(exists(fSys, testDir));
+    assertFalse(isFile(fSys, testDir));
 
     fSys.mkdirs(testDir);
 
-    Assert.assertTrue(exists(fSys, testDir));
-    Assert.assertFalse(isFile(fSys, testDir));
+    assertTrue(exists(fSys, testDir));
+    assertFalse(isFile(fSys, testDir));
     
     fSys.mkdirs(testDir);
 
-    Assert.assertTrue(exists(fSys, testDir));
-    Assert.assertFalse(isFile(fSys, testDir));
+    assertTrue(exists(fSys, testDir));
+    assertFalse(isFile(fSys, testDir));
 
     Path parentDir = testDir.getParent();
-    Assert.assertTrue(exists(fSys, parentDir));
-    Assert.assertFalse(isFile(fSys, parentDir));
+    assertTrue(exists(fSys, parentDir));
+    assertFalse(isFile(fSys, parentDir));
 
     Path grandparentDir = parentDir.getParent();
-    Assert.assertTrue(exists(fSys, grandparentDir));
-    Assert.assertFalse(isFile(fSys, grandparentDir));
+    assertTrue(exists(fSys, grandparentDir));
+    assertFalse(isFile(fSys, grandparentDir));
     
   }
   
   @Test
   public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
     Path testDir = getTestRootPath(fSys, "test/hadoop");
-    Assert.assertFalse(exists(fSys, testDir));
+    assertFalse(exists(fSys, testDir));
     fSys.mkdirs(testDir);
-    Assert.assertTrue(exists(fSys, testDir));
+    assertTrue(exists(fSys, testDir));
     
     createFile(getTestRootPath(fSys, "test/hadoop/file"));
     
     Path testSubDir = getTestRootPath(fSys, "test/hadoop/file/subdir");
-    try {
-      fSys.mkdirs(testSubDir);
-      Assert.fail("Should throw IOException.");
-    } catch (IOException e) {
-      // expected
-    }
-    Assert.assertFalse(exists(fSys, testSubDir));
+    intercept(IOException.class, () ->
+      fSys.mkdirs(testSubDir));
+    assertFalse(exists(fSys, testSubDir));
     
     Path testDeepSubDir = getTestRootPath(fSys, 
"test/hadoop/file/deep/sub/dir");
-    try {
-      fSys.mkdirs(testDeepSubDir);
-      Assert.fail("Should throw IOException.");
-    } catch (IOException e) {
-      // expected
-    }
-    Assert.assertFalse(exists(fSys, testDeepSubDir));
+    intercept(IOException.class, () ->
+        fSys.mkdirs(testDeepSubDir));
+    assertFalse(exists(fSys, testDeepSubDir));
     
   }
   
   @Test
-  public void testGetFileStatusThrowsExceptionForNonExistentFile() 
-    throws Exception {
-    try {
-      fSys.getFileStatus(getTestRootPath(fSys, "test/hadoop/file"));
-      Assert.fail("Should throw FileNotFoundException");
-    } catch (FileNotFoundException e) {
-      // expected
-    }
-  } 
+  public void testGetFileStatusThrowsExceptionForNonExistentFile()
+      throws Exception {
+    intercept(FileNotFoundException.class, () ->
+      fSys.getFileStatus(getTestRootPath(fSys, "test/hadoop/file")));
+  }
   
   @Test
   public void testListStatusThrowsExceptionForNonExistentFile()
-  throws Exception {
-    try {
-      fSys.listStatus(getTestRootPath(fSys, "test/hadoop/file"));
-      Assert.fail("Should throw FileNotFoundException");
-    } catch (FileNotFoundException fnfe) {
-      // expected
-    }
+      throws Exception {
+    intercept(FileNotFoundException.class, () ->
+      fSys.listStatus(getTestRootPath(fSys, "test/hadoop/file")));
   }
 
   @Test
   public void testListStatusThrowsExceptionForUnreadableDir()
   throws Exception {
+    assumePermissionsSupported();
     Path testRootDir = getTestRootPath(fSys, "test/hadoop/dir");
     Path obscuredDir = new Path(testRootDir, "foo");
     Path subDir = new Path(obscuredDir, "bar"); //so foo is non-empty
     fSys.mkdirs(subDir);
     fSys.setPermission(obscuredDir, new FsPermission((short)0)); //no access
     try {
-      fSys.listStatus(obscuredDir);
-      Assert.fail("Should throw IOException");
-    } catch (IOException ioe) {
-      // expected
+      intercept(IOException.class, () ->

Review comment:
       Now that's interesting. I wonder what Unix does there, because HDFS 
should really be doing the same. If I'm in dir /a and don't have exec perms for 
/a/b, I should be able to call stat(/a/b), just not ls(/a/b). (Pauses to check. 
OK, on the command line, removing x perms from a dir means ls() returns an 
empty list, not an error.)
   
   (And the `hadoop fs -ls -R` command returns an empty list too.)
   




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to