This is an automated email from the ASF dual-hosted git repository.

snemeth pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 8c9173c  HADOOP-16510. [hadoop-common] Fix order of actual and expected expression in assert statements. Contributed by Adam Antal
8c9173c is described below

commit 8c9173c87ce02d4eaeeafac3e4e185b390e07527
Author: Szilard Nemeth <snem...@apache.org>
AuthorDate: Thu Oct 31 14:35:04 2019 +0100

    HADOOP-16510. [hadoop-common] Fix order of actual and expected expression in assert statements. Contributed by Adam Antal
---
 .../org/apache/hadoop/conf/TestConfiguration.java  |   4 +-
 .../hadoop/crypto/CryptoStreamsTestBase.java       |   8 +-
 .../TestCryptoStreamsWithJceAesCtrCryptoCodec.java |   9 +-
 .../org/apache/hadoop/fs/TestHarFileSystem.java    |  58 +++----
 .../org/apache/hadoop/fs/TestLocalFileSystem.java  |  43 +++--
 .../hadoop/fs/viewfs/ViewFileSystemBaseTest.java   |  10 +-
 .../apache/hadoop/fs/viewfs/ViewFsBaseTest.java    |   7 +-
 .../java/org/apache/hadoop/io/TestArrayFile.java   |  11 +-
 .../org/apache/hadoop/io/TestSequenceFileSync.java |  14 +-
 .../test/java/org/apache/hadoop/io/TestText.java   |  10 +-
 .../io/file/tfile/TestTFileUnsortedByteArrays.java | 193 +++++++++------------
 .../org/apache/hadoop/io/file/tfile/TestVLong.java |  10 +-
 .../org/apache/hadoop/ipc/TestFairCallQueue.java   |  22 ++-
 .../test/java/org/apache/hadoop/ipc/TestIPC.java   |   7 +-
 .../org/apache/hadoop/ipc/TestProtoBufRpc.java     |  39 ++---
 .../test/java/org/apache/hadoop/ipc/TestRPC.java   |   5 +-
 .../org/apache/hadoop/ipc/TestSocketFactory.java   |  25 ++-
 .../ipc/TestWeightedRoundRobinMultiplexer.java     |  85 ++++-----
 .../hadoop/metrics2/util/TestSampleQuantiles.java  |  28 +--
 .../apache/hadoop/security/TestGroupsCaching.java  |  23 ++-
 .../hadoop/security/TestShellBasedIdMapping.java   |   9 +-
 .../security/authorize/TestAccessControlList.java  | 123 +++++++------
 .../security/http/TestXFrameOptionsFilter.java     |  16 +-
 .../token/delegation/TestDelegationToken.java      |  13 +-
 .../hadoop/util/TestLightWeightResizableGSet.java  |  55 +++---
 .../hadoop/util/curator/TestChildReaper.java       |  19 +-
 hadoop-common-project/hadoop-nfs/pom.xml           |   5 +
 .../org/apache/hadoop/nfs/nfs3/TestFileHandle.java |  11 +-
 28 files changed, 430 insertions(+), 432 deletions(-)
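
As an illustration of the pattern applied throughout this patch (not itself part of the commit): the recurring problem was that JUnit's assertEquals(expected, actual) had its arguments swapped at these call sites, so failure messages reported the expected and actual values the wrong way round. The patch moves such sites to AssertJ (and, in TestConfiguration, Hamcrest) assertions, which take the actual value first by construction. A minimal sketch, assuming JUnit 4 and AssertJ on the test classpath; the class and variable names below are illustrative only:

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

public class AssertOrderExample {
  @Test
  public void reportsActualValueFirst() {
    int actualSize = 10;
    // Old style with swapped arguments: assertEquals(actualSize, 10);
    // JUnit treats the first argument as the expected value, so a failure
    // message would read backwards.
    // AssertJ keeps the actual value on the left, so the order cannot be confused:
    assertThat(actualSize).isEqualTo(10);
  }
}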

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
index 64beb7b..a6adb9f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
@@ -2437,7 +2437,7 @@ public class TestConfiguration {
     }
     conf.set("different.prefix" + ".name", "value");
     Map<String, String> prefixedProps = conf.getPropsWithPrefix("prefix.");
-    assertEquals(prefixedProps.size(), 10);
+    assertThat(prefixedProps.size(), is(10));
     for (int i = 0; i < 10; i++) {
       assertEquals("value" + i, prefixedProps.get("name" + i));
     }
@@ -2448,7 +2448,7 @@ public class TestConfiguration {
       conf.set("subprefix." + "subname" + i, "value_${foo}" + i);
     }
     prefixedProps = conf.getPropsWithPrefix("subprefix.");
-    assertEquals(prefixedProps.size(), 10);
+    assertThat(prefixedProps.size(), is(10));
     for (int i = 0; i < 10; i++) {
       assertEquals("value_bar" + i, prefixedProps.get("subname" + i));
     }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java
index 64bb966..53d0939 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java
@@ -46,6 +46,8 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 public abstract class CryptoStreamsTestBase {
   protected static final Logger LOG = LoggerFactory.getLogger(
       CryptoStreamsTestBase.class);
@@ -198,7 +200,7 @@ public abstract class CryptoStreamsTestBase {
     
     // EOF
     n = in.read(result, 0, dataLen);
-    Assert.assertEquals(n, -1);
+    assertThat(n).isEqualTo(-1);
   }
   
   /** Test crypto writing with different buffer size. */
@@ -612,7 +614,7 @@ public abstract class CryptoStreamsTestBase {
     
     // Skip after EOF
     skipped = in.skip(3);
-    Assert.assertEquals(skipped, 0);
+    assertThat(skipped).isZero();
     
     in.close();
   }
@@ -844,7 +846,7 @@ public abstract class CryptoStreamsTestBase {
     ((Seekable) in).seek(dataLen);
     buf.clear();
     n = ((ByteBufferReadable) in).read(buf);
-    Assert.assertEquals(n, -1);
+    assertThat(n).isEqualTo(-1);
     
     in.close();
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsWithJceAesCtrCryptoCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsWithJceAesCtrCryptoCodec.java
index 76c39d6..d47dd30 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsWithJceAesCtrCryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsWithJceAesCtrCryptoCodec.java
@@ -19,20 +19,21 @@ package org.apache.hadoop.crypto;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.junit.Assert;
 import org.junit.BeforeClass;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 public class TestCryptoStreamsWithJceAesCtrCryptoCodec extends 
     TestCryptoStreams {
 
   @BeforeClass
-  public static void init() throws Exception {
+  public static void init() {
     Configuration conf = new Configuration();
     conf.set(
         CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_KEY,
         JceAesCtrCryptoCodec.class.getName());
     codec = CryptoCodec.getInstance(conf);
-    Assert.assertEquals(JceAesCtrCryptoCodec.class.getCanonicalName(),
-        codec.getClass().getCanonicalName());
+    assertThat(JceAesCtrCryptoCodec.class.getCanonicalName())
+        .isEqualTo(codec.getClass().getCanonicalName());
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
index b442553..3b923e0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
@@ -46,8 +46,7 @@ import java.util.concurrent.CompletableFuture;
 import static org.apache.hadoop.fs.Options.ChecksumOpt;
 import static org.apache.hadoop.fs.Options.CreateOpts;
 import static org.apache.hadoop.fs.Options.Rename;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.assertj.core.api.Assertions.assertThat;
 
 @SuppressWarnings("deprecation")
 public class TestHarFileSystem {
@@ -279,13 +278,8 @@ public class TestHarFileSystem {
   @Test
   public void testFileChecksum() throws Exception {
     final Path p = new Path("har://file-localhost/foo.har/file1");
-    final HarFileSystem harfs = new HarFileSystem();
-    try {
-      Assert.assertEquals(null, harfs.getFileChecksum(p));
-    } finally {
-      if (harfs != null) {
-        harfs.close();
-      }
+    try (HarFileSystem harfs = new HarFileSystem()) {
+      assertThat(harfs.getFileChecksum(p)).isNull();
     }
   }
 
@@ -299,30 +293,30 @@ public class TestHarFileSystem {
       // case 1: range starts before current har block and ends after
       BlockLocation[] b = { new BlockLocation(null, null, 10, 10) };
       HarFileSystem.fixBlockLocations(b, 0, 20, 5);
-      assertEquals(b[0].getOffset(), 5);
-      assertEquals(b[0].getLength(), 10);
+      assertThat(b[0].getOffset()).isEqualTo(5);
+      assertThat(b[0].getLength()).isEqualTo(10);
     }
     {
       // case 2: range starts in current har block and ends after
       BlockLocation[] b = { new BlockLocation(null, null, 10, 10) };
       HarFileSystem.fixBlockLocations(b, 0, 20, 15);
-      assertEquals(b[0].getOffset(), 0);
-      assertEquals(b[0].getLength(), 5);
+      assertThat(b[0].getOffset()).isZero();
+      assertThat(b[0].getLength()).isEqualTo(5);
     }
     {
       // case 3: range starts before current har block and ends in
       // current har block
       BlockLocation[] b = { new BlockLocation(null, null, 10, 10) };
       HarFileSystem.fixBlockLocations(b, 0, 10, 5);
-      assertEquals(b[0].getOffset(), 5);
-      assertEquals(b[0].getLength(), 5);
+      assertThat(b[0].getOffset()).isEqualTo(5);
+      assertThat(b[0].getLength()).isEqualTo(5);
     }
     {
       // case 4: range starts and ends in current har block
       BlockLocation[] b = { new BlockLocation(null, null, 10, 10) };
       HarFileSystem.fixBlockLocations(b, 0, 6, 12);
-      assertEquals(b[0].getOffset(), 0);
-      assertEquals(b[0].getLength(), 6);
+      assertThat(b[0].getOffset()).isZero();
+      assertThat(b[0].getLength()).isEqualTo(6);
     }
 
     // now try a range where start == 3
@@ -330,30 +324,30 @@ public class TestHarFileSystem {
       // case 5: range starts before current har block and ends after
       BlockLocation[] b = { new BlockLocation(null, null, 10, 10) };
       HarFileSystem.fixBlockLocations(b, 3, 20, 5);
-      assertEquals(b[0].getOffset(), 5);
-      assertEquals(b[0].getLength(), 10);
+      assertThat(b[0].getOffset()).isEqualTo(5);
+      assertThat(b[0].getLength()).isEqualTo(10);
     }
     {
       // case 6: range starts in current har block and ends after
       BlockLocation[] b = { new BlockLocation(null, null, 10, 10) };
       HarFileSystem.fixBlockLocations(b, 3, 20, 15);
-      assertEquals(b[0].getOffset(), 3);
-      assertEquals(b[0].getLength(), 2);
+      assertThat(b[0].getOffset()).isEqualTo(3);
+      assertThat(b[0].getLength()).isEqualTo(2);
     }
     {
       // case 7: range starts before current har block and ends in
       // current har block
       BlockLocation[] b = { new BlockLocation(null, null, 10, 10) };
       HarFileSystem.fixBlockLocations(b, 3, 7, 5);
-      assertEquals(b[0].getOffset(), 5);
-      assertEquals(b[0].getLength(), 5);
+      assertThat(b[0].getOffset()).isEqualTo(5);
+      assertThat(b[0].getLength()).isEqualTo(5);
     }
     {
       // case 8: range starts and ends in current har block
       BlockLocation[] b = { new BlockLocation(null, null, 10, 10) };
       HarFileSystem.fixBlockLocations(b, 3, 3, 12);
-      assertEquals(b[0].getOffset(), 3);
-      assertEquals(b[0].getLength(), 3);
+      assertThat(b[0].getOffset()).isEqualTo(3);
+      assertThat(b[0].getLength()).isEqualTo(3);
     }
 
     // test case from JIRA MAPREDUCE-1752
@@ -361,10 +355,10 @@ public class TestHarFileSystem {
       BlockLocation[] b = { new BlockLocation(null, null, 512, 512),
                             new BlockLocation(null, null, 1024, 512) };
       HarFileSystem.fixBlockLocations(b, 0, 512, 896);
-      assertEquals(b[0].getOffset(), 0);
-      assertEquals(b[0].getLength(), 128);
-      assertEquals(b[1].getOffset(), 128);
-      assertEquals(b[1].getLength(), 384);
+      assertThat(b[0].getOffset()).isZero();
+      assertThat(b[0].getLength()).isEqualTo(128);
+      assertThat(b[1].getOffset()).isEqualTo(128);
+      assertThat(b[1].getLength()).isEqualTo(384);
     }
   }
 
@@ -396,7 +390,9 @@ public class TestHarFileSystem {
         }
       }
     }
-    assertTrue((errors + " methods were not overridden correctly - see log"),
-        errors <= 0);
+    assertThat(errors)
+        .withFailMessage(errors +
+            " methods were not overridden correctly - see log")
+        .isLessThanOrEqualTo(0);
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
index bffcfa7..517f6ce 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
@@ -44,7 +44,11 @@ import java.util.stream.Collectors;
 
 import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 import static org.mockito.Mockito.*;
 
 import org.junit.After;
@@ -56,6 +60,7 @@ import org.junit.rules.Timeout;
 
 import javax.annotation.Nonnull;
 
+import static org.assertj.core.api.Assertions.assertThat;
 
 /**
  * This class tests the local file system via the FileSystem abstraction.
@@ -692,27 +697,33 @@ public class TestLocalFileSystem {
     FSDataOutputStreamBuilder builder =
         fileSys.createFile(path);
     try (FSDataOutputStream stream = builder.build()) {
-      Assert.assertEquals("Should be default block size",
-          builder.getBlockSize(), fileSys.getDefaultBlockSize());
-      Assert.assertEquals("Should be default replication factor",
-          builder.getReplication(), fileSys.getDefaultReplication());
-      Assert.assertEquals("Should be default buffer size",
-          builder.getBufferSize(),
-          fileSys.getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
+      assertThat(builder.getBlockSize())
+          .withFailMessage("Should be default block size")
+          .isEqualTo(fileSys.getDefaultBlockSize());
+      assertThat(builder.getReplication())
+          .withFailMessage("Should be default replication factor")
+          .isEqualTo(fileSys.getDefaultReplication());
+      assertThat(builder.getBufferSize())
+          .withFailMessage("Should be default buffer size")
+          .isEqualTo(fileSys.getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
               IO_FILE_BUFFER_SIZE_DEFAULT));
-      Assert.assertEquals("Should be default permission",
-          builder.getPermission(), FsPermission.getFileDefault());
+      assertThat(builder.getPermission())
+          .withFailMessage("Should be default permission")
+          .isEqualTo(FsPermission.getFileDefault());
     }
 
     // Test set 0 to replication, block size and buffer size
     builder = fileSys.createFile(path);
     builder.bufferSize(0).blockSize(0).replication((short) 0);
-    Assert.assertEquals("Block size should be 0",
-        builder.getBlockSize(), 0);
-    Assert.assertEquals("Replication factor should be 0",
-        builder.getReplication(), 0);
-    Assert.assertEquals("Buffer size should be 0",
-        builder.getBufferSize(), 0);
+    assertThat(builder.getBlockSize())
+        .withFailMessage("Block size should be 0")
+        .isZero();
+    assertThat(builder.getReplication())
+        .withFailMessage("Replication factor should be 0")
+        .isZero();
+    assertThat(builder.getBufferSize())
+        .withFailMessage("Buffer size should be 0")
+        .isZero();
   }
 
   /**
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
index 4288dbd..4902d73 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
@@ -69,9 +69,9 @@ import org.junit.Test;
 
 import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
 import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.equalTo;
 import static org.junit.Assert.*;
 
-
 /**
  * <p>
  * A collection of tests for the {@link ViewFileSystem}.
@@ -477,10 +477,10 @@ abstract public class ViewFileSystemBaseTest {
     Assert.assertEquals(targetBL.length, viewBL.length);
     int i = 0;
     for (BlockLocation vbl : viewBL) {
-      Assert.assertEquals(vbl.toString(), targetBL[i].toString());
-      Assert.assertEquals(targetBL[i].getOffset(), vbl.getOffset());
-      Assert.assertEquals(targetBL[i].getLength(), vbl.getLength());
-      i++;     
+      assertThat(vbl.toString(), equalTo(targetBL[i].toString()));
+      assertThat(vbl.getOffset(), equalTo(targetBL[i].getOffset()));
+      assertThat(vbl.getLength(), equalTo(targetBL[i].getLength()));
+      i++;
     } 
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
index e080cd6..d96cdb1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.viewfs;
 
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.apache.hadoop.fs.FileContextTestHelper.checkFileLinkStatus;
 import static org.apache.hadoop.fs.FileContextTestHelper.checkFileStatus;
 import static org.apache.hadoop.fs.FileContextTestHelper.exists;
@@ -459,9 +460,9 @@ abstract public class ViewFsBaseTest {
     Assert.assertEquals(targetBL.length, viewBL.length);
     int i = 0;
     for (BlockLocation vbl : viewBL) {
-      Assert.assertEquals(vbl.toString(), targetBL[i].toString());
-      Assert.assertEquals(targetBL[i].getOffset(), vbl.getOffset());
-      Assert.assertEquals(targetBL[i].getLength(), vbl.getLength());
+      assertThat(vbl.toString()).isEqualTo(targetBL[i].toString());
+      assertThat(vbl.getOffset()).isEqualTo(targetBL[i].getOffset());
+      assertThat(vbl.getLength()).isEqualTo(targetBL[i].getLength());
       i++;     
     } 
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
index 722e9de..2f69093 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
@@ -30,9 +30,9 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.fail;
@@ -146,15 +146,16 @@ public class TestArrayFile {
       
       for (int i = 0; i < SIZE; i++) {
         nextWritable = (LongWritable)reader.next(nextWritable);
-        assertEquals(nextWritable.get(), i);
+        assertThat(nextWritable.get()).isEqualTo(i);
       }
         
       assertTrue("testArrayFileIteration seek error !!!",
           reader.seek(new LongWritable(6)));
       nextWritable = (LongWritable) reader.next(nextWritable);
-      assertTrue("testArrayFileIteration error !!!", reader.key() == 7);
-      assertTrue("testArrayFileIteration error !!!",
-          nextWritable.equals(new LongWritable(7)));
+      assertThat(reader.key()).withFailMessage(
+          "testArrayFileIteration error !!!").isEqualTo(7);
+      assertThat(nextWritable).withFailMessage(
+          "testArrayFileIteration error !!!").isEqualTo(new LongWritable(7));
       assertFalse("testArrayFileIteration error !!!",
           reader.seek(new LongWritable(SIZE + 5)));
       reader.close();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSync.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSync.java
index 363177b..5fbb083 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSync.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSync.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.io;
 
-import static org.junit.Assert.assertEquals;
-
 import java.io.IOException;
 import java.util.Random;
 
@@ -31,6 +29,8 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 /** Tests sync based seek reads/write intervals inside SequenceFiles. */
 public class TestSequenceFileSync {
   private static final int NUMRECORDS = 2000;
@@ -46,12 +46,10 @@ public class TestSequenceFileSync {
     val.clear();
     reader.sync(off);
     reader.next(key, val);
-    assertEquals(key.get(), expectedRecord);
+    assertThat(key.get()).isEqualTo(expectedRecord);
     final String test = String.format(REC_FMT, expectedRecord, expectedRecord);
-    assertEquals(
-        "Invalid value in iter " + iter + ": " + val,
-        0,
-        val.find(test, 0));
+    assertThat(val.find(test, 0)).withFailMessage(
+        "Invalid value in iter " + iter + ": " + val).isZero();
   }
 
   @Test
@@ -124,7 +122,7 @@ public class TestSequenceFileSync {
         SequenceFile.Writer.syncInterval(20*100)
     );
     // Ensure the custom sync interval value is set
-    assertEquals(writer.syncInterval, 20*100);
+    assertThat(writer.syncInterval).isEqualTo(20*100);
     try {
       writeSequenceFile(writer, NUMRECORDS);
       for (int i = 0; i < 5; i++) {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
index 9771fd1..59856a4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
@@ -26,6 +26,8 @@ import java.util.Random;
 import com.google.common.base.Charsets;
 import com.google.common.primitives.Bytes;
 import org.junit.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -232,10 +234,10 @@ public class TestText {
   @Test
   public void testFind() throws Exception {
     Text text = new Text("abcd\u20acbdcd\u20ac");
-    assertTrue(text.find("abd")==-1);
-    assertTrue(text.find("ac") ==-1);
-    assertTrue(text.find("\u20ac") == 4);
-    assertTrue(text.find("\u20ac", 5)==11);
+    assertThat(text.find("abd")).isEqualTo(-1);
+    assertThat(text.find("ac")).isEqualTo(-1);
+    assertThat(text.find("\u20ac")).isEqualTo(4);
+    assertThat(text.find("\u20ac", 5)).isEqualTo(11);
   }
 
   @Test
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
index f243b2a..f849d53 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 
 import org.junit.After;
-import org.junit.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -33,6 +32,9 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.fail;
+
 public class TestTFileUnsortedByteArrays {
   private static String ROOT = GenericTestUtils.getTestDir().getAbsolutePath();
 
@@ -84,152 +86,119 @@ public class TestTFileUnsortedByteArrays {
   // we still can scan records in an unsorted TFile
   @Test
   public void testFailureScannerWithKeys() throws IOException {
-    Reader reader =
-        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    Assert.assertFalse(reader.isSorted());
-    Assert.assertEquals((int) reader.getEntryCount(), 4);
-
-    try {
-      Scanner scanner =
-          reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
-      Assert
-          .fail("Failed to catch creating scanner with keys on unsorted file.");
-    }
-    catch (RuntimeException e) {
-    }
-    finally {
-      reader.close();
+    try (Reader reader =
+        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf)) {
+      assertThat(reader.isSorted()).isFalse();
+      assertThat(reader.getEntryCount()).isEqualTo(4);
+      try {
+        reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
+        fail("Failed to catch creating scanner with keys on unsorted file.");
+      } catch (RuntimeException expected) {
+      }
     }
   }
 
   // we still can scan records in an unsorted TFile
   @Test
   public void testScan() throws IOException {
-    Reader reader =
-        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    Assert.assertFalse(reader.isSorted());
-    Assert.assertEquals((int) reader.getEntryCount(), 4);
-
-    Scanner scanner = reader.createScanner();
-
-    try {
-
-      // read key and value
-      byte[] kbuf = new byte[BUF_SIZE];
-      int klen = scanner.entry().getKeyLength();
-      scanner.entry().getKey(kbuf);
-      Assert.assertEquals(new String(kbuf, 0, klen), "keyZ");
-
-      byte[] vbuf = new byte[BUF_SIZE];
-      int vlen = scanner.entry().getValueLength();
-      scanner.entry().getValue(vbuf);
-      Assert.assertEquals(new String(vbuf, 0, vlen), "valueZ");
-
-      scanner.advance();
-
-      // now try get value first
-      vbuf = new byte[BUF_SIZE];
-      vlen = scanner.entry().getValueLength();
-      scanner.entry().getValue(vbuf);
-      Assert.assertEquals(new String(vbuf, 0, vlen), "valueM");
-
-      kbuf = new byte[BUF_SIZE];
-      klen = scanner.entry().getKeyLength();
-      scanner.entry().getKey(kbuf);
-      Assert.assertEquals(new String(kbuf, 0, klen), "keyM");
-    }
-    finally {
-      scanner.close();
-      reader.close();
+    try (Reader reader =
+        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf)) {
+      assertThat(reader.isSorted()).isFalse();
+      assertThat(reader.getEntryCount()).isEqualTo(4);
+      try (Scanner scanner = reader.createScanner()) {
+        // read key and value
+        byte[] kbuf = new byte[BUF_SIZE];
+        int klen = scanner.entry().getKeyLength();
+        scanner.entry().getKey(kbuf);
+        assertThat(new String(kbuf, 0, klen)).isEqualTo("keyZ");
+
+        byte[] vbuf = new byte[BUF_SIZE];
+        int vlen = scanner.entry().getValueLength();
+        scanner.entry().getValue(vbuf);
+        assertThat(new String(vbuf, 0, vlen)).isEqualTo("valueZ");
+
+        scanner.advance();
+
+        // now try get value first
+        vbuf = new byte[BUF_SIZE];
+        vlen = scanner.entry().getValueLength();
+        scanner.entry().getValue(vbuf);
+        assertThat(new String(vbuf, 0, vlen)).isEqualTo("valueM");
+
+        kbuf = new byte[BUF_SIZE];
+        klen = scanner.entry().getKeyLength();
+        scanner.entry().getKey(kbuf);
+        assertThat(new String(kbuf, 0, klen)).isEqualTo("keyM");
+      }
     }
   }
 
   // we still can scan records in an unsorted TFile
   @Test
   public void testScanRange() throws IOException {
-    Reader reader =
-        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    Assert.assertFalse(reader.isSorted());
-    Assert.assertEquals((int) reader.getEntryCount(), 4);
-
-    Scanner scanner = reader.createScanner();
-
-    try {
-
-      // read key and value
-      byte[] kbuf = new byte[BUF_SIZE];
-      int klen = scanner.entry().getKeyLength();
-      scanner.entry().getKey(kbuf);
-      Assert.assertEquals(new String(kbuf, 0, klen), "keyZ");
-
-      byte[] vbuf = new byte[BUF_SIZE];
-      int vlen = scanner.entry().getValueLength();
-      scanner.entry().getValue(vbuf);
-      Assert.assertEquals(new String(vbuf, 0, vlen), "valueZ");
-
-      scanner.advance();
-
-      // now try get value first
-      vbuf = new byte[BUF_SIZE];
-      vlen = scanner.entry().getValueLength();
-      scanner.entry().getValue(vbuf);
-      Assert.assertEquals(new String(vbuf, 0, vlen), "valueM");
-
-      kbuf = new byte[BUF_SIZE];
-      klen = scanner.entry().getKeyLength();
-      scanner.entry().getKey(kbuf);
-      Assert.assertEquals(new String(kbuf, 0, klen), "keyM");
-    }
-    finally {
-      scanner.close();
-      reader.close();
+    try (Reader reader =
+        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf)) {
+      assertThat(reader.isSorted()).isFalse();
+      assertThat(reader.getEntryCount()).isEqualTo(4);
+
+      try (Scanner scanner = reader.createScanner()) {
+
+        // read key and value
+        byte[] kbuf = new byte[BUF_SIZE];
+        int klen = scanner.entry().getKeyLength();
+        scanner.entry().getKey(kbuf);
+        assertThat(new String(kbuf, 0, klen)).isEqualTo("keyZ");
+
+        byte[] vbuf = new byte[BUF_SIZE];
+        int vlen = scanner.entry().getValueLength();
+        scanner.entry().getValue(vbuf);
+        assertThat(new String(vbuf, 0, vlen)).isEqualTo("valueZ");
+
+        scanner.advance();
+
+        // now try get value first
+        vbuf = new byte[BUF_SIZE];
+        vlen = scanner.entry().getValueLength();
+        scanner.entry().getValue(vbuf);
+        assertThat(new String(vbuf, 0, vlen)).isEqualTo("valueM");
+
+        kbuf = new byte[BUF_SIZE];
+        klen = scanner.entry().getKeyLength();
+        scanner.entry().getKey(kbuf);
+        assertThat(new String(kbuf, 0, klen)).isEqualTo("keyM");
+      }
     }
   }
 
   @Test
   public void testFailureSeek() throws IOException {
-    Reader reader =
-        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    Scanner scanner = reader.createScanner();
-
-    try {
+    try (Reader reader = new Reader(fs.open(path),
+        fs.getFileStatus(path).getLen(), conf);
+        Scanner scanner = reader.createScanner()) {
       // can't find ceil
       try {
         scanner.lowerBound("keyN".getBytes());
-        Assert.fail("Cannot search in a unsorted TFile!");
-      }
-      catch (Exception e) {
-        // noop, expecting excetions
+        fail("Cannot search in a unsorted TFile!");
       }
-      finally {
+      catch (Exception expected) {
       }
 
       // can't find higher
       try {
         scanner.upperBound("keyA".getBytes());
-        Assert.fail("Cannot search higher in a unsorted TFile!");
+        fail("Cannot search higher in a unsorted TFile!");
       }
-      catch (Exception e) {
-        // noop, expecting excetions
-      }
-      finally {
+      catch (Exception expected) {
       }
 
       // can't seek
       try {
         scanner.seekTo("keyM".getBytes());
-        Assert.fail("Cannot search a unsorted TFile!");
-      }
-      catch (Exception e) {
-        // noop, expecting excetions
+        fail("Cannot search a unsorted TFile!");
       }
-      finally {
+      catch (Exception expected) {
       }
     }
-    finally {
-      scanner.close();
-      reader.close();
-    }
   }
 
   private void closeOutput() throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
index 69e6eb8..b7550f9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
@@ -33,6 +33,8 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 public class TestVLong {
   private static String ROOT = GenericTestUtils.getTestDir().getAbsolutePath();
   private Configuration conf;
@@ -70,8 +72,7 @@ public class TestVLong {
 
     FSDataInputStream in = fs.open(path);
     for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; ++i) {
-      long n = Utils.readVLong(in);
-      Assert.assertEquals(n, i);
+      assertThat(Utils.readVLong(in)).isEqualTo(i);
     }
     in.close();
     fs.delete(path, false);
@@ -85,8 +86,7 @@ public class TestVLong {
     out.close();
     FSDataInputStream in = fs.open(path);
     for (int i = Short.MIN_VALUE; i <= Short.MAX_VALUE; ++i) {
-      long n = Utils.readVLong(in);
-      Assert.assertEquals(n, ((long) i) << shift);
+      assertThat(Utils.readVLong(in)).isEqualTo(((long) i) << shift);
     }
     in.close();
     long ret = fs.getFileStatus(path).getLen();
@@ -165,7 +165,7 @@ public class TestVLong {
 
     FSDataInputStream in = fs.open(path);
     for (int i = 0; i < data.length; ++i) {
-      Assert.assertEquals(Utils.readVLong(in), data[i]);
+      assertThat(Utils.readVLong(in)).isEqualTo(data[i]);
     }
     in.close();
     fs.delete(path, false);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
index 79f75e6..e6a5f5e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
@@ -31,7 +31,13 @@ import static org.mockito.Mockito.times;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.junit.Before;
 import org.junit.Test;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
 
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
@@ -50,6 +56,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 public class TestFairCallQueue {
   private FairCallQueue<Schedulable> fcq;
 
@@ -85,17 +93,17 @@ public class TestFairCallQueue {
     Configuration conf = new Configuration();
     FairCallQueue<Schedulable> fairCallQueue;
     fairCallQueue = new FairCallQueue<Schedulable>(1, 1000, "ns", conf);
-    assertEquals(fairCallQueue.remainingCapacity(), 1000);
+    assertThat(fairCallQueue.remainingCapacity()).isEqualTo(1000);
     fairCallQueue = new FairCallQueue<Schedulable>(4, 1000, "ns", conf);
-    assertEquals(fairCallQueue.remainingCapacity(), 1000);
+    assertThat(fairCallQueue.remainingCapacity()).isEqualTo(1000);
     fairCallQueue = new FairCallQueue<Schedulable>(7, 1000, "ns", conf);
-    assertEquals(fairCallQueue.remainingCapacity(), 1000);
+    assertThat(fairCallQueue.remainingCapacity()).isEqualTo(1000);
     fairCallQueue = new FairCallQueue<Schedulable>(1, 1025, "ns", conf);
-    assertEquals(fairCallQueue.remainingCapacity(), 1025);
+    assertThat(fairCallQueue.remainingCapacity()).isEqualTo(1025);
     fairCallQueue = new FairCallQueue<Schedulable>(4, 1025, "ns", conf);
-    assertEquals(fairCallQueue.remainingCapacity(), 1025);
+    assertThat(fairCallQueue.remainingCapacity()).isEqualTo(1025);
     fairCallQueue = new FairCallQueue<Schedulable>(7, 1025, "ns", conf);
-    assertEquals(fairCallQueue.remainingCapacity(), 1025);
+    assertThat(fairCallQueue.remainingCapacity()).isEqualTo(1025);
   }
 
   @Test
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
index 1921a35..8254063 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
@@ -24,7 +24,6 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyInt;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
@@ -106,6 +105,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.event.Level;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 /** Unit tests for IPC. */
 public class TestIPC {
   public static final Logger LOG = LoggerFactory.getLogger(TestIPC.class);
@@ -1274,7 +1275,7 @@ public class TestIPC {
       retryProxy.dummyRun();
     } finally {
       // Check if dummyRun called only once
-      Assert.assertEquals(handler.invocations, 1);
+      assertThat(handler.invocations).isOne();
       Client.setCallIdAndRetryCount(0, 0, null);
       client.stop();
       server.stop();
@@ -1455,7 +1456,7 @@ public class TestIPC {
   @Test
   public void testClientGetTimeout() throws IOException {
     Configuration config = new Configuration();
-    assertEquals(Client.getTimeout(config), -1);
+    assertThat(Client.getTimeout(config)).isEqualTo(-1);
   }
 
   @Test(timeout=60000)
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
index fd6a7ae..3053f87 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -44,10 +43,10 @@ import java.io.IOException;
 import java.net.URISyntaxException;
 import java.util.concurrent.TimeoutException;
 
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.apache.hadoop.test.MetricsAsserts.assertCounterGt;
 import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Test for testing protocol buffer based RPC mechanism.
@@ -143,19 +142,19 @@ public class TestProtoBufRpc extends TestRpcBase {
     EchoRequestProto echoRequest = EchoRequestProto.newBuilder()
         .setMessage("hello").build();
     EchoResponseProto echoResponse = client.echo(null, echoRequest);
-    Assert.assertEquals(echoResponse.getMessage(), "hello");
+    assertThat(echoResponse.getMessage()).isEqualTo("hello");
     
     // Test error method - error should be thrown as RemoteException
     try {
       client.error(null, newEmptyRequest());
-      Assert.fail("Expected exception is not thrown");
+      fail("Expected exception is not thrown");
     } catch (ServiceException e) {
       RemoteException re = (RemoteException)e.getCause();
       RpcServerException rse = (RpcServerException) re
           .unwrapRemoteException(RpcServerException.class);
-      Assert.assertNotNull(rse);
-      Assert.assertTrue(re.getErrorCode().equals(
-          RpcErrorCodeProto.ERROR_RPC_SERVER));
+      assertThat(rse).isNotNull();
+      assertThat(re.getErrorCode())
+          .isEqualTo(RpcErrorCodeProto.ERROR_RPC_SERVER);
     }
   }
   
@@ -169,7 +168,7 @@ public class TestProtoBufRpc extends TestRpcBase {
     // Test echo method
     EchoResponseProto echoResponse = client.echo2(null,
         newEchoRequest("hello"));
-    Assert.assertEquals(echoResponse.getMessage(), "hello");
+    assertThat(echoResponse.getMessage()).isEqualTo("hello");
     
     // Ensure RPC metrics are updated
     MetricsRecordBuilder rpcMetrics = getMetrics(server.getRpcMetrics().name());
@@ -188,13 +187,13 @@ public class TestProtoBufRpc extends TestRpcBase {
     try {
       client.error2(null, newEmptyRequest());
     } catch (ServiceException se) {
-      Assert.assertTrue(se.getCause() instanceof RemoteException);
+      assertThat(se.getCause()).isInstanceOf(RemoteException.class);
       RemoteException re = (RemoteException) se.getCause();
-      Assert.assertTrue(re.getClassName().equals(
-          URISyntaxException.class.getName()));
-      Assert.assertTrue(re.getMessage().contains("testException"));
-      Assert.assertTrue(
-          re.getErrorCode().equals(RpcErrorCodeProto.ERROR_APPLICATION));
+      assertThat(re.getClassName())
+          .isEqualTo(URISyntaxException.class.getName());
+      assertThat(re.getMessage()).contains("testException");
+      assertThat(re.getErrorCode())
+          .isEqualTo(RpcErrorCodeProto.ERROR_APPLICATION);
     }
   }
   
@@ -205,12 +204,12 @@ public class TestProtoBufRpc extends TestRpcBase {
     // short message goes through
     EchoResponseProto echoResponse = client.echo2(null,
         newEchoRequest(shortString));
-    Assert.assertEquals(shortString, echoResponse.getMessage());
+    assertThat(echoResponse.getMessage()).isEqualTo(shortString);
     
     final String longString = StringUtils.repeat("X", 4096);
     try {
       client.echo2(null, newEchoRequest(longString));
-      Assert.fail("expected extra-long RPC to fail");
+      fail("expected extra-long RPC to fail");
     } catch (ServiceException se) {
       // expected
     }
@@ -231,7 +230,7 @@ public class TestProtoBufRpc extends TestRpcBase {
 
     // Ensure RPC metrics are updated
     RpcMetrics rpcMetrics = server.getRpcMetrics();
-    assertTrue(rpcMetrics.getProcessingSampleCount() > 999L);
+    assertThat(rpcMetrics.getProcessingSampleCount()).isGreaterThan(999L);
     long before = rpcMetrics.getRpcSlowCalls();
 
     // make a really slow call. Sleep sleeps for 1000ms
@@ -255,7 +254,7 @@ public class TestProtoBufRpc extends TestRpcBase {
 
     // Ensure RPC metrics are updated
     RpcMetrics rpcMetrics = server.getRpcMetrics();
-    assertTrue(rpcMetrics.getProcessingSampleCount() > 999L);
+    assertThat(rpcMetrics.getProcessingSampleCount()).isGreaterThan(999L);
     long before = rpcMetrics.getRpcSlowCalls();
 
     // make a really slow call. Sleep sleeps for 1000ms
@@ -264,6 +263,6 @@ public class TestProtoBufRpc extends TestRpcBase {
     long after = rpcMetrics.getRpcSlowCalls();
 
     // make sure we never called into Log slow RPC routine.
-    assertEquals(before, after);
+    assertThat(before).isEqualTo(after);
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
index 232481a..0da0b47 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
@@ -84,6 +84,7 @@ import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
 import static org.apache.hadoop.test.MetricsAsserts.assertCounterGt;
 import static org.apache.hadoop.test.MetricsAsserts.assertGauge;
@@ -486,14 +487,14 @@ public class TestRPC extends TestRpcBase {
               .setParam2(2).build();
       TestProtos.AddResponseProto addResponse =
           proxy.add(null, addRequest);
-      assertEquals(addResponse.getResult(), 3);
+      assertThat(addResponse.getResult()).isEqualTo(3);
 
       Integer[] integers = new Integer[] {1, 2};
       TestProtos.AddRequestProto2 addRequest2 =
           TestProtos.AddRequestProto2.newBuilder().addAllParams(
               Arrays.asList(integers)).build();
       addResponse = proxy.add2(null, addRequest2);
-      assertEquals(addResponse.getResult(), 3);
+      assertThat(addResponse.getResult()).isEqualTo(3);
 
       boolean caught = false;
       try {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
index ce481dc..1bad29e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
@@ -33,23 +33,20 @@ import java.util.Map;
 
 import javax.net.SocketFactory;
 
-import org.junit.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.SocksSocketFactory;
 import org.apache.hadoop.net.StandardSocketFactory;
 import org.junit.After;
-import org.junit.Before;
 import org.junit.Test;
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.fail;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 /**
  * test StandardSocketFactory and SocksSocketFactory NetUtils
  *
@@ -111,10 +108,12 @@ public class TestSocketFactory {
         .getDefaultSocketFactory(conf);
     dummyCache.put(defaultSocketFactory, toBeCached2);
 
-    Assert
-        .assertEquals("The cache contains two elements", 2, dummyCache.size());
-    Assert.assertEquals("Equals of both socket factory shouldn't be same",
-        defaultSocketFactory.equals(dummySocketFactory), false);
+    assertThat(dummyCache.size())
+        .withFailMessage("The cache contains two elements")
+        .isEqualTo(2);
+    assertThat(defaultSocketFactory)
+        .withFailMessage("Equals of both socket factory shouldn't be same")
+        .isNotEqualTo(dummySocketFactory);
 
     assertSame(toBeCached2, dummyCache.remove(defaultSocketFactory));
     dummyCache.put(defaultSocketFactory, toBeCached2);
@@ -184,14 +183,13 @@ public class TestSocketFactory {
         "localhost", 0));
 
     SocksSocketFactory templateWithProxy = new SocksSocketFactory(proxy);
-    assertFalse(templateWithoutProxy.equals(templateWithProxy));
+    assertThat(templateWithoutProxy).isNotEqualTo(templateWithProxy);
 
     Configuration configuration = new Configuration();
     configuration.set("hadoop.socks.server", "localhost:0");
 
     templateWithoutProxy.setConf(configuration);
-    assertTrue(templateWithoutProxy.equals(templateWithProxy));
-
+    assertThat(templateWithoutProxy).isEqualTo(templateWithProxy);
   }
 
   private void checkSocket(Socket socket) throws Exception {
@@ -200,8 +198,7 @@ public class TestSocketFactory {
     DataOutputStream out = new DataOutputStream(socket.getOutputStream());
     out.writeBytes("test\n");
     String answer = input.readLine();
-    assertEquals("TEST", answer);
-
+    assertThat(answer).isEqualTo("TEST");
   }
 
   /**
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java
index d4bc06a..11e2a9d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.ipc;
 
-import static org.junit.Assert.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;
+
 import org.junit.Test;
 
 import org.apache.hadoop.conf.Configuration;
@@ -68,47 +69,47 @@ public class TestWeightedRoundRobinMultiplexer {
     // Mux of size 1: 0 0 0 0 0, etc
     mux = new WeightedRoundRobinMultiplexer(1, "", new Configuration());
     for(int i = 0; i < 10; i++) {
-      assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
+      assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
     }
 
     // Mux of size 2: 0 0 1 0 0 1 0 0 1, etc
     mux = new WeightedRoundRobinMultiplexer(2, "", new Configuration());
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
 
     // Size 3: 4x0 2x1 1x2, etc
     mux = new WeightedRoundRobinMultiplexer(3, "", new Configuration());
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 2);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isEqualTo(2);
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
 
     // Size 4: 8x0 4x1 2x2 1x3
     mux = new WeightedRoundRobinMultiplexer(4, "", new Configuration());
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 2);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 2);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 3);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isEqualTo(2);
+    assertThat(mux.getAndAdvanceCurrentIndex()).isEqualTo(2);
+    assertThat(mux.getAndAdvanceCurrentIndex()).isEqualTo(3);
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
   }
 
   @Test
@@ -119,10 +120,10 @@ public class TestWeightedRoundRobinMultiplexer {
       "1", "1");
 
     mux = new WeightedRoundRobinMultiplexer(2, "test.custom", conf);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-    assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+    assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
 
     // 1x0 3x1 2x2
     conf.setStrings("test.custom." + IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY,
@@ -131,12 +132,12 @@ public class TestWeightedRoundRobinMultiplexer {
     mux = new WeightedRoundRobinMultiplexer(3, "test.custom", conf);
 
     for(int i = 0; i < 5; i++) {
-      assertEquals(mux.getAndAdvanceCurrentIndex(), 0);
-      assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-      assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-      assertEquals(mux.getAndAdvanceCurrentIndex(), 1);
-      assertEquals(mux.getAndAdvanceCurrentIndex(), 2);
-      assertEquals(mux.getAndAdvanceCurrentIndex(), 2);
+      assertThat(mux.getAndAdvanceCurrentIndex()).isZero();
+      assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+      assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+      assertThat(mux.getAndAdvanceCurrentIndex()).isOne();
+      assertThat(mux.getAndAdvanceCurrentIndex()).isEqualTo(2);
+      assertThat(mux.getAndAdvanceCurrentIndex()).isEqualTo(2);
     } // Ensure pattern repeats
 
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java
index 5c5f036..c7d8f60 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.metrics2.util;
 
-import static org.junit.Assert.*;
-
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
@@ -29,6 +27,8 @@ import java.util.Random;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 public class TestSampleQuantiles {
 
   static final Quantile[] quantiles = { new Quantile(0.50, 0.050),
@@ -49,24 +49,24 @@ public class TestSampleQuantiles {
   @Test
   public void testCount() throws IOException {
     // Counts start off zero
-    assertEquals(estimator.getCount(), 0);
-    assertEquals(estimator.getSampleCount(), 0);
+    assertThat(estimator.getCount()).isZero();
+    assertThat(estimator.getSampleCount()).isZero();
     
     // Snapshot should be null if there are no entries.
-    assertNull(estimator.snapshot());
+    assertThat(estimator.snapshot()).isNull();
 
     // Count increment correctly by 1
     estimator.insert(1337);
-    assertEquals(estimator.getCount(), 1);
+    assertThat(estimator.getCount()).isOne();
     estimator.snapshot();
-    assertEquals(estimator.getSampleCount(), 1);
+    assertThat(estimator.getSampleCount()).isOne();
     
-    assertEquals(
+    assertThat(estimator.toString()).isEqualTo(
         "50.00 %ile +/- 5.00%: 1337\n" +
         "75.00 %ile +/- 2.50%: 1337\n" +
         "90.00 %ile +/- 1.00%: 1337\n" +
         "95.00 %ile +/- 0.50%: 1337\n" +
-        "99.00 %ile +/- 0.10%: 1337", estimator.toString());
+        "99.00 %ile +/- 0.10%: 1337");
   }
 
   /**
@@ -79,9 +79,9 @@ public class TestSampleQuantiles {
       estimator.insert(i);
     }
     estimator.clear();
-    assertEquals(estimator.getCount(), 0);
-    assertEquals(estimator.getSampleCount(), 0);
-    assertNull(estimator.snapshot());
+    assertThat(estimator.getCount()).isZero();
+    assertThat(estimator.getSampleCount()).isZero();
+    assertThat(estimator.snapshot()).isNull();
   }
 
   /**
@@ -113,8 +113,8 @@ public class TestSampleQuantiles {
         System.out
             .println(String.format("Expected %d with error %d, estimated %d",
                 actual, error, estimate));
-        assertTrue(estimate <= actual + error);
-        assertTrue(estimate >= actual - error);
+        assertThat(estimate <= actual + error).isTrue();
+        assertThat(estimate >= actual - error).isTrue();
       }
     }
   }
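
The pair of boolean bound checks converted in the last hunk could also be expressed as a single range assertion; a sketch, assuming AssertJ's isBetween on numeric asserts and using made-up numbers, not what this commit does:

    import static org.assertj.core.api.Assertions.assertThat;

    import org.junit.Test;

    public class RangeAssertionSketch {

      @Test
      public void estimateWithinErrorBound() {
        long actual = 500;
        long error = 50;
        long estimate = 520;
        // Equivalent to asserting (estimate <= actual + error) and
        // (estimate >= actual - error) separately, but a failure reports the
        // offending value together with both bounds.
        assertThat(estimate).isBetween(actual - error, actual + error);
      }
    }
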
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java
index bba8152..4c471da 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java
@@ -42,11 +42,10 @@ import static org.junit.Assert.fail;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.security.Groups;
-import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.assertj.core.api.Assertions.assertThat;
 
 public class TestGroupsCaching {
   public static final Logger TESTLOG =
@@ -494,7 +493,7 @@ public class TestGroupsCaching {
     // Now get the cache entry - it should return immediately
     // with the old value and the cache will not have completed
     // a request to getGroups yet.
-    assertEquals(groups.getGroups("me").size(), 2);
+    assertThat(groups.getGroups("me").size()).isEqualTo(2);
     assertEquals(startingRequestCount, FakeGroupMapping.getRequestCount());
 
     // Now sleep for over the delay time and the request count should
@@ -502,7 +501,7 @@ public class TestGroupsCaching {
     Thread.sleep(110);
     assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount());
     // Another call to get groups should give 3 groups instead of 2
-    assertEquals(groups.getGroups("me").size(), 3);
+    assertThat(groups.getGroups("me").size()).isEqualTo(3);
   }
 
   @Test
@@ -532,7 +531,7 @@ public class TestGroupsCaching {
 
     // Now get the cache entry - it should block and return the new
     // 3 group value
-    assertEquals(groups.getGroups("me").size(), 3);
+    assertThat(groups.getGroups("me").size()).isEqualTo(3);
     assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount());
   }
 
@@ -567,7 +566,7 @@ public class TestGroupsCaching {
     // Now get the cache entry - it should return immediately
     // with the old value and the cache will not have completed
     // a request to getGroups yet.
-    assertEquals(groups.getGroups("me").size(), 2);
+    assertThat(groups.getGroups("me").size()).isEqualTo(2);
     assertEquals(startingRequestCount, FakeGroupMapping.getRequestCount());
     // Resume the getGroups operation and the cache can get refreshed
     FakeGroupMapping.resume();
@@ -577,14 +576,14 @@ public class TestGroupsCaching {
     waitForGroupCounters(groups, 0, 0, 0, 1);
     FakeGroupMapping.setThrowException(false);
     assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount());
-    assertEquals(groups.getGroups("me").size(), 2);
+    assertThat(groups.getGroups("me").size()).isEqualTo(2);
 
     // Now the 3rd call to getGroups above will have kicked off
     // another refresh that updates the cache, since it no longer gives
     // exception, we now expect the counter for success is 1.
     waitForGroupCounters(groups, 0, 0, 1, 1);
     assertEquals(startingRequestCount + 2, FakeGroupMapping.getRequestCount());
-    assertEquals(groups.getGroups("me").size(), 3);
+    assertThat(groups.getGroups("me").size()).isEqualTo(3);
   }
 
 
@@ -613,7 +612,7 @@ public class TestGroupsCaching {
     // be triggered which will fail to update the key, but the keys old value
     // will be retrievable until it is evicted after about 10 seconds.
     for(int i=0; i<9; i++) {
-      assertEquals(groups.getGroups("me").size(), 2);
+      assertThat(groups.getGroups("me").size()).isEqualTo(2);
       timer.advance(1 * 1000);
     }
     // Wait until the 11th second. The call to getGroups should throw
@@ -631,7 +630,7 @@ public class TestGroupsCaching {
     // Finally check groups are retrieve again after FakeGroupMapping
     // stops throw exceptions
     FakeGroupMapping.setThrowException(false);
-    assertEquals(groups.getGroups("me").size(), 2);
+    assertThat(groups.getGroups("me").size()).isEqualTo(2);
   }
 
   @Test
@@ -725,14 +724,14 @@ public class TestGroupsCaching {
     FakeGroupMapping.clearBlackList();
 
     // First populate the cash
-    assertEquals(groups.getGroups("me").size(), 2);
+    assertThat(groups.getGroups("me").size()).isEqualTo(2);
 
     // Advance the timer so a refresh is required
     timer.advance(2 * 1000);
 
     // This call should throw an exception
     FakeGroupMapping.setThrowException(true);
-    assertEquals(groups.getGroups("me").size(), 2);
+    assertThat(groups.getGroups("me").size()).isEqualTo(2);
   }
 
   @Test
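
The converted checks above assert on size(); where the value under test is a java.util collection (as the value returned by getGroups appears to be), AssertJ can also assert on the collection itself, which prints the elements on failure. A sketch with made-up data:

    import static org.assertj.core.api.Assertions.assertThat;

    import java.util.Arrays;
    import java.util.List;

    import org.junit.Test;

    public class CollectionSizeSketch {

      @Test
      public void sizeAssertedOnTheCollection() {
        List<String> groups = Arrays.asList("grp1", "grp2");
        // hasSize() lists the actual elements when it fails, which the
        // assertThat(groups.size()).isEqualTo(2) form cannot do.
        assertThat(groups).hasSize(2);
      }
    }
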
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java
index d589c3a..e6fdc2b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java
@@ -35,6 +35,8 @@ import org.junit.Test;
 import com.google.common.collect.BiMap;
 import com.google.common.collect.HashBiMap;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 public class TestShellBasedIdMapping {
   
   private static final Map<Integer, Integer> EMPTY_PASS_THROUGH_MAP =
@@ -295,18 +297,19 @@ public class TestShellBasedIdMapping {
   @Test
   public void testUserUpdateSetting() throws IOException {
     ShellBasedIdMapping iug = new ShellBasedIdMapping(new Configuration());
-    assertEquals(iug.getTimeout(),
+    assertThat(iug.getTimeout()).isEqualTo(
         IdMappingConstant.USERGROUPID_UPDATE_MILLIS_DEFAULT);
 
     Configuration conf = new Configuration();
     conf.setLong(IdMappingConstant.USERGROUPID_UPDATE_MILLIS_KEY, 0);
     iug = new ShellBasedIdMapping(conf);
-    assertEquals(iug.getTimeout(), IdMappingConstant.USERGROUPID_UPDATE_MILLIS_MIN);
+    assertThat(iug.getTimeout()).isEqualTo(
+        IdMappingConstant.USERGROUPID_UPDATE_MILLIS_MIN);
 
     conf.setLong(IdMappingConstant.USERGROUPID_UPDATE_MILLIS_KEY,
         IdMappingConstant.USERGROUPID_UPDATE_MILLIS_DEFAULT * 2);
     iug = new ShellBasedIdMapping(conf);
-    assertEquals(iug.getTimeout(),
+    assertThat(iug.getTimeout()).isEqualTo(
         IdMappingConstant.USERGROUPID_UPDATE_MILLIS_DEFAULT * 2);
   }
   
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java
index 7039001..8e1b82b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java
@@ -17,9 +17,7 @@
  */
 package org.apache.hadoop.security.authorize;
 
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 import java.util.Collection;
@@ -37,6 +35,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.verify;
@@ -187,29 +186,29 @@ public class TestAccessControlList {
     AccessControlList acl;
 
     acl = new AccessControlList("*");
-    assertEquals("All users are allowed", acl.toString());
+    assertThat(acl.toString()).isEqualTo("All users are allowed");
     validateGetAclString(acl);
 
     acl = new AccessControlList(" ");
-    assertEquals("No users are allowed", acl.toString());
+    assertThat(acl.toString()).isEqualTo("No users are allowed");
 
     acl = new AccessControlList("user1,user2");
-    assertEquals("Users [user1, user2] are allowed", acl.toString());
+    assertThat(acl.toString()).isEqualTo("Users [user1, user2] are allowed");
     validateGetAclString(acl);
 
     acl = new AccessControlList("user1,user2 ");// with space
-    assertEquals("Users [user1, user2] are allowed", acl.toString());
+    assertThat(acl.toString()).isEqualTo("Users [user1, user2] are allowed");
     validateGetAclString(acl);
 
     acl = new AccessControlList(" group1,group2");
-    assertTrue(acl.toString().equals(
-        "Members of the groups [group1, group2] are allowed"));
+    assertThat(acl.toString()).isEqualTo(
+        "Members of the groups [group1, group2] are allowed");
     validateGetAclString(acl);
 
     acl = new AccessControlList("user1,user2 group1,group2");
-    assertTrue(acl.toString().equals(
+    assertThat(acl.toString()).isEqualTo(
         "Users [user1, user2] and " +
-        "members of the groups [group1, group2] are allowed"));
+        "members of the groups [group1, group2] are allowed");
     validateGetAclString(acl);
   }
 
@@ -228,45 +227,45 @@ public class TestAccessControlList {
     
     acl = new AccessControlList("drwho tardis");
     users = acl.getUsers();
-    assertEquals(users.size(), 1);
-    assertEquals(users.iterator().next(), "drwho");
+    assertThat(users.size()).isOne();
+    assertThat(users.iterator().next()).isEqualTo("drwho");
     groups = acl.getGroups();
-    assertEquals(groups.size(), 1);
-    assertEquals(groups.iterator().next(), "tardis");
+    assertThat(groups.size()).isOne();
+    assertThat(groups.iterator().next()).isEqualTo("tardis");
     
     acl = new AccessControlList("drwho");
     users = acl.getUsers();
-    assertEquals(users.size(), 1);
-    assertEquals(users.iterator().next(), "drwho");
+    assertThat(users.size()).isOne();
+    assertThat(users.iterator().next()).isEqualTo("drwho");
     groups = acl.getGroups();
-    assertEquals(groups.size(), 0);
+    assertThat(groups.size()).isZero();
     
     acl = new AccessControlList("drwho ");
     users = acl.getUsers();
-    assertEquals(users.size(), 1);
-    assertEquals(users.iterator().next(), "drwho");
+    assertThat(users.size()).isOne();
+    assertThat(users.iterator().next()).isEqualTo("drwho");
     groups = acl.getGroups();
-    assertEquals(groups.size(), 0);
+    assertThat(groups.size()).isZero();
     
     acl = new AccessControlList(" tardis");
     users = acl.getUsers();
-    assertEquals(users.size(), 0);
+    assertThat(users.size()).isZero();
     groups = acl.getGroups();
-    assertEquals(groups.size(), 1);
-    assertEquals(groups.iterator().next(), "tardis");
+    assertThat(groups.size()).isOne();
+    assertThat(groups.iterator().next()).isEqualTo("tardis");
 
     Iterator<String> iter;    
     acl = new AccessControlList("drwho,joe tardis, users");
     users = acl.getUsers();
-    assertEquals(users.size(), 2);
+    assertThat(users.size()).isEqualTo(2);
     iter = users.iterator();
-    assertEquals(iter.next(), "drwho");
-    assertEquals(iter.next(), "joe");
+    assertThat(iter.next()).isEqualTo("drwho");
+    assertThat(iter.next()).isEqualTo("joe");
     groups = acl.getGroups();
-    assertEquals(groups.size(), 2);
+    assertThat(groups.size()).isEqualTo(2);
     iter = groups.iterator();
-    assertEquals(iter.next(), "tardis");
-    assertEquals(iter.next(), "users");
+    assertThat(iter.next()).isEqualTo("tardis");
+    assertThat(iter.next()).isEqualTo("users");
   }
 
   /**
@@ -278,58 +277,58 @@ public class TestAccessControlList {
     Collection<String> users;
     Collection<String> groups;
     acl = new AccessControlList(" ");
-    assertEquals(0, acl.getUsers().size());
-    assertEquals(0, acl.getGroups().size());
-    assertEquals(" ", acl.getAclString());
+    assertThat(acl.getUsers().size()).isZero();
+    assertThat(acl.getGroups().size()).isZero();
+    assertThat(acl.getAclString()).isEqualTo(" ");
     
     acl.addUser("drwho");
     users = acl.getUsers();
-    assertEquals(users.size(), 1);
-    assertEquals(users.iterator().next(), "drwho");
-    assertEquals("drwho ", acl.getAclString());
+    assertThat(users.size()).isOne();
+    assertThat(users.iterator().next()).isEqualTo("drwho");
+    assertThat(acl.getAclString()).isEqualTo("drwho ");
     
     acl.addGroup("tardis");
     groups = acl.getGroups();
-    assertEquals(groups.size(), 1);
-    assertEquals(groups.iterator().next(), "tardis");
-    assertEquals("drwho tardis", acl.getAclString());
+    assertThat(groups.size()).isOne();
+    assertThat(groups.iterator().next()).isEqualTo("tardis");
+    assertThat(acl.getAclString()).isEqualTo("drwho tardis");
     
     acl.addUser("joe");
     acl.addGroup("users");
     users = acl.getUsers();
-    assertEquals(users.size(), 2);
+    assertThat(users.size()).isEqualTo(2);
     Iterator<String> iter = users.iterator();
-    assertEquals(iter.next(), "drwho");
-    assertEquals(iter.next(), "joe");
+    assertThat(iter.next()).isEqualTo("drwho");
+    assertThat(iter.next()).isEqualTo("joe");
     groups = acl.getGroups();
-    assertEquals(groups.size(), 2);
+    assertThat(groups.size()).isEqualTo(2);
     iter = groups.iterator();
-    assertEquals(iter.next(), "tardis");
-    assertEquals(iter.next(), "users");
-    assertEquals("drwho,joe tardis,users", acl.getAclString());
+    assertThat(iter.next()).isEqualTo("tardis");
+    assertThat(iter.next()).isEqualTo("users");
+    assertThat(acl.getAclString()).isEqualTo("drwho,joe tardis,users");
 
     acl.removeUser("joe");
     acl.removeGroup("users");
     users = acl.getUsers();
-    assertEquals(users.size(), 1);
+    assertThat(users.size()).isOne();
     assertFalse(users.contains("joe"));
     groups = acl.getGroups();
-    assertEquals(groups.size(), 1);
+    assertThat(groups.size()).isOne();
     assertFalse(groups.contains("users"));
-    assertEquals("drwho tardis", acl.getAclString());
+    assertThat(acl.getAclString()).isEqualTo("drwho tardis");
     
     acl.removeGroup("tardis");
     groups = acl.getGroups();
-    assertEquals(0, groups.size());
+    assertThat(groups.size()).isZero();
     assertFalse(groups.contains("tardis"));
-    assertEquals("drwho ", acl.getAclString());
+    assertThat(acl.getAclString()).isEqualTo("drwho ");
     
     acl.removeUser("drwho");
-    assertEquals(0, users.size());
+    assertThat(users.size()).isZero();
     assertFalse(users.contains("drwho"));
-    assertEquals(0, acl.getGroups().size());
-    assertEquals(0, acl.getUsers().size());
-    assertEquals(" ", acl.getAclString());
+    assertThat(acl.getGroups().size()).isZero();
+    assertThat(acl.getUsers().size()).isZero();
+    assertThat(acl.getAclString()).isEqualTo(" ");
   }
   
   /**
@@ -345,8 +344,8 @@ public class TestAccessControlList {
     } catch (Throwable t) {
       th = t;
     }
-    assertNotNull(th);
-    assertTrue(th instanceof IllegalArgumentException);
+    assertThat(th).isNotNull();
+    assertThat(th).isInstanceOf(IllegalArgumentException.class);
     
     th = null;
     try {
@@ -354,24 +353,24 @@ public class TestAccessControlList {
     } catch (Throwable t) {
       th = t;
     }
-    assertNotNull(th);
-    assertTrue(th instanceof IllegalArgumentException);
+    assertThat(th).isNotNull();
+    assertThat(th).isInstanceOf(IllegalArgumentException.class);
     th = null;
     try {
     acl.removeUser(" * ");
     } catch (Throwable t) {
       th = t;
     }
-    assertNotNull(th);
-    assertTrue(th instanceof IllegalArgumentException);
+    assertThat(th).isNotNull();
+    assertThat(th).isInstanceOf(IllegalArgumentException.class);
     th = null;
     try {
     acl.removeGroup(" * ");
     } catch (Throwable t) {
       th = t;
     }
-    assertNotNull(th);
-    assertTrue(th instanceof IllegalArgumentException);
+    assertThat(th).isNotNull();
+    assertThat(th).isInstanceOf(IllegalArgumentException.class);
   }
   
   /**
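
The try/catch blocks converted just above keep their original shape; AssertJ also has a lambda-based form that drops the catch boilerplate. A sketch, illustrative only and not what this commit does:

    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    import org.junit.Test;

    public class ThrownBySketch {

      @Test
      public void exceptionAssertedWithoutTryCatch() {
        // assertThatThrownBy runs the lambda, captures the Throwable and fails
        // if nothing is thrown, replacing the isNotNull()/isInstanceOf() pair.
        assertThatThrownBy(() -> {
          throw new IllegalArgumentException("wildcard ACL entry");
        }).isInstanceOf(IllegalArgumentException.class);
      }
    }
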
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/http/TestXFrameOptionsFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/http/TestXFrameOptionsFilter.java
index 1c1d97d..0f9f691 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/http/TestXFrameOptionsFilter.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/http/TestXFrameOptionsFilter.java
@@ -31,6 +31,7 @@ import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.mockito.ArgumentMatchers.any;
 
 /**
@@ -84,8 +85,9 @@ public class TestXFrameOptionsFilter {
 
     filter.doFilter(request, response, chain);
 
-    Assert.assertEquals("X-Frame-Options count not equal to 1.",
-        headers.size(), 1);
+    assertThat(headers.size())
+        .withFailMessage("X-Frame-Options count not equal to 1.")
+        .isOne();
   }
 
   @Test
@@ -138,10 +140,12 @@ public class TestXFrameOptionsFilter {
 
     filter.doFilter(request, response, chain);
 
-    Assert.assertEquals("X-Frame-Options count not equal to 1.",
-        headers.size(), 1);
+    assertThat(headers.size())
+        .withFailMessage("X-Frame-Options count not equal to 1.")
+        .isOne();
 
-    Assert.assertEquals("X-Frame-Options count not equal to 1.",
-        headers.toArray()[0], "SAMEORIGIN");
+    assertThat(headers.toArray()[0])
+        .withFailMessage("X-Frame-Options count not equal to 1.")
+        .isEqualTo("SAMEORIGIN");
   }
 }
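
A note on the withFailMessage() calls introduced above: the call has to precede the terminating assertion, and it replaces AssertJ's default failure message entirely, whereas as() keeps the default message and adds a description. A small sketch with a made-up value:

    import static org.assertj.core.api.Assertions.assertThat;

    import org.junit.Test;

    public class FailMessageSketch {

      @Test
      public void describedAndOverriddenMessages() {
        int headerCount = 1;
        // Overrides the default failure message with the given text.
        assertThat(headerCount)
            .withFailMessage("X-Frame-Options count not equal to 1.")
            .isOne();
        // Prefixes a description while keeping the default expected/actual text.
        assertThat(headerCount).as("X-Frame-Options header count").isOne();
      }
    }
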
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
index df685cf..8bc881a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
@@ -50,6 +50,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.*;
 
 public class TestDelegationToken {
@@ -266,17 +267,17 @@ public class TestDelegationToken {
             3*1000, 1*1000, 3600000);
     try {
       dtSecretManager.startThreads();
-      Assert.assertEquals(dtSecretManager.getCurrentTokensSize(), 0);
+      assertThat(dtSecretManager.getCurrentTokensSize()).isZero();
       final Token<TestDelegationTokenIdentifier> token1 =
           generateDelegationToken(dtSecretManager, "SomeUser", "JobTracker");
-      Assert.assertEquals(dtSecretManager.getCurrentTokensSize(), 1);
+      assertThat(dtSecretManager.getCurrentTokensSize()).isOne();
       final Token<TestDelegationTokenIdentifier> token2 =
           generateDelegationToken(dtSecretManager, "SomeUser", "JobTracker");
-      Assert.assertEquals(dtSecretManager.getCurrentTokensSize(), 2);
+      assertThat(dtSecretManager.getCurrentTokensSize()).isEqualTo(2);
       dtSecretManager.cancelToken(token1, "JobTracker");
-      Assert.assertEquals(dtSecretManager.getCurrentTokensSize(), 1);
+      assertThat(dtSecretManager.getCurrentTokensSize()).isOne();
       dtSecretManager.cancelToken(token2, "JobTracker");
-      Assert.assertEquals(dtSecretManager.getCurrentTokensSize(), 0);
+      assertThat(dtSecretManager.getCurrentTokensSize()).isZero();
     } finally {
       dtSecretManager.stopThreads();
     }
@@ -386,7 +387,7 @@ public class TestDelegationToken {
 
       //after rolling, the length of the keys list must increase
       int currNumKeys = dtSecretManager.getAllKeys().length;
-      Assert.assertEquals((currNumKeys - prevNumKeys) >= 1, true);
+      assertThat(currNumKeys - prevNumKeys).isGreaterThanOrEqualTo(1);
       
       //after rolling, the token that was generated earlier must
       //still be valid (retrievePassword will fail if the token
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java
index 19f213d..c043d1e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java
@@ -27,7 +27,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /** Testing {@link LightWeightResizableGSet} */
 public class TestLightWeightResizableGSet {
@@ -132,23 +132,23 @@ public class TestLightWeightResizableGSet {
     final LightWeightResizableGSet<TestKey, TestElement> set =
         new LightWeightResizableGSet<TestKey, TestElement>();
 
-    assertEquals(set.size(), 0);
+    assertThat(set.size()).isZero();
 
     // put all elements
     for (int i = 0; i < elements.length; i++) {
       TestElement element = set.put(elements[i]);
-      assertTrue(element == null);
+      assertThat(element).isNull();
     }
 
     // check the set size
-    assertEquals(set.size(), elements.length);
+    assertThat(set.size()).isEqualTo(elements.length);
 
     // check all elements exist in the set and the data is correct
     for (int i = 0; i < elements.length; i++) {
-      assertTrue(set.contains(elements[i]));
+      assertThat(set.contains(elements[i])).isTrue();
 
       TestElement element = set.get(elements[i]);
-      assertEquals(elements[i].getData(), element.getData());
+      assertThat(elements[i].getData()).isEqualTo(element.getData());
     }
 
     TestKey[] keys = getKeys(elements);
@@ -157,39 +157,38 @@ public class TestLightWeightResizableGSet {
     // update the set
     for (int i = 0; i < newElements.length; i++) {
       TestElement element = set.put(newElements[i]);
-      assertTrue(element != null);
+      assertThat(element).isNotNull();
     }
 
     // check the set size
-    assertEquals(set.size(), elements.length);
+    assertThat(set.size()).isEqualTo(elements.length);
 
     // check all elements exist in the set and the data is updated to new value
     for (int i = 0; i < keys.length; i++) {
-      assertTrue(set.contains(keys[i]));
+      assertThat(set.contains(keys[i])).isTrue();
 
       TestElement element = set.get(keys[i]);
-      assertEquals(newElements[i].getData(), element.getData());
+      assertThat(newElements[i].getData()).isEqualTo(element.getData());
     }
 
     // test LightWeightHashGSet#values
     Collection<TestElement> cElements = set.values();
-    assertEquals(cElements.size(), elements.length);
+    assertThat(cElements.size()).isEqualTo(elements.length);
     for (TestElement element : cElements) {
-      assertTrue(set.contains(element));
+      assertThat(set.contains(element)).isTrue();
     }
 
     // remove elements
     for (int i = 0; i < keys.length; i++) {
       TestElement element = set.remove(keys[i]);
-
-      assertTrue(element != null);
+      assertThat(element).isNotNull();
 
       // the element should not exist after remove
-      assertFalse(set.contains(keys[i]));
+      assertThat(set.contains(keys[i])).isFalse();
     }
 
     // check the set size
-    assertEquals(set.size(), 0);
+    assertThat(set.size()).isZero();
   }
 
   @Test(timeout = 60000)
@@ -198,33 +197,33 @@ public class TestLightWeightResizableGSet {
     final LightWeightResizableGSet<TestKey, TestElement> set =
         new LightWeightResizableGSet<TestKey, TestElement>();
 
-    assertEquals(set.size(), 0);
+    assertThat(set.size()).isZero();
 
     // put all elements
     for (int i = 0; i < elements.length; i++) {
       TestElement element = set.put(elements[i]);
-      assertTrue(element == null);
+      assertThat(element).isNull();
     }
 
     // check the set size
-    assertEquals(set.size(), elements.length);
+    assertThat(set.size()).isEqualTo(elements.length);
 
     // remove all through clear
     {
       set.clear();
-      assertEquals(set.size(), 0);
+      assertThat(set.size()).isZero();
 
       // check all elements removed
       for (int i = 0; i < elements.length; i++) {
-        assertFalse(set.contains(elements[i]));
+        assertThat(set.contains(elements[i])).isFalse();
       }
-      assertFalse(set.iterator().hasNext());
+      assertThat(set.iterator().hasNext()).isFalse();
     }
 
     // put all elements back
     for (int i = 0; i < elements.length; i++) {
       TestElement element = set.put(elements[i]);
-      assertTrue(element == null);
+      assertThat(element).isNull();
     }
 
     // remove all through iterator
@@ -232,22 +231,22 @@ public class TestLightWeightResizableGSet {
       for (Iterator<TestElement> iter = set.iterator(); iter.hasNext(); ) {
         TestElement element = iter.next();
         // element should be there before removing
-        assertTrue(set.contains(element));
+        assertThat(set.contains(element)).isTrue();
         iter.remove();
         // element should not be there now
-        assertFalse(set.contains(element));
+        assertThat(set.contains(element)).isFalse();
       }
 
       // the deleted elements should not be there
       for (int i = 0; i < elements.length; i++) {
-        assertFalse(set.contains(elements[i]));
+        assertThat(set.contains(elements[i])).isFalse();
       }
 
       // iterator should not have next
-      assertFalse(set.iterator().hasNext());
+      assertThat(set.iterator().hasNext()).isFalse();
 
       // check the set size
-      assertEquals(set.size(), 0);
+      assertThat(set.size()).isZero();
     }
   }
 }
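
LightWeightResizableGSet itself is not a java.util.Collection, so the contains() checks above remain boolean assertions; for the values() view, which is a Collection, containment could be asserted directly. A sketch with a plain HashSet standing in:

    import static org.assertj.core.api.Assertions.assertThat;

    import java.util.Collection;
    import java.util.HashSet;

    import org.junit.Test;

    public class ContainsSketch {

      @Test
      public void containmentAssertedOnTheCollection() {
        Collection<String> elements = new HashSet<>();
        elements.add("k1");
        // Failure output lists the collection contents, unlike
        // assertThat(elements.contains("k1")).isTrue().
        assertThat(elements).contains("k1");
      }
    }
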
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestChildReaper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestChildReaper.java
index 11b254f..9604718 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestChildReaper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestChildReaper.java
@@ -26,13 +26,14 @@ import org.apache.curator.retry.RetryOneTime;
 import org.apache.curator.test.Timing;
 import org.apache.zookeeper.data.Stat;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
 import java.net.BindException;
 import java.util.Random;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 /**
  * This is a copy of Curator 2.7.1's TestChildReaper class, with minor
  * modifications to make it work with JUnit (some setup code taken from
@@ -90,7 +91,7 @@ public class TestChildReaper
       timing.forWaiting().sleepABit();
 
       Stat    stat = client.checkExists().forPath("/test");
-      Assert.assertEquals(stat.getNumChildren(), nonEmptyNodes);
+      assertThat(stat.getNumChildren()).isEqualTo(nonEmptyNodes);
     }
     finally
     {
@@ -120,7 +121,7 @@ public class TestChildReaper
       timing.forWaiting().sleepABit();
 
       Stat    stat = client.checkExists().forPath("/test");
-      Assert.assertEquals(stat.getNumChildren(), 0);
+      assertThat(stat.getNumChildren()).isZero();
     }
     finally
     {
@@ -153,11 +154,11 @@ public class TestChildReaper
       timing.forWaiting().sleepABit();
 
       Stat    stat = client.checkExists().forPath("/test1");
-      Assert.assertEquals(stat.getNumChildren(), 0);
+      assertThat(stat.getNumChildren()).isZero();
       stat = client.checkExists().forPath("/test2");
-      Assert.assertEquals(stat.getNumChildren(), 0);
+      assertThat(stat.getNumChildren()).isZero();
       stat = client.checkExists().forPath("/test3");
-      Assert.assertEquals(stat.getNumChildren(), 10);
+      assertThat(stat.getNumChildren()).isEqualTo(10);
     }
     finally
     {
@@ -193,11 +194,11 @@ public class TestChildReaper
       timing.forWaiting().sleepABit();
 
       Stat    stat = client.checkExists().forPath("/test");
-      Assert.assertEquals(stat.getNumChildren(), 0);
+      assertThat(stat.getNumChildren()).isZero();
 
       stat = client.usingNamespace(null).checkExists().forPath("/foo/test");
-      Assert.assertNotNull(stat);
-      Assert.assertEquals(stat.getNumChildren(), 0);
+      assertThat(stat).isNotNull();
+      assertThat(stat.getNumChildren()).isZero();
     }
     finally
     {
diff --git a/hadoop-common-project/hadoop-nfs/pom.xml b/hadoop-common-project/hadoop-nfs/pom.xml
index f3ef25a..e0fedaf 100644
--- a/hadoop-common-project/hadoop-nfs/pom.xml
+++ b/hadoop-common-project/hadoop-nfs/pom.xml
@@ -97,6 +97,11 @@
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.assertj</groupId>
+      <artifactId>assertj-core</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>
diff --git a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestFileHandle.java b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestFileHandle.java
index 5391664..70875c2 100644
--- a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestFileHandle.java
+++ b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestFileHandle.java
@@ -17,24 +17,23 @@
  */
 package org.apache.hadoop.nfs.nfs3;
 
-import org.junit.Assert;
-
-import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 import org.junit.Test;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 public class TestFileHandle {
   @Test
   public void testConstructor() {
     FileHandle handle = new FileHandle(1024);
     XDR xdr = new XDR();
     handle.serialize(xdr);
-    Assert.assertEquals(handle.getFileId(), 1024);
+    assertThat(handle.getFileId()).isEqualTo(1024);
 
     // Deserialize it back 
     FileHandle handle2 = new FileHandle();
     handle2.deserialize(xdr.asReadOnlyWrap());
-    Assert.assertEquals("Failed: Assert 1024 is id ", 1024, 
-            handle.getFileId());
+    assertThat(handle.getFileId())
+        .withFailMessage("Failed: Assert 1024 is id ").isEqualTo(1024);
   }
 }

