http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
index 14253fe..2adb818 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
@@ -170,9 +170,15 @@ public class TestFastFail {
          */
         public Boolean call() throws Exception {
           try (Table table = connection.getTable(TableName.valueOf(tableName))) {
-            Thread.sleep(Math.abs(random.nextInt()) % 250); // Add some jitter here
-            byte[] row = longToByteArrayKey(Math.abs(random.nextLong())
-                % numRows);
+            // Add some jitter here
+            int sleep = random.nextInt(250);
+            Thread.sleep(sleep);
+            long key = random.nextLong();
+            if (key < 0) {
+              key = -key;
+            }
+            key %= numRows;
+            byte[] row = longToByteArrayKey(key);
             Get g = new Get(row);
             g.addColumn(FAMILY, QUALIFIER);
             try {
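
For context (not part of the patch): the TestFastFail hunk above replaces Math.abs(random.nextInt()) % 250 with random.nextInt(250) and negates negative long keys by hand, because Math.abs(Integer.MIN_VALUE) and Math.abs(Long.MIN_VALUE) both return negative values. A minimal sketch of the same pattern, with hypothetical names:

import java.util.Random;

public class BoundedRandomSketch {
  private static final Random random = new Random();

  // Jitter: nextInt(bound) is uniform in [0, bound) and cannot go negative,
  // unlike Math.abs(random.nextInt()) % bound, which overflows for Integer.MIN_VALUE.
  static int jitterMillis() {
    return random.nextInt(250);
  }

  // Row key: negate negative values explicitly before taking the modulus,
  // mirroring what the patch does for random.nextLong().
  static long randomRow(long numRows) {
    long key = random.nextLong();
    if (key < 0) {
      key = -key;
    }
    return key % numRows;
  }
}
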
http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java index 6622ae9..a5e3a65 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java @@ -885,9 +885,9 @@ public class TestHCM { Thread t = new Thread() { @Override public void run() { - synchronized (syncBlockingFilter) { + synchronized (lock) { try { - syncBlockingFilter.wait(); + lock.wait(); } catch (InterruptedException e) { throw new RuntimeException(e); } @@ -914,6 +914,7 @@ public class TestHCM { table.close(); } + protected static final Object lock = new Object(); protected static final AtomicBoolean syncBlockingFilter = new AtomicBoolean(false); public static class BlockingFilter extends FilterBase { @@ -921,8 +922,8 @@ public class TestHCM { public boolean filterRowKey(byte[] buffer, int offset, int length) throws IOException { int i = 0; while (i++ < 1000 && !syncBlockingFilter.get()) { - synchronized (syncBlockingFilter) { - syncBlockingFilter.notifyAll(); + synchronized (lock) { + lock.notifyAll(); } Threads.sleep(100); } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java index 1d5605e..2edabc5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java @@ -152,7 +152,7 @@ public class TestHTableMultiplexerFlushCache { // Find a new server to move that region to for (int i = 0; i < SLAVES; i++) { HRegionServer rs = hbaseCluster.getRegionServer(0); - if (!rs.getServerName().equals(originalServer.getServerName())) { + if (!rs.getServerName().equals(originalServer)) { newServer = rs.getServerName(); break; } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java index d18f560..4a80a26 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java @@ -713,12 +713,15 @@ public class TestMultiParallel { int retryNum = 10; Result[] results = null; do { + boolean finished = false; results = table.get(gets); - boolean finished = true; - for (Result result : results) { - if (result.isEmpty()) { - finished = false; - break; + if (results != null) { + finished = true; + for (Result result : results) { + if (result.isEmpty()) { + finished = false; + break; + } } } if (finished) { 
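
For context (not part of the patch): the TestHCM hunks above stop synchronizing on the AtomicBoolean itself (the pattern FindBugs flags as JLM_JSR166_UTILCONCURRENT_MONITORENTER) and introduce a dedicated lock object for wait()/notifyAll(). A minimal sketch of that separation, with hypothetical names; the while-loop re-check against spurious wakeups is an extra precaution, not something the test adds:

import java.util.concurrent.atomic.AtomicBoolean;

public class WaitNotifySketch {
  // Plain Object used only as a monitor; the AtomicBoolean only carries state.
  private static final Object lock = new Object();
  private static final AtomicBoolean ready = new AtomicBoolean(false);

  static void awaitReady() throws InterruptedException {
    synchronized (lock) {
      while (!ready.get()) {   // re-check the condition after every wakeup
        lock.wait();
      }
    }
  }

  static void signalReady() {
    ready.set(true);
    synchronized (lock) {
      lock.notifyAll();
    }
  }
}
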
http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMvccConsistentScanner.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMvccConsistentScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMvccConsistentScanner.java index 41b5a9c..b68bd95 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMvccConsistentScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMvccConsistentScanner.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.client; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; import java.io.IOException; @@ -93,6 +94,7 @@ public class TestMvccConsistentScanner { break; } } + assertNotNull(rs); UTIL.getHBaseAdmin().move(region.getEncodedNameAsBytes(), Bytes.toBytes(rs.getServerName().getServerName())); while (UTIL.getRSForFirstRegionInTable(tableName) != rs) { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java index 2988357..bbb9e75 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java @@ -200,8 +200,7 @@ public class TestAggregateProtocol { try { rowCount = aClient.rowCount(TEST_TABLE, ci, scan); } catch (Throwable e) { - myLog.error("Exception thrown in the invalidRange method" - + e.getStackTrace()); + myLog.error("Exception thrown in the invalidRange method", e); } assertEquals(-1, rowCount); } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java index 54544dd..3385d0d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java @@ -102,6 +102,9 @@ public class TestMasterCoprocessorExceptionWithAbort { private boolean postStartMasterCalled; @Override + @SuppressWarnings("null") + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH", + justification="Preconditions checks insure we are not going to dereference a null value") public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> env, HTableDescriptor desc, HRegionInfo[] regions) throws IOException { // cause a NullPointerException and don't catch it: this will cause the 
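
For context (not part of the patch): the TestAggregateProtocol hunk above passes the Throwable to the logger instead of concatenating e.getStackTrace(), which only appends the array's toString() rather than the actual trace. A minimal commons-logging sketch; doWork() is a hypothetical placeholder for the failing call:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class LoggingSketch {
  private static final Log LOG = LogFactory.getLog(LoggingSketch.class);

  void runQuery() {
    try {
      doWork();
    } catch (Throwable e) {
      // The two-argument form logs the message and the full stack trace.
      LOG.error("Exception thrown in the invalidRange method", e);
    }
  }

  private void doWork() {
    // hypothetical helper standing in for aClient.rowCount(...)
  }
}
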
http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java index 08d1131..149e7f5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java @@ -77,8 +77,10 @@ public class TestMasterCoprocessorExceptionWithRemove { private boolean startCalled; private boolean postStartMasterCalled; - @SuppressWarnings("null") @Override + @SuppressWarnings("null") + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH", + justification="Preconditions checks insure we are not going to dereference a null value") public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> env, HTableDescriptor desc, HRegionInfo[] regions) throws IOException { // Cause a NullPointerException and don't catch it: this should cause the http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java index 0b7c946..53c0029 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java @@ -23,8 +23,6 @@ import static org.junit.Assert.fail; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -51,8 +49,7 @@ import org.junit.experimental.categories.Category; */ @Category(MediumTests.class) public class TestRegionServerCoprocessorExceptionWithAbort { - private static final Log LOG = LogFactory.getLog( - TestRegionServerCoprocessorExceptionWithAbort.class); + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final TableName TABLE_NAME = TableName.valueOf("observed_table"); @@ -138,8 +135,10 @@ public class TestRegionServerCoprocessorExceptionWithAbort { } public static class FailedInitializationObserver extends SimpleRegionObserver { - @SuppressWarnings("null") @Override + @SuppressWarnings("null") + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH", + justification="Preconditions checks insure we are not going to dereference a null value") public void start(CoprocessorEnvironment e) throws IOException { // Trigger a NPE to fail the coprocessor Integer i = null; @@ -148,8 +147,10 @@ public class TestRegionServerCoprocessorExceptionWithAbort { } public static class BuggyRegionObserver extends SimpleRegionObserver { - 
@SuppressWarnings("null") @Override + @SuppressWarnings("null") + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH", + justification="Preconditions checks insure we are not going to dereference a null value") public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c, final Put put, final WALEdit edit, final Durability durability) { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java index 69e3d80..22b3630 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java @@ -50,8 +50,10 @@ import org.junit.experimental.categories.Category; @Category(MediumTests.class) public class TestRegionServerCoprocessorExceptionWithRemove { public static class BuggyRegionObserver extends SimpleRegionObserver { - @SuppressWarnings("null") @Override + @SuppressWarnings("null") + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH", + justification="Preconditions checks insure we are not going to dereference a null value") public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c, final Put put, final WALEdit edit, final Durability durability) { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java index 25496ed..b9724f3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java @@ -39,6 +39,8 @@ import org.junit.experimental.categories.Category; import static org.mockito.Mockito.*; @Category(SmallTests.class) +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Use of an atomic type both as monitor and condition variable is intended") public class TestExecutorService { private static final Log LOG = LogFactory.getLog(TestExecutorService.class); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java index a64c1e8..0a627d9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java @@ -242,12 +242,15 @@ public class HttpServerFunctionalTest extends Assert { */ protected static void deleteRecursively(File d) { if 
(d.isDirectory()) { - for (String name : d.list()) { - File child = new File(d, name); - if (child.isFile()) { - child.delete(); - } else { - deleteRecursively(child); + String[] dirs = d.list(); + if (dirs != null) { + for (String name: dirs) { + File child = new File(d, name); + if (child.isFile()) { + child.delete(); + } else { + deleteRecursively(child); + } } } } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java index 1736faa..da7f1aa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java @@ -271,7 +271,7 @@ public class TestPrefixTreeEncoding { int batchId, boolean partial, boolean useTags, PrefixTreeCodec encoder, HFileBlockEncodingContext blkEncodingCtx, DataOutputStream userDataStream) throws Exception { for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) { - if (partial && i / 10 % 2 == 1) + if (partial && i / 10 % 2 != 0) continue; for (int j = 0; j < NUM_COLS_PER_ROW; ++j) { if (!useTags) { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java index 00ada5e..c677ccf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java @@ -347,7 +347,7 @@ public class TestHFileBlockIndex { while (msgPrefix.length() + padding.length() < 70) padding.append(' '); msgPrefix += padding; - if (i % 2 == 1) { + if (i % 2 != 0) { dos.writeInt(curAllEntriesSize); secondaryIndexEntries[i] = curAllEntriesSize; LOG.info(msgPrefix + "secondary index entry #" + ((i - 1) / 2) + @@ -409,7 +409,7 @@ public class TestHFileBlockIndex { int expectedResult; int referenceItem; - if (i % 2 == 1) { + if (i % 2 != 0) { // This key is in the array we search as the element (i - 1) / 2. Make // sure we find it. 
expectedResult = (i - 1) / 2; http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java index 49b3fe3..014a796 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java @@ -20,14 +20,12 @@ package org.apache.hadoop.hbase.io.hfile.bucket; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Random; @@ -92,7 +90,6 @@ public class TestBucketCache { final int writeThreads = BucketCache.DEFAULT_WRITER_THREADS; final int writerQLen = BucketCache.DEFAULT_WRITER_QUEUE_ITEMS; String ioEngineName = "heap"; - String persistencePath = null; private class MockedBucketCache extends BucketCache { @@ -126,7 +123,7 @@ public class TestBucketCache { public void setup() throws FileNotFoundException, IOException { cache = new MockedBucketCache(ioEngineName, capacitySize, constructedBlockSize, - constructedBlockSizes, writeThreads, writerQLen, persistencePath); + constructedBlockSizes, writeThreads, writerQLen, null); } @After @@ -288,7 +285,7 @@ public class TestBucketCache { conf.setFloat(BucketCache.MEMORY_FACTOR_CONFIG_NAME, 0.2f); BucketCache cache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize, - constructedBlockSizes, writeThreads, writerQLen, persistencePath, 100, conf); + constructedBlockSizes, writeThreads, writerQLen, null, 100, conf); validateGetPartitionSize(cache, 0.1f, 0.5f); validateGetPartitionSize(cache, 0.7f, 0.5f); @@ -306,7 +303,7 @@ public class TestBucketCache { conf.setFloat(BucketCache.MEMORY_FACTOR_CONFIG_NAME, 0.2f); BucketCache cache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize, - constructedBlockSizes, writeThreads, writerQLen, persistencePath, 100, conf); + constructedBlockSizes, writeThreads, writerQLen, null, 100, conf); assertEquals(BucketCache.ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getAcceptableFactor(), 0.9f, 0); assertEquals(BucketCache.MIN_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getMinFactor(), 0.5f, 0); @@ -366,8 +363,8 @@ public class TestBucketCache { for (String configName : configNames) { conf.setFloat(configName, configMap.get(configName)[i]); } - BucketCache cache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize, - constructedBlockSizes, writeThreads, writerQLen, persistencePath, 100, conf); + new BucketCache(ioEngineName, capacitySize, constructedBlockSize, + constructedBlockSizes, writeThreads, writerQLen, null, 100, conf); assertTrue("Created BucketCache and expected it to succeed: " + expectSuccess[i] + ", but it actually was: " + !expectSuccess[i], expectSuccess[i]); } catch (IllegalArgumentException e) { assertFalse("Created BucketCache and expected it to succeed: " + expectSuccess[i] + ", but it actually was: " + !expectSuccess[i], expectSuccess[i]); 
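
For context (not part of the patch): the HttpServerFunctionalTest.deleteRecursively change a few hunks above adds a null check on File.list(), which returns null when the path is not a directory or an I/O error occurs. A minimal sketch of just the directory branch shown in that hunk:

import java.io.File;

public class DeleteRecursivelySketch {
  // File.list() can return null, so check the result before iterating.
  static void deleteRecursively(File d) {
    if (d.isDirectory()) {
      String[] names = d.list();
      if (names != null) {
        for (String name : names) {
          File child = new File(d, name);
          if (child.isFile()) {
            child.delete();
          } else {
            deleteRecursively(child);
          }
        }
      }
    }
  }
}
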
http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java index 3bb188d..dedc568 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java @@ -43,6 +43,7 @@ import java.util.List; import static org.junit.Assert.assertTrue; @Category({ LargeTests.class }) +@edu.umd.cs.findbugs.annotations.SuppressWarnings("NM_SAME_SIMPLE_NAME_AS_SUPERCLASS") public class TestMultiTableSnapshotInputFormat extends org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java index b4b8056..5c93c0d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java @@ -38,7 +38,6 @@ import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Objects; import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.any; @@ -120,13 +119,41 @@ public class TestMultiTableSnapshotInputFormatImpl { } @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((startRow == null) ? 0 : startRow.hashCode()); + result = prime * result + ((stopRow == null) ? 
0 : stopRow.hashCode()); + return result; + } + + @Override public boolean equals(Object obj) { - if (!(obj instanceof ScanWithEquals)) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + ScanWithEquals other = (ScanWithEquals) obj; + if (startRow == null) { + if (other.startRow != null) { + return false; + } + } else if (!startRow.equals(other.startRow)) { + return false; + } + if (stopRow == null) { + if (other.stopRow != null) { + return false; + } + } else if (!stopRow.equals(other.stopRow)) { return false; } - ScanWithEquals otherScan = (ScanWithEquals) obj; - return Objects.equals(this.startRow, otherScan.startRow) && Objects - .equals(this.stopRow, otherScan.stopRow); + return true; } @Override http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java index 1cd2432..40bab4b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java @@ -156,7 +156,7 @@ public class TestMultithreadedTableMapper { Bytes.toString(table.getTableName()), IdentityTableReducer.class, job); FileOutputFormat.setOutputPath(job, new Path("test")); - LOG.info("Started " + table.getTableName()); + LOG.info("Started " + table.getName().getNameAsString()); assertTrue(job.waitForCompletion(true)); LOG.info("After map/reduce completion"); // verify map-reduce results http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java index a6c1214..ae52e86 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java @@ -87,41 +87,45 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa @Test public void testGetBestLocations() throws IOException { - TableSnapshotInputFormatImpl tsif = new TableSnapshotInputFormatImpl(); Configuration conf = UTIL.getConfiguration(); HDFSBlocksDistribution blockDistribution = new HDFSBlocksDistribution(); - Assert.assertEquals(Lists.newArrayList(), tsif.getBestLocations(conf, blockDistribution)); + Assert.assertEquals(Lists.newArrayList(), + TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1); - Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution)); + Assert.assertEquals(Lists.newArrayList("h1"), + TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1); - Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution)); + 
Assert.assertEquals(Lists.newArrayList("h1"), + TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 1); - Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution)); + Assert.assertEquals(Lists.newArrayList("h1"), + TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution = new HDFSBlocksDistribution(); blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 10); blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 7); blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 5); blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 1); - Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution)); + Assert.assertEquals(Lists.newArrayList("h1"), + TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 2); Assert.assertEquals(Lists.newArrayList("h1", "h2"), - tsif.getBestLocations(conf, blockDistribution)); + TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 3); Assert.assertEquals(Lists.newArrayList("h2", "h1"), - tsif.getBestLocations(conf, blockDistribution)); + TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 6); blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 9); Assert.assertEquals(Lists.newArrayList("h2", "h3", "h4", "h1"), - tsif.getBestLocations(conf, blockDistribution)); + TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution)); } public static enum TestTableSnapshotCounters { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java index 34b9570..06fede7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java @@ -185,7 +185,6 @@ public class TestWALRecordReader { final WALFactory walfactory = new WALFactory(conf, null, getName()); WAL log = walfactory.getWAL(info.getEncodedNameAsBytes(), info.getTable().getNamespace()); byte [] value = Bytes.toBytes("value"); - final AtomicLong sequenceId = new AtomicLong(0); WALEdit edit = new WALEdit(); edit.add(new KeyValue(rowName, family, Bytes.toBytes("1"), System.currentTimeMillis(), value)); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java index d2bed9b..34cf19f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java @@ -178,8 +178,7 @@ public class 
TestCatalogJanitorInMemoryStates { } Threads.sleep(100); } - - if (pair.getFirst() == null || pair.getSecond() == null) { + if (pair == null || pair.getFirst() == null || pair.getSecond() == null) { throw new IOException("Failed to get daughters, for parent region: " + r); } return pair; http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java index a809996..0f712e7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java @@ -27,6 +27,7 @@ import static org.apache.hadoop.hbase.SplitLogCounters.tot_wkr_task_err; import static org.apache.hadoop.hbase.SplitLogCounters.tot_wkr_task_resigned; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -699,6 +700,7 @@ public class TestDistributedLogSplitting { slm.markRegionsRecovering(firstFailedServer, regionSet); slm.markRegionsRecovering(secondFailedServer, regionSet); + assertNotNull(zkw.recoveringRegionsZNode); List<String> recoveringRegions = ZKUtil.listChildrenNoWatch(zkw, ZKUtil.joinZNode(zkw.recoveringRegionsZNode, region.getEncodedName())); @@ -943,6 +945,9 @@ public class TestDistributedLogSplitting { break; } + assertNotNull(region); + assertNotNull(dstRS); + slm.markRegionsRecovering(hrs.getServerName(), regionSet); // move region in order for the region opened in recovering state final HRegionInfo hri = region; @@ -1239,7 +1244,6 @@ public class TestDistributedLogSplitting { conf.setLong("hbase.regionserver.hlog.blocksize", 15 * 1024); conf.setBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, true); startCluster(NUM_RS); - final AtomicLong sequenceId = new AtomicLong(100); final int NUM_REGIONS_TO_CREATE = 40; final int NUM_LOG_LINES = 1000; // turn off load balancing to prevent regions from moving around otherwise @@ -1336,7 +1340,6 @@ public class TestDistributedLogSplitting { conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 30 * 1024); conf.setInt("hbase.hstore.compactionThreshold", 3); startCluster(NUM_RS); - final AtomicLong sequenceId = new AtomicLong(100); final int NUM_REGIONS_TO_CREATE = 40; final int NUM_LOG_LINES = 2000; // turn off load balancing to prevent regions from moving around otherwise @@ -1563,8 +1566,6 @@ public class TestDistributedLogSplitting { // remove root and meta region regions.remove(HRegionInfo.FIRST_META_REGIONINFO); // using one sequenceId for edits across all regions is ok. 
- final AtomicLong sequenceId = new AtomicLong(10); - for(Iterator<HRegionInfo> iter = regions.iterator(); iter.hasNext(); ) { HRegionInfo regionInfo = iter.next(); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java index 292ec72..2228188 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java @@ -971,6 +971,9 @@ public class TestMasterFailover { } } + assertNotNull(metaRegion); + assertNotNull(metaRegionServer); + TEST_UTIL.shutdownMiniHBaseCluster(); // Create a ZKW to use in the test http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java index a065872..d732c13 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java @@ -421,8 +421,7 @@ public class TestRegionPlacement { for (Region region: rs.getOnlineRegions(TableName.valueOf("testRegionAssignment"))) { InetSocketAddress[] favoredSocketAddress = rs.getFavoredNodesForRegion( region.getRegionInfo().getEncodedName()); - String regionName = region.getRegionInfo().getRegionNameAsString(); - List<ServerName> favoredServerList = plan.getAssignmentMap().get(regionName); + List<ServerName> favoredServerList = plan.getAssignmentMap().get(region.getRegionInfo()); // All regions are supposed to have favored nodes, // except for hbase:meta and ROOT @@ -474,8 +473,8 @@ public class TestRegionPlacement { try { @SuppressWarnings("deprecation") HRegionInfo info = MetaScanner.getHRegionInfo(result); - if(info.getTable().getNamespaceAsString() - .equals(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR)) { + if(info != null && NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR + .equals(info.getTable().getNamespaceAsString())) { return true; } byte[] server = result.getValue(HConstants.CATALOG_FAMILY, http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java index 355fc9a..7799a13 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java @@ -53,7 +53,7 @@ public class TestDefaultLoadBalancer extends BalancerTestBase { } // int[testnum][servernumber] -> numregions - int[][] clusterStateMocks = new int[][] { + int[][] ourClusterStateMocks = new int[][] { // 1 node new int[] { 0 }, new int[] { 1 }, @@ -113,7 +113,7 @@ public class 
TestDefaultLoadBalancer extends BalancerTestBase { @Test (timeout=60000) public void testBalanceCluster() throws Exception { - for (int[] mockCluster : clusterStateMocks) { + for (int[] mockCluster : ourClusterStateMocks) { Map<ServerName, List<HRegionInfo>> servers = mockClusterServers(mockCluster); List<ServerAndLoad> list = convertToList(servers); LOG.info("Mock Cluster : " + printMock(list) + " " + printStats(list)); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeAssignmentHelper.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeAssignmentHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeAssignmentHelper.java index 4997049..c63dfe5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeAssignmentHelper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeAssignmentHelper.java @@ -25,6 +25,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.SortedMap; import java.util.TreeMap; @@ -345,13 +346,13 @@ public class TestFavoredNodeAssignmentHelper { regionMap.put(regionsOnRack3, 3); assertTrue(printProportions(firstRackSize, secondRackSize, thirdRackSize, regionsOnRack1, regionsOnRack2, regionsOnRack3), - rackMap.get(firstRackSize) == regionMap.get(regionsOnRack1)); + Objects.equals(rackMap.get(firstRackSize), regionMap.get(regionsOnRack1))); assertTrue(printProportions(firstRackSize, secondRackSize, thirdRackSize, regionsOnRack1, regionsOnRack2, regionsOnRack3), - rackMap.get(secondRackSize) == regionMap.get(regionsOnRack2)); + Objects.equals(rackMap.get(secondRackSize), regionMap.get(regionsOnRack2))); assertTrue(printProportions(firstRackSize, secondRackSize, thirdRackSize, regionsOnRack1, regionsOnRack2, regionsOnRack3), - rackMap.get(thirdRackSize) == regionMap.get(regionsOnRack3)); + Objects.equals(rackMap.get(thirdRackSize), regionMap.get(regionsOnRack3))); } private String printProportions(int firstRackSize, int secondRackSize, http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure2.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure2.java index 3a8795a..f27150e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure2.java @@ -58,7 +58,7 @@ public class TestCreateTableProcedure2 { builder.setState(ZooKeeperProtos.Table.State.ENABLED); byte [] data = ProtobufUtil.prependPBMagic(builder.build().toByteArray()); ZKUtil.createSetData(zkw, znode, data); - LOG.info("Create an orphaned Znode " + znode + " with data " + data); + LOG.info("Create an orphaned Znode " + znode); // Step 3: link the zk cluster to hbase cluster TEST_UTIL.setZkCluster(zkCluster); // Step 4: start hbase cluster and expect master to start successfully. 
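
For context (not part of the patch): the TestFavoredNodeAssignmentHelper hunk above swaps '==' for Objects.equals when comparing values fetched from Map<Integer, Integer>, since '==' on boxed Integers compares object identity and is only reliable for small cached values. A minimal sketch with made-up keys and counts:

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

public class BoxedCompareSketch {
  public static void main(String[] args) {
    Map<Integer, Integer> rackMap = new HashMap<>();
    Map<Integer, Integer> regionMap = new HashMap<>();
    rackMap.put(10, 1000);
    regionMap.put(7, 1000);

    // '==' compares references; Integers outside the small cached range are
    // distinct objects, so this typically prints false despite equal values.
    System.out.println(rackMap.get(10) == regionMap.get(7));

    // Objects.equals is null-safe and compares by value, as in the patch.
    System.out.println(Objects.equals(rackMap.get(10), regionMap.get(7)));
  }
}
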
http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java index e067122..81fd6a7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -56,7 +57,6 @@ public class TestSnapshotFileCache { private static final Log LOG = LogFactory.getLog(TestSnapshotFileCache.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); - private static long sequenceId = 0; private static FileSystem fs; private static Path rootDir; @@ -80,6 +80,7 @@ public class TestSnapshotFileCache { } @Test(timeout = 10000000) + @Ignore("See HBASE-19275") public void testLoadAndDelete() throws IOException { // don't refresh the cache unless we tell it to long period = Long.MAX_VALUE; @@ -94,6 +95,7 @@ public class TestSnapshotFileCache { } @Test + @Ignore("See HBASE-19275") public void testReloadModifiedDirectory() throws IOException { // don't refresh the cache unless we tell it to long period = Long.MAX_VALUE; @@ -150,9 +152,6 @@ public class TestSnapshotFileCache { SnapshotMock.SnapshotBuilder complete = createAndTestSnapshotV1(cache, "snapshot", false, false); - SnapshotMock.SnapshotBuilder inProgress = - createAndTestSnapshotV1(cache, "snapshotInProgress", true, false); - int countBeforeCheck = count.get(); FSUtils.logFileSystemState(fs, rootDir, LOG); @@ -224,12 +223,11 @@ public class TestSnapshotFileCache { List<Path> files = new ArrayList<Path>(); for (int i = 0; i < 3; ++i) { for (Path filePath: builder.addRegion()) { - String fileName = filePath.getName(); if (tmp) { // We should be able to find all the files while the snapshot creation is in-progress FSUtils.logFileSystemState(fs, rootDir, LOG); - Iterable<FileStatus> nonSnapshot = getNonSnapshotFiles(cache, filePath); - assertFalse("Cache didn't find " + fileName, Iterables.contains(nonSnapshot, fileName)); + assertFalse("Cache didn't find " + filePath, + contains(getNonSnapshotFiles(cache, filePath), filePath)); } files.add(filePath); } @@ -242,9 +240,7 @@ public class TestSnapshotFileCache { // Make sure that all files are still present for (Path path: files) { - Iterable<FileStatus> nonSnapshotFiles = getNonSnapshotFiles(cache, path); - assertFalse("Cache didn't find " + path.getName(), - Iterables.contains(nonSnapshotFiles, path.getName())); + assertFalse("Cache didn't find " + path, contains(getNonSnapshotFiles(cache, path), path)); } FSUtils.logFileSystemState(fs, rootDir, LOG); @@ -255,23 +251,31 @@ public class TestSnapshotFileCache { // The files should be in cache until next refresh for (Path filePath: files) { - Iterable<FileStatus> nonSnapshotFiles = getNonSnapshotFiles(cache, filePath); - assertFalse("Cache didn't find " + filePath.getName(), Iterables.contains(nonSnapshotFiles, - filePath.getName())); + assertFalse("Cache didn't find " + filePath, + 
contains(getNonSnapshotFiles(cache, filePath), filePath)); } // then trigger a refresh cache.triggerCacheRefreshForTesting(); + // and not it shouldn't find those files for (Path filePath: files) { - Iterable<FileStatus> nonSnapshotFiles = getNonSnapshotFiles(cache, filePath); - assertTrue("Cache found '" + filePath.getName() + "', but it shouldn't have.", - !Iterables.contains(nonSnapshotFiles, filePath.getName())); + assertFalse("Cache found '" + filePath + "', but it shouldn't have.", + contains(getNonSnapshotFiles(cache, filePath), filePath)); + } + } + } + + private static boolean contains(Iterable<FileStatus> files, Path filePath) { + for (FileStatus status: files) { + if (filePath.equals(status.getPath())) { + return true; } } + return false; } - private Iterable<FileStatus> getNonSnapshotFiles(SnapshotFileCache cache, Path storeFile) + private static Iterable<FileStatus> getNonSnapshotFiles(SnapshotFileCache cache, Path storeFile) throws IOException { return cache.getUnreferencedFiles( Arrays.asList(FSUtils.listStatus(fs, storeFile.getParent())) http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java index 62afaa9..e988826 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java @@ -110,7 +110,7 @@ public class SimpleMasterProcedureManager extends MasterProcedureManager { monitor.receive(e); } // return the first value for testing - return returnData.values().iterator().next(); + return returnData != null ? 
returnData.values().iterator().next() : null; } @Override http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java index 933190c..6132c87 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java @@ -123,6 +123,7 @@ public class DataBlockEncodingTool { private long totalValueLength = 0; private long totalKeyRedundancyLength = 0; private long totalCFLength = 0; + private long totalTagsLength = 0; private byte[] rawKVs; private boolean useHBaseChecksum = false; @@ -201,15 +202,17 @@ public class DataBlockEncodingTool { int vLen = currentKV.getValueLength(); int cfLen = currentKV.getFamilyLength(currentKV.getFamilyOffset()); int restLen = currentKV.getLength() - kLen - vLen; + int tagsLen = currentKV.getTagsLength(); totalKeyLength += kLen; totalValueLength += vLen; totalPrefixLength += restLen; totalCFLength += cfLen; + totalTagsLength += tagsLen; } rawKVs = uncompressedOutputStream.toByteArray(); - boolean useTag = (currentKV.getTagsLength() > 0); + boolean useTag = (totalTagsLength > 0); for (DataBlockEncoding encoding : encodings) { if (encoding == DataBlockEncoding.NONE) { continue; http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java index 6b1382f..b1f94fb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java @@ -477,6 +477,7 @@ public class TestCompaction { try { isInCompact = true; synchronized (this) { + // FIXME: This wait may spuriously wake up, so this test is likely to be flaky this.wait(); } } catch (InterruptedException e) { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java index 03072e2..233f00f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java @@ -56,6 +56,8 @@ import java.util.concurrent.atomic.AtomicReference; * and HRegion.close(); */ @Category({RegionServerTests.class, MediumTests.class}) +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Use of an atomic type both as monitor and condition variable is intended") public class TestCompactionArchiveConcurrentClose { 
public HBaseTestingUtility testUtil; http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java index cc509a4..e4a91c3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java @@ -247,7 +247,10 @@ public class TestFSErrorsExposed { */ public void startFaults() { for (SoftReference<FaultyInputStream> is: inStreams) { - is.get().startFaults(); + FaultyInputStream fis = is.get(); + if (fis != null) { + fis.startFaults(); + } } } } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 1ec2fe4..09d0b4f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -574,8 +574,10 @@ public class TestHRegion { } finally { // Make it so all writes succeed from here on out so can close clean ffs.fault.set(false); - region.getWAL().rollWriter(true); - HRegion.closeHRegion(region); + if (region != null) { + region.getWAL().rollWriter(true); + HRegion.closeHRegion(region); + } } return null; } @@ -2756,6 +2758,8 @@ public class TestHRegion { finally { parent.clearSplit(); } + + assertNotNull(result); return new HRegion[] { (HRegion)result.getFirst(), (HRegion)result.getSecond() }; } @@ -2880,7 +2884,7 @@ public class TestHRegion { try { region.closed.set(true); try { - region.getScanner(null); + region.getScanner(new Scan()); fail("Expected to get an exception during getScanner on a region that is closed"); } catch (NotServingRegionException e) { // this is the correct exception that is expected @@ -4034,8 +4038,8 @@ public class TestHRegion { } protected class PutThread extends Thread { - private volatile boolean done; - private volatile int numPutsFinished = 0; + private boolean done; + private int numPutsFinished = 0; private Throwable error = null; private int numRows; http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java index deeb4ae..ede7a3c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java @@ -129,7 +129,7 @@ public class TestJoinedScanners { puts.clear(); } } - if (puts.size() >= 0) { + if (puts.size() > 0) { ht.put(puts); puts.clear(); } 
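
For context (not part of the patch): the TestFSErrorsExposed hunk above guards SoftReference.get(), which returns null once the referent has been garbage collected. A minimal sketch of the pattern, with a hypothetical stand-in for the test's FaultyInputStream:

import java.lang.ref.SoftReference;
import java.util.ArrayList;
import java.util.List;

public class SoftReferenceSketch {
  static class FaultyInputStream {
    void startFaults() {
      // stand-in for the test's fault-injecting stream
    }
  }

  private final List<SoftReference<FaultyInputStream>> inStreams = new ArrayList<>();

  public void startFaults() {
    for (SoftReference<FaultyInputStream> ref : inStreams) {
      // get() may return null if the stream has been collected.
      FaultyInputStream fis = ref.get();
      if (fis != null) {
        fis.startFaults();
      }
    }
  }
}
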
http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java index 3413d44..bc9e39b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java @@ -46,7 +46,6 @@ public class TestMultiVersionConcurrencyControl extends TestCase { public boolean failed = false; public void run() { - AtomicLong startPoint = new AtomicLong(); while (!finished.get()) { MultiVersionConcurrencyControl.WriteEntry e = mvcc.begin(); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java index 3ed839b..e5f842b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java @@ -95,7 +95,7 @@ public class TestRegionMergeTransaction { public void teardown() throws IOException { for (HRegion region : new HRegion[] { region_a, region_b, region_c }) { if (region != null && !region.isClosed()) region.close(); - if (this.fs.exists(region.getRegionFileSystem().getRegionDir()) + if (region != null && this.fs.exists(region.getRegionFileSystem().getRegionDir()) && !this.fs.delete(region.getRegionFileSystem().getRegionDir(), true)) { throw new IOException("Failed deleting of " + region.getRegionFileSystem().getRegionDir()); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java index 6c43b10..fa214f7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java @@ -93,6 +93,7 @@ public class TestRegionServerHostname { List<String> servers = ZKUtil.listChildrenNoWatch(zkw, zkw.rsZNode); while (servers == null) { Threads.sleep(10); + servers = ZKUtil.listChildrenNoWatch(zkw, zkw.rsZNode); } assertTrue(servers.size() == NUM_RS); for (String server : servers) { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java index 77cb5b7..2f60bef 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.LocalHBaseCluster; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.MiniHBaseCluster.MiniHBaseClusterRegionServer; -import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.ServerManager; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread; @@ -166,6 +165,8 @@ public class TestRegionServerReportForDuty { } @Override + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SWL_SLEEP_WITH_LOCK_HELD", + justification="Intended") protected synchronized ServerName createRegionServerStatusStub(boolean refresh) { sn = super.createRegionServerStatusStub(refresh); rpcStubCreatedFlag = true; http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java index 3b52671..b0b0636 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java @@ -132,7 +132,6 @@ public class TestScannerRetriableFailure { } public void loadTable(final Table table, int numRows) throws IOException { - List<Put> puts = new ArrayList<Put>(numRows); for (int i = 0; i < numRows; ++i) { byte[] row = Bytes.toBytes(String.format("%09d", i)); Put put = new Put(row); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java index b548b65..f13dd61 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java @@ -83,7 +83,8 @@ public class TestSplitTransaction { private static boolean preRollBackCalled = false; private static boolean postRollBackCalled = false; - @Before public void setup() throws IOException { + @Before + public void setup() throws IOException { this.fs = FileSystem.get(TEST_UTIL.getConfiguration()); TEST_UTIL.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, CustomObserver.class.getName()); this.fs.delete(this.testdir, true); @@ -97,11 +98,14 @@ public class TestSplitTransaction { TEST_UTIL.getConfiguration().setBoolean("hbase.testing.nocluster", true); } - @After public void teardown() throws IOException { + @After + public void teardown() throws IOException { if (this.parent != null && !this.parent.isClosed()) this.parent.close(); - Path regionDir 
= this.parent.getRegionFileSystem().getRegionDir(); - if (this.fs.exists(regionDir) && !this.fs.delete(regionDir, true)) { - throw new IOException("Failed delete of " + regionDir); + if (this.parent != null) { + Path regionDir = this.parent.getRegionFileSystem().getRegionDir(); + if (this.fs.exists(regionDir) && !this.fs.delete(regionDir, true)) { + throw new IOException("Failed delete of " + regionDir); + } } if (this.wals != null) { this.wals.close(); @@ -109,7 +113,8 @@ public class TestSplitTransaction { this.fs.delete(this.testdir, true); } - @Test public void testFailAfterPONR() throws IOException, KeeperException { + @Test + public void testFailAfterPONR() throws IOException, KeeperException { final int rowcount = TEST_UTIL.loadRegion(this.parent, CF); assertTrue(rowcount > 0); int parentRowCount = countRows(this.parent); @@ -153,7 +158,8 @@ public class TestSplitTransaction { * Test straight prepare works. Tries to split on {@link #GOOD_SPLIT_ROW} * @throws IOException */ - @Test public void testPrepare() throws IOException { + @Test + public void testPrepare() throws IOException { prepareGOOD_SPLIT_ROW(); } @@ -171,7 +177,8 @@ public class TestSplitTransaction { /** * Pass a reference store */ - @Test public void testPrepareWithRegionsWithReference() throws IOException { + @Test + public void testPrepareWithRegionsWithReference() throws IOException { HStore storeMock = Mockito.mock(HStore.class); when(storeMock.hasReferences()).thenReturn(true); when(storeMock.getFamily()).thenReturn(new HColumnDescriptor("cf")); @@ -187,7 +194,8 @@ public class TestSplitTransaction { /** * Test SplitTransactionListener */ - @Test public void testSplitTransactionListener() throws IOException { + @Test + public void testSplitTransactionListener() throws IOException { SplitTransactionImpl st = new SplitTransactionImpl(this.parent, GOOD_SPLIT_ROW); SplitTransaction.TransactionListener listener = Mockito.mock(SplitTransaction.TransactionListener.class); @@ -207,7 +215,8 @@ public class TestSplitTransaction { /** * Pass an unreasonable split row. */ - @Test public void testPrepareWithBadSplitRow() throws IOException { + @Test + public void testPrepareWithBadSplitRow() throws IOException { // Pass start row as split key. 
SplitTransactionImpl st = new SplitTransactionImpl(this.parent, STARTROW); assertFalse(st.prepare()); @@ -219,13 +228,15 @@ public class TestSplitTransaction { assertFalse(st.prepare()); } - @Test public void testPrepareWithClosedRegion() throws IOException { + @Test + public void testPrepareWithClosedRegion() throws IOException { this.parent.close(); SplitTransactionImpl st = new SplitTransactionImpl(this.parent, GOOD_SPLIT_ROW); assertFalse(st.prepare()); } - @Test public void testWholesomeSplit() throws IOException { + @Test + public void testWholesomeSplit() throws IOException { final int rowcount = TEST_UTIL.loadRegion(this.parent, CF, true); assertTrue(rowcount > 0); int parentRowCount = countRows(this.parent); @@ -302,8 +313,8 @@ public class TestSplitTransaction { assertTrue(expectedException); } - - @Test public void testRollback() throws IOException { + @Test + public void testRollback() throws IOException { final int rowcount = TEST_UTIL.loadRegion(this.parent, CF); assertTrue(rowcount > 0); int parentRowCount = countRows(this.parent); @@ -416,4 +427,3 @@ public class TestSplitTransaction { } } - http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java index b0a84f79..9c9b4b2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java @@ -1043,9 +1043,6 @@ public class TestStoreFile extends HBaseTestCase { DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF; - HFileDataBlockEncoder dataBlockEncoder = - new HFileDataBlockEncoderImpl( - dataBlockEncoderAlgo); cacheConf = new CacheConfig(conf); HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL) .withChecksumType(CKTYPE) http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java index 6d89d73..e79f168 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.OPEN_KEY; +import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; @@ -182,17 +183,17 @@ public class TestStripeStoreFileManager { sf5.splitPoint = new byte[1]; manager.insertNewFiles(al(sf5)); manager.insertNewFiles(al(createFile(1, 0))); - assertEquals(sf5.splitPoint, manager.getSplitPoint()); + assertArrayEquals(sf5.splitPoint, manager.getSplitPoint()); // Same if there's one stripe but the biggest file is still in L0. 
manager.addCompactionResults(al(), al(createFile(2, 0, OPEN_KEY, OPEN_KEY))); - assertEquals(sf5.splitPoint, manager.getSplitPoint()); + assertArrayEquals(sf5.splitPoint, manager.getSplitPoint()); // If the biggest file is in the stripe, should get from it. MockStoreFile sf6 = createFile(6, 0, OPEN_KEY, OPEN_KEY); sf6.splitPoint = new byte[1]; manager.addCompactionResults(al(), al(sf6)); - assertEquals(sf6.splitPoint, manager.getSplitPoint()); + assertArrayEquals(sf6.splitPoint, manager.getSplitPoint()); } @Test http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java index 440d215..ab0175b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java @@ -387,11 +387,13 @@ public class TestCompactedHFilesDischarger { } catch (InterruptedException e) { } while (!next) { - resScanner.next(results); + next = resScanner.next(results); } } finally { scanCompletedCounter.incrementAndGet(); - resScanner.close(); + if (resScanner != null) { + resScanner.close(); + } } } } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java index 3a23a05..769d4de 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java @@ -65,7 +65,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdge; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.wal.DefaultWALProvider; import org.apache.hadoop.hbase.wal.WAL; @@ -285,13 +284,13 @@ public class TestFSHLog { // return only one region. byte[][] regionsToFlush = wal.findRegionsToForceFlush(); assertEquals(1, regionsToFlush.length); - assertEquals(hri1.getEncodedNameAsBytes(), regionsToFlush[0]); + assertTrue(Bytes.equals(hri1.getEncodedNameAsBytes(), regionsToFlush[0])); // insert edits in second region addEdits(wal, hri2, t2, 2, mvcc); // get the regions to flush, it should still read region1. regionsToFlush = wal.findRegionsToForceFlush(); assertEquals(regionsToFlush.length, 1); - assertEquals(hri1.getEncodedNameAsBytes(), regionsToFlush[0]); + assertTrue(Bytes.equals(hri1.getEncodedNameAsBytes(), regionsToFlush[0])); // flush region 1, and roll the wal file. Only last wal which has entries for region1 should // remain. 
flushRegion(wal, hri1.getEncodedNameAsBytes(), t1.getFamiliesKeys()); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java index 3a02378..938520e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java @@ -1117,12 +1117,11 @@ public class TestWALReplay { // Flusher used in this test. Keep count of how often we are called and // actually run the flush inside here. class TestFlusher implements FlushRequester { - private HRegion r; @Override public void requestFlush(Region region, boolean force) { try { - r.flush(force); + region.flush(force); } catch (IOException e) { throw new RuntimeException("Exception flushing", e); } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java index b042a57..297bc09 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java @@ -456,6 +456,8 @@ public class TestReplicationSourceManager { } @Test + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RU_INVOKE_RUN", + justification="Intended") public void testCleanupUnknownPeerZNode() throws Exception { final Server server = new DummyServer("hostname2.example.org"); ReplicationQueues rq = http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java index f5f6859..2e37437 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java @@ -109,8 +109,8 @@ public class SecureTestCluster { public static void tearDown() throws Exception { if (CLUSTER != null) { CLUSTER.shutdown(); + CLUSTER.join(); } - CLUSTER.join(); if (KDC != null) { KDC.stop(); } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java index a15db00..eb97ad9 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java @@ -483,6 +483,7 @@ public abstract class TestVisibilityLabels { } } catch (Throwable e) { } + assertNotNull(authsResponse); List<String> authsList = new ArrayList<String>(); for (ByteString authBS : authsResponse.getAuthList()) { authsList.add(Bytes.toString(authBS.toByteArray())); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java index a3f5382..f045ada 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java @@ -120,7 +120,6 @@ public class TestExportSnapshot { admin.snapshot(emptySnapshotName, tableName); // Add some rows - Table table = new HTable(TEST_UTIL.getConfiguration(), tableName); SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY); tableNumFiles = admin.getTableRegions(tableName).size(); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LauncherSecurityManager.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LauncherSecurityManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LauncherSecurityManager.java index 8be2bea..542695b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LauncherSecurityManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LauncherSecurityManager.java @@ -39,6 +39,11 @@ public class LauncherSecurityManager extends SecurityManager { private SecurityManager securityManager; public LauncherSecurityManager() { + this(null); + } + + public LauncherSecurityManager(SecurityManager securityManager) { + this.securityManager = securityManager; reset(); } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java index 0d25a68..75966fb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java @@ -509,9 +509,13 @@ public abstract class MultiThreadedAction { } catch (IOException e) { LOG.warn("Couldn't get locations for row " + Bytes.toString(r.getRow())); } - HRegionLocation locations[] = rl.getRegionLocations(); - for (HRegionLocation h : locations) { - LOG.info("LOCATION " + h); + if (rl != null) { + HRegionLocation locations[] = rl.getRegionLocations(); + if (locations != null) { + for (HRegionLocation h : locations) { + LOG.info("LOCATION " + h); + } + } } } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java 
---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java index 10e9f49..f5d8061 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java @@ -169,8 +169,9 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase { get = dataGenerator.beforeGet(rowKeyBase, get); } catch (Exception e) { // Ideally wont happen - LOG.warn("Failed to modify the get from the load generator = [" + get.getRow() - + "], column family = [" + Bytes.toString(cf) + "]", e); + LOG.warn("Failed to modify the get from the load generator = [" + + Bytes.toString(get.getRow()) + "], column family = [" + + Bytes.toString(cf) + "]", e); } Result result = getRow(get, rowKeyBase, cf); Map<byte[], byte[]> columnValues = @@ -236,7 +237,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase { if (isBatchUpdate) { if (verbose) { LOG.debug("Preparing increment and append for key = [" - + rowKey + "], " + columnCount + " columns"); + + Bytes.toString(rowKey) + "], " + columnCount + " columns"); } mutate(table, inc, rowKeyBase); mutate(table, app, rowKeyBase); @@ -269,8 +270,8 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase { result = table.get(get); } catch (IOException ie) { LOG.warn( - "Failed to get the row for key = [" + get.getRow() + "], column family = [" - + Bytes.toString(cf) + "]", ie); + "Failed to get the row for key = [" + Bytes.toString(get.getRow()) + + "], column family = [" + Bytes.toString(cf) + "]", ie); } return result; } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java index 5554f59..a1716de 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java @@ -77,7 +77,6 @@ public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater { public class HBaseUpdaterThreadWithACL extends HBaseUpdaterThread { - private Table table; private MutateAccessAction mutateAction = new MutateAccessAction(); public HBaseUpdaterThreadWithACL(int updaterId) throws IOException { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java index 4d1c286..5b545f1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java @@ -124,7 +124,8 @@ public class MultiThreadedWriter extends MultiThreadedWriterBase { } if (isMultiPut) { if (verbose) { - LOG.debug("Preparing put for key = [" + rowKey + "], " + columnCount + " columns"); + 
LOG.debug("Preparing put for key = [" + Bytes.toString(rowKey) + "], " + columnCount + + " columns"); } insert(table, put, rowKeyBase); numCols.addAndGet(columnCount); http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java index a51e532..8626280 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java @@ -483,11 +483,14 @@ public class ProcessBasedLocalHBaseCluster { dirList.clear(); dirList.addAll(logTailDirs); for (String d : dirList) { - for (File f : new File(d).listFiles(LOG_FILES)) { - String filePath = f.getAbsolutePath(); - if (!tailedFiles.contains(filePath)) { - tailedFiles.add(filePath); - startTailingFile(filePath); + File[] files = new File(d).listFiles(LOG_FILES); + if (files != null) { + for (File f : files) { + String filePath = f.getAbsolutePath(); + if (!tailedFiles.contains(filePath)) { + tailedFiles.add(filePath); + startTailingFile(filePath); + } } } } http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java index cf9dbee..fae7073 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.util; import static org.junit.Assert.fail; +import java.io.IOException; import java.security.Key; import org.apache.hadoop.conf.Configuration; @@ -51,7 +52,7 @@ public class TestEncryptionTest { conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, FailingKeyProvider.class.getName()); EncryptionTest.testKeyProvider(conf); fail("Instantiation of bad test key provider should have failed check"); - } catch (Exception e) { } + } catch (IOException e) { } } @Test @@ -67,7 +68,7 @@ public class TestEncryptionTest { conf.set(HConstants.CRYPTO_CIPHERPROVIDER_CONF_KEY, FailingCipherProvider.class.getName()); EncryptionTest.testCipherProvider(conf); fail("Instantiation of bad test cipher provider should have failed check"); - } catch (Exception e) { } + } catch (IOException e) { } } @Test @@ -84,7 +85,7 @@ public class TestEncryptionTest { try { EncryptionTest.testEncryption(conf, "foobar", null); fail("Test for bogus cipher should have failed"); - } catch (Exception e) { } + } catch (IOException e) { } } public static class FailingKeyProvider implements KeyProvider { http://git-wip-us.apache.org/repos/asf/hbase/blob/108ea30e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java index f1dc1a2..6859a11 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java @@ -742,7 +742,9 @@ public class TestHBaseFsck { htd = admin.getTableDescriptor(table); assertEquals(htd.getValue("NOT_DEFAULT"), "true"); } finally { - fs.rename(new Path("/.tableinfo"), tableinfo); + if (fs != null) { + fs.rename(new Path("/.tableinfo"), tableinfo); + } cleanupTable(table); } }
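Several hunks above (TestStripeStoreFileManager, TestFSHLog) replace assertEquals with assertArrayEquals or Bytes.equals when comparing byte[] values. A minimal standalone JUnit 4 sketch, independent of the HBase code and with a hypothetical class name, showing why the reference-based comparison is wrong for arrays:

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertFalse;

import org.junit.Test;

public class ByteArrayAssertionExample {
  @Test
  public void contentEqualityNeedsArrayAwareAssert() {
    byte[] expected = new byte[] { 1, 2, 3 };
    byte[] actual = new byte[] { 1, 2, 3 };
    // Arrays do not override Object.equals, so equals() here is reference identity
    // and assertEquals(expected, actual) would fail despite identical contents.
    assertFalse(expected.equals(actual));
    // assertArrayEquals compares length and each element, which is the intended check.
    assertArrayEquals(expected, actual);
  }
}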
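The MultiThreadedAction and ProcessBasedLocalHBaseCluster hunks add null checks around calls that can legitimately return null, for example File.listFiles(), which returns null rather than an empty array when the path is missing, is not a directory, or cannot be read. A short standalone sketch of the same guard pattern (class name and directory path are illustrative only):

import java.io.File;

public class NullSafeListingExample {
  // Print the absolute path of every file in the given directory,
  // tolerating a missing or unreadable path.
  static void printFileNames(String dir) {
    // listFiles() returns null (not an empty array) on failure, so guard before iterating.
    File[] files = new File(dir).listFiles();
    if (files != null) {
      for (File f : files) {
        System.out.println(f.getAbsolutePath());
      }
    }
  }

  public static void main(String[] args) {
    printFileNames("/tmp/hbase-logs"); // hypothetical path
  }
}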