Modified: hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java?rev=1585757&r1=1585756&r2=1585757&view=diff
==============================================================================
--- hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java (original)
+++ hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java Tue Apr 8 15:24:08 2014
@@ -19,6 +19,12 @@
 package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
+import java.util.Random;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;

 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
@@ -29,6 +35,7 @@ import org.apache.hadoop.hbase.TableName
 import org.apache.hadoop.hbase.catalog.TestMetaReaderEditor;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.RequestConverter;
@@ -37,12 +44,14 @@ import org.apache.hadoop.hbase.protobuf.
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.zookeeper.ZKAssign;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.mortbay.log.Log;

 import com.google.protobuf.ServiceException;

@@ -296,4 +305,126 @@ public class TestRegionReplicas {
       closeRegion(hriSecondary);
     }
   }
+
+  @Test(timeout = 300000)
+  public void testFlushAndCompactionsInPrimary() throws Exception {
+
+    long runtime = 30 * 1000;
+    // enable store file refreshing
+    final int refreshPeriod = 100; // 100ms refresh is a lot
+    HTU.getConfiguration().setInt("hbase.hstore.compactionThreshold", 3);
+    HTU.getConfiguration().setInt(StorefileRefresherChore.REGIONSERVER_STOREFILE_REFRESH_PERIOD, refreshPeriod);
+    // restart the region server so that it starts the refresher chore
+    restartRegionServer();
+    final int startKey = 0, endKey = 1000;
+
+    try {
+      openRegion(hriSecondary);
+
+      //load some data to primary so that reader won't fail
+      HTU.loadNumericRows(table, f, startKey, endKey);
+      HTU.getHBaseAdmin().flush(table.getTableName());
+      // ensure that chore is run
+      Threads.sleep(2 * refreshPeriod);
+
+      final AtomicBoolean running = new AtomicBoolean(true);
+      @SuppressWarnings("unchecked")
+      final AtomicReference<Exception>[] exceptions = new AtomicReference[3];
+      for (int i=0; i < exceptions.length; i++) {
+        exceptions[i] = new AtomicReference<Exception>();
+      }
+
+      Runnable writer = new Runnable() {
+        int key = startKey;
+        @Override
+        public void run() {
+          try {
+            while (running.get()) {
+              byte[] data = Bytes.toBytes(String.valueOf(key));
+              Put put = new Put(data);
+              put.add(f, null, data);
+              table.put(put);
+              key++;
+              if (key == endKey) key = startKey;
+            }
+          } catch (Exception ex) {
+            Log.warn(ex);
+            exceptions[0].compareAndSet(null, ex);
+          }
+        }
+      };
+
+      Runnable flusherCompactor = new Runnable() {
+        Random random = new Random();
+        @Override
+        public void run() {
+          try {
+            while (running.get()) {
+              // flush or compact
+              if (random.nextBoolean()) {
+                HTU.getHBaseAdmin().flush(table.getTableName());
+              } else {
+                HTU.compact(table.getName(), random.nextBoolean());
+              }
+            }
+          } catch (Exception ex) {
+            Log.warn(ex);
+            exceptions[1].compareAndSet(null, ex);
+          }
+        }
+      };
+
+      Runnable reader = new Runnable() {
+        Random random = new Random();
+        @Override
+        public void run() {
+          try {
+            while (running.get()) {
+              // whether to do a close and open
+              if (random.nextInt(10) == 0) {
+                try {
+                  closeRegion(hriSecondary);
+                } catch (Exception ex) {
+                  Log.warn("Failed closing the region " + hriSecondary + " " + StringUtils.stringifyException(ex));
+                  exceptions[2].compareAndSet(null, ex);
+                }
+                try {
+                  openRegion(hriSecondary);
+                } catch (Exception ex) {
+                  Log.warn("Failed opening the region " + hriSecondary + " " + StringUtils.stringifyException(ex));
+                  exceptions[2].compareAndSet(null, ex);
+                }
+              }
+
+              int key = random.nextInt(endKey - startKey) + startKey;
+              assertGetRpc(hriSecondary, key, true);
+            }
+          } catch (Exception ex) {
+            Log.warn("Failed getting the value in the region " + hriSecondary + " " + StringUtils.stringifyException(ex));
+            exceptions[2].compareAndSet(null, ex);
+          }
+        }
+      };
+
+      Log.info("Starting writer and reader");
+      ExecutorService executor = Executors.newFixedThreadPool(3);
+      executor.submit(writer);
+      executor.submit(flusherCompactor);
+      executor.submit(reader);
+
+      // wait for threads
+      Threads.sleep(runtime);
+      running.set(false);
+      executor.shutdown();
+      executor.awaitTermination(30, TimeUnit.SECONDS);
+
+      for (AtomicReference<Exception> exRef : exceptions) {
+        Assert.assertNull(exRef.get());
+      }
+
+    } finally {
+      HTU.deleteNumericRows(table, HConstants.CATALOG_FAMILY, startKey, endKey);
+      closeRegion(hriSecondary);
+    }
+  }
 }
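
The new testFlushAndCompactionsInPrimary test above coordinates its three workers (writer, flusher/compactor, reader) through a shared AtomicBoolean stop flag plus one AtomicReference<Exception> slot per worker, then asserts after the run that every slot is still null. As a minimal, HBase-free sketch of that coordination pattern (class and worker names are illustrative only, not part of this commit):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

public class StopFlagWorkersSketch {
  public static void main(String[] args) throws Exception {
    final AtomicBoolean running = new AtomicBoolean(true);
    // One slot per worker; compareAndSet(null, ex) records only the first failure.
    @SuppressWarnings("unchecked")
    final AtomicReference<Exception>[] exceptions = new AtomicReference[2];
    for (int i = 0; i < exceptions.length; i++) {
      exceptions[i] = new AtomicReference<Exception>();
    }

    Runnable worker0 = new Runnable() {
      @Override
      public void run() {
        try {
          while (running.get()) {
            Thread.sleep(10); // placeholder for real work (the test issues Puts here)
          }
        } catch (Exception ex) {
          exceptions[0].compareAndSet(null, ex);
        }
      }
    };

    Runnable worker1 = new Runnable() {
      @Override
      public void run() {
        try {
          while (running.get()) {
            Thread.sleep(10); // placeholder for real work (the test flushes/compacts or reads here)
          }
        } catch (Exception ex) {
          exceptions[1].compareAndSet(null, ex);
        }
      }
    };

    ExecutorService executor = Executors.newFixedThreadPool(2);
    executor.submit(worker0);
    executor.submit(worker1);

    Thread.sleep(5 * 1000);   // let the workers run for a while
    running.set(false);       // signal the workers to stop
    executor.shutdown();
    executor.awaitTermination(30, TimeUnit.SECONDS);

    // Fail if any worker recorded an exception (the test uses Assert.assertNull here).
    for (AtomicReference<Exception> exRef : exceptions) {
      if (exRef.get() != null) {
        throw exRef.get();
      }
    }
  }
}

Recording only the first exception per worker via compareAndSet(null, ex) is deliberate: once a worker fails, follow-on failures are usually noise, and the first one is what the final assertion should surface.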
Modified: hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1585757&r1=1585756&r2=1585757&view=diff
==============================================================================
--- hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Tue Apr 8 15:24:08 2014
@@ -81,6 +81,7 @@ import org.apache.hadoop.hbase.util.Incr
 import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
 import org.apache.hadoop.util.Progressable;
 import org.junit.experimental.categories.Category;
+import org.junit.Test;
 import org.mockito.Mockito;

 import com.google.common.collect.Lists;
@@ -917,6 +918,7 @@ public class TestStore extends TestCase
     store.getRegionFileSystem().removeStoreFiles(store.getColumnFamilyName(), Lists.newArrayList(sf));
   }

+  @Test
   public void testRefreshStoreFiles() throws Exception {
     init(this.getName());

@@ -963,6 +965,7 @@ public class TestStore extends TestCase
   }

   @SuppressWarnings("unchecked")
+  @Test
   public void testRefreshStoreFilesNotChanged() throws IOException {
     init(this.getName());

Modified: hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java?rev=1585757&r1=1585756&r2=1585757&view=diff
==============================================================================
--- hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java (original)
+++ hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java Tue Apr 8 15:24:08 2014
@@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.client.Ge
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.StoppableImplementation;
 import org.junit.Assert;
 import org.junit.Before;
@@ -62,6 +63,7 @@ public class TestStoreFileRefresherChore
   public void setUp() {
     TEST_UTIL = new HBaseTestingUtility();
     testDir = TEST_UTIL.getDataTestDir("TestStoreFileRefresherChore");
+    TEST_UTIL.getConfiguration().set(HConstants.HBASE_DIR, testDir.toString());
   }

   private HTableDescriptor getTableDesc(TableName tableName, byte[]... families) {
@@ -92,7 +94,7 @@ public class TestStoreFileRefresherChore
   private HRegion initHRegion(HTableDescriptor htd, byte[] startKey, byte[] stopKey, int replicaId)
      throws IOException {
     Configuration conf = TEST_UTIL.getConfiguration();
-    Path tableDir = new Path(testDir, htd.getTableName().getNameAsString());
+    Path tableDir = FSUtils.getTableDir(testDir, htd.getTableName());

     HRegionInfo info = new HRegionInfo(htd.getTableName(), startKey, stopKey, false, 0, replicaId);

Modified: hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java?rev=1585757&r1=1585756&r2=1585757&view=diff
==============================================================================
--- hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java (original)
+++ hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java Tue Apr 8 15:24:08 2014
@@ -120,7 +120,9 @@ public class MultiThreadedReader extends
   }

   protected HBaseReaderThread createReaderThread(int readerId) throws IOException {
-    return new HBaseReaderThread(readerId);
+    HBaseReaderThread reader = new HBaseReaderThread(readerId);
+    Threads.setLoggingUncaughtExceptionHandler(reader);
+    return reader;
   }

   public class HBaseReaderThread extends Thread {

Modified: hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java?rev=1585757&r1=1585756&r2=1585757&view=diff
==============================================================================
--- hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java (original)
+++ hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java Tue Apr 8 15:24:08 2014
@@ -73,6 +73,7 @@ public class MultiThreadedWriter extends
   protected void createWriterThreads(int numThreads) throws IOException {
     for (int i = 0; i < numThreads; ++i) {
       HBaseWriterThread writer = new HBaseWriterThread(i);
+      Threads.setLoggingUncaughtExceptionHandler(writer);
       writers.add(writer);
     }
   }
@@ -89,6 +90,7 @@ public class MultiThreadedWriter extends
     return new HTable(conf, tableName);
   }

+  @Override
   public void run() {
     try {
       long rowKeyBase;

Modified: hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java?rev=1585757&r1=1585756&r2=1585757&view=diff
==============================================================================
--- hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java (original)
+++ hbase/branches/hbase-10070/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java Tue Apr 8 15:24:08 2014
@@ -101,8 +101,8 @@ public abstract class MultiThreadedWrite
     if (cached != null) {
       result = "cached: " + cached.toString();
     }
-    if (real != null) {
-      if (real.equals(cached)) {
+    if (real != null && real.getServerName() != null) {
+      if (cached != null && cached.getServerName() != null && real.equals(cached)) {
         result += "; cache is up to date";
       } else {
         result = (cached != null) ? (result + "; ") : "";
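
The MultiThreadedReader and MultiThreadedWriter hunks above attach HBase's Threads.setLoggingUncaughtExceptionHandler to each worker thread, so an exception that escapes a reader or writer thread is logged rather than left to the JVM's default handling. A plain-Java sketch of the same idea, using only java.lang.Thread APIs (class, thread, and message names are illustrative, not part of this commit):

public class LoggingHandlerSketch {
  public static void main(String[] args) throws Exception {
    Thread worker = new Thread(new Runnable() {
      @Override
      public void run() {
        // Escapes run() uncaught; without a handler this would only reach the JVM default.
        throw new RuntimeException("simulated worker failure");
      }
    }, "worker-0");

    // Route uncaught failures through our own logging, in the same spirit as the
    // handler installed by the HBase Threads helper.
    worker.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
      @Override
      public void uncaughtException(Thread t, Throwable e) {
        System.err.println("Thread " + t.getName() + " died: " + e);
      }
    });

    worker.start();
    worker.join();
  }
}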
