http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java index 96f02fa..e48516c 100755 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java @@ -55,7 +55,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.HAEventWrapper; import com.gemstone.gemfire.test.dunit.Assert; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.Host; -import com.gemstone.gemfire.test.dunit.NetworkSupport; +import com.gemstone.gemfire.test.dunit.NetworkUtils; import com.gemstone.gemfire.test.dunit.VM; import com.gemstone.gemfire.test.dunit.Wait; import com.gemstone.gemfire.test.dunit.WaitCriterion; @@ -354,11 +354,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "30000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -392,11 +392,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "30000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -431,11 +431,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "30000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String 
client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -468,11 +468,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "30000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -510,11 +510,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "30000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -565,11 +565,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "40000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = 
NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -621,11 +621,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { Integer port3 = (Integer)serverVM0.invoke(HARQueueNewImplDUnitTest.class, "createOneMoreBridgeServer", new Object[] { Boolean.TRUE }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), port3, "0"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), port3, "0"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -652,8 +652,8 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { Integer port3 = (Integer)serverVM0.invoke(HARQueueNewImplDUnitTest.class, "createOneMoreBridgeServer", new Object[] { Boolean.FALSE }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1", Boolean.TRUE); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1", Boolean.TRUE); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, port3, new Integer(PORT2), "1", Boolean.TRUE }); @@ -690,11 +690,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "30000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -740,11 +740,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "30000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + 
createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -784,11 +784,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "30000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -822,11 +822,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { */ public void testCMRNotReturnedByRootRegionsMethod() throws Exception { - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" }); @@ -871,11 +871,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase { serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart", new Object[] { "60000" }); - createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); - final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost()); + createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1"); + final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost()); clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" }); - final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost()); + final String client2Host = 
NetworkUtils.getServerHostName(clientVM2.getHost()); clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache", new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
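The hunks above are a pure rename of the dunit networking helper: every NetworkSupport.getServerHostName(...) call becomes NetworkUtils.getServerHostName(...) with the same arguments and return value. A minimal sketch of the migrated call pattern, assuming the dunit Host/VM API shown in the diff; the VM index and the createClientCache(String, Integer, Integer, String) helper belong to the test itself and are shown only to illustrate the call shape:

    import com.gemstone.gemfire.test.dunit.Host;
    import com.gemstone.gemfire.test.dunit.NetworkUtils;
    import com.gemstone.gemfire.test.dunit.VM;

    // Resolve the server host name for a client VM exactly as the migrated
    // tests do, then hand it to the remote createClientCache helper.
    VM clientVM1 = Host.getHost(0).getVM(2);  // illustrative VM index
    final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
    clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
        new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });

PORT1 and PORT2 are the test's bridge-server port fields; since only the helper class name changes, no call site needs new arguments.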
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java index 5f63b21..7b410d6 100755 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java @@ -45,7 +45,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl; import com.gemstone.gemfire.internal.cache.HARegion; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.Host; -import com.gemstone.gemfire.test.dunit.Threads; +import com.gemstone.gemfire.test.dunit.ThreadUtils; import com.gemstone.gemfire.test.dunit.VM; import com.gemstone.gemfire.test.dunit.Wait; import com.gemstone.gemfire.test.dunit.WaitCriterion; @@ -854,7 +854,7 @@ public class HARegionQueueDUnitTest extends DistributedTestCase if (opThreads[i].isInterrupted()) { fail("Test failed because thread encountered exception"); } - Threads.join(opThreads[i], 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()); + ThreadUtils.join(opThreads[i], 30 * 1000); } } }; @@ -969,7 +969,7 @@ public class HARegionQueueDUnitTest extends DistributedTestCase } }; Wait.waitForCriterion(ev, 30 * 1000, 200, true); - Threads.join(createQueuesThread, 300 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()); + ThreadUtils.join(createQueuesThread, 300 * 1000); } }; @@ -983,7 +983,7 @@ public class HARegionQueueDUnitTest extends DistributedTestCase if (opThreads[0].isInterrupted()) { fail("The test has failed as it encountered interrupts in puts & takes"); } - Threads.join(opThreads[0], 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()); + ThreadUtils.join(opThreads[0], 30 * 1000); } }; vm0.invoke(joinWithThread); http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java index 8f9ba9c..48da630 100755 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java @@ -54,7 +54,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem; import com.gemstone.gemfire.internal.cache.Conflatable; import com.gemstone.gemfire.internal.cache.EventID; import com.gemstone.gemfire.internal.cache.RegionQueue; -import com.gemstone.gemfire.test.dunit.Threads; +import com.gemstone.gemfire.test.dunit.ThreadUtils; import com.gemstone.gemfire.test.junit.categories.IntegrationTest; /** @@ -292,7 +292,7 @@ public class HARegionQueueJUnitTest // call join on the put-threads so that this thread waits till they complete // before doing verfication for (i = 0; i < TOTAL_PUT_THREADS; i++) { - Threads.join(putThreads[i], 30 * 1000, null); + ThreadUtils.join(putThreads[i], 30 * 1000); } 
assertFalse(encounteredException); @@ -384,7 +384,7 @@ public class HARegionQueueJUnitTest long startTime = System.currentTimeMillis(); for (int k = 0; k < threads.length; k++) { - Threads.join(threads[k], 60 * 1000, null); + ThreadUtils.join(threads[k], 60 * 1000); } long totalTime = System.currentTimeMillis() - startTime; @@ -1252,8 +1252,8 @@ public class HARegionQueueJUnitTest }; thread1.start(); thread2.start(); - Threads.join(thread1, 30 * 1000, null); - Threads.join(thread2, 30 * 1000, null); + ThreadUtils.join(thread1, 30 * 1000); + ThreadUtils.join(thread2, 30 * 1000); List list2 = HARegionQueue.createMessageListForTesting(); Iterator iterator = list1.iterator(); boolean doOnce = false; @@ -1363,8 +1363,8 @@ public class HARegionQueueJUnitTest }; thread1.start(); thread2.start(); - Threads.join(thread1, 30 * 1000, null); - Threads.join(thread2, 30 * 1000, null); + ThreadUtils.join(thread1, 30 * 1000); + ThreadUtils.join(thread2, 30 * 1000); List list2 = HARegionQueue.createMessageListForTesting(); Iterator iterator = list1.iterator(); boolean doOnce = false; @@ -1483,8 +1483,8 @@ public class HARegionQueueJUnitTest }; thread1.start(); thread2.start(); - Threads.join(thread1, 30 * 1000, null); - Threads.join(thread2, 30 * 1000, null); + ThreadUtils.join(thread1, 30 * 1000); + ThreadUtils.join(thread2, 30 * 1000); List list2 = HARegionQueue.createMessageListForTesting(); Iterator iterator = list1.iterator(); boolean doOnce = true; @@ -1649,8 +1649,8 @@ public class HARegionQueueJUnitTest }; thread1.start(); thread2.start(); - Threads.join(thread1, 30 * 1000, null); - Threads.join(thread2, 30 * 1000, null); + ThreadUtils.join(thread1, 30 * 1000); + ThreadUtils.join(thread2, 30 * 1000); List list2 = HARegionQueue.createMessageListForTesting(); Iterator iterator = list1.iterator(); boolean doOnce = true; @@ -1769,7 +1769,7 @@ public class HARegionQueueJUnitTest long startTime = System.currentTimeMillis(); for (int k = 0; k < 3; k++) { - Threads.join(threads[k], 180 * 1000, null); + ThreadUtils.join(threads[k], 180 * 1000); } long totalTime = System.currentTimeMillis() - startTime; @@ -1848,7 +1848,7 @@ public class HARegionQueueJUnitTest long startTime = System.currentTimeMillis(); for (int k = 0; k < 3; k++) { - Threads.join(threads[k], 60 * 1000, null); + ThreadUtils.join(threads[k], 60 * 1000); } long totalTime = System.currentTimeMillis() - startTime; http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java index 9823fa6..6dfcc02 100755 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java @@ -40,7 +40,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil; import com.gemstone.gemfire.test.dunit.Assert; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.IgnoredException; -import com.gemstone.gemfire.test.dunit.NetworkSupport; +import com.gemstone.gemfire.test.dunit.NetworkUtils; import com.gemstone.gemfire.test.dunit.Host; import com.gemstone.gemfire.test.dunit.VM; import 
com.gemstone.gemfire.test.dunit.Wait; @@ -251,7 +251,7 @@ public class HASlowReceiverDUnitTest extends DistributedTestCase { public void testSlowClient() throws Exception { setBridgeObeserverForAfterQueueDestroyMessage(); clientVM.invoke(HASlowReceiverDUnitTest.class, "createClientCache", - new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT0), + new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT0), new Integer(PORT1), new Integer(PORT2), new Integer(2) }); clientVM.invoke(HASlowReceiverDUnitTest.class, "registerInterest"); // add expected socket exception string http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java index 4e47c5f..1aa3642 100755 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java @@ -36,7 +36,7 @@ import com.gemstone.gemfire.internal.cache.CacheServerImpl; import com.gemstone.gemfire.test.dunit.Assert; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.Host; -import com.gemstone.gemfire.test.dunit.NetworkSupport; +import com.gemstone.gemfire.test.dunit.NetworkUtils; import com.gemstone.gemfire.test.dunit.VM; import com.gemstone.gemfire.test.dunit.Wait; import com.gemstone.gemfire.test.dunit.WaitCriterion; @@ -118,7 +118,7 @@ public class OperationsPropagationDUnitTest extends DistributedTestCase PORT2 = ((Integer)server2.invoke(OperationsPropagationDUnitTest.class, "createServerCache")).intValue(); client1.invoke(OperationsPropagationDUnitTest.class, "createClientCache", - new Object[] { NetworkSupport.getServerHostName(host), new Integer(PORT2) }); + new Object[] { NetworkUtils.getServerHostName(host), new Integer(PORT2) }); } http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java index 2176a1c..9201a5f 100755 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java @@ -44,8 +44,8 @@ import com.gemstone.gemfire.internal.cache.EntryEventImpl; import com.gemstone.gemfire.internal.cache.EventID; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.Host; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; -import com.gemstone.gemfire.test.dunit.NetworkSupport; +import com.gemstone.gemfire.test.dunit.LogWriterUtils; +import com.gemstone.gemfire.test.dunit.NetworkUtils; import com.gemstone.gemfire.test.dunit.VM; /** @@ -136,11 +136,11 @@ public class PutAllDUnitTest extends DistributedTestCase PORT2 = ((Integer)server2.invoke(PutAllDUnitTest.class, 
"createServerCache")).intValue(); client1.invoke(PutAllDUnitTest.class, "createClientCache1", - new Object[] { NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) }); + new Object[] { NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) }); client2.invoke(PutAllDUnitTest.class, "createClientCache2", - new Object[] { NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT2) }); + new Object[] { NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT2) }); try { - createClientCache2(NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT2)); + createClientCache2(NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT2)); } catch (Exception e) { fail(" test failed due to "+e); @@ -550,7 +550,7 @@ public class PutAllDUnitTest extends DistributedTestCase public void afterCreate(EntryEvent event) { - LogWriterSupport.getLogWriter().fine(" entered after created with "+event.getKey()); + LogWriterUtils.getLogWriter().fine(" entered after created with "+event.getKey()); boolean shouldNotify = false; Object key = event.getKey(); if (key.equals(PUTALL_KEY1)) { http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java index 0e3b9b4..7ab929e 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java @@ -36,8 +36,8 @@ import com.gemstone.gemfire.distributed.DistributedSystem; import com.gemstone.gemfire.internal.AvailablePort; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.Host; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; -import com.gemstone.gemfire.test.dunit.NetworkSupport; +import com.gemstone.gemfire.test.dunit.LogWriterUtils; +import com.gemstone.gemfire.test.dunit.NetworkUtils; import com.gemstone.gemfire.test.dunit.VM; import com.gemstone.gemfire.test.dunit.Wait; import com.gemstone.gemfire.test.junit.categories.DistributedTest; @@ -179,9 +179,9 @@ public class StatsBugDUnitTest extends DistributedTestCase */ public void testBug36109() throws Exception { - LogWriterSupport.getLogWriter().info("testBug36109 : BEGIN"); + LogWriterUtils.getLogWriter().info("testBug36109 : BEGIN"); client1.invoke(StatsBugDUnitTest.class, "createClientCacheForInvalidates", new Object[] { - NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2) }); + NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2) }); client1.invoke(StatsBugDUnitTest.class, "prepopulateClient"); primary.invoke(StatsBugDUnitTest.class, "doEntryOperations", new Object[] { primaryPrefix }); @@ -204,7 +204,7 @@ public class StatsBugDUnitTest extends DistributedTestCase } client1.invoke(StatsBugDUnitTest.class, "verifyNumInvalidates"); - LogWriterSupport.getLogWriter().info("testBug36109 : END"); + LogWriterUtils.getLogWriter().info("testBug36109 : END"); } /** @@ -232,7 +232,7 @@ public class StatsBugDUnitTest extends DistributedTestCase server.setNotifyBySubscription(false); server.setSocketBufferSize(32768); server.start(); - 
LogWriterSupport.getLogWriter().info("Server started at PORT = " + port); + LogWriterUtils.getLogWriter().info("Server started at PORT = " + port); return new Integer(port); } @@ -257,7 +257,7 @@ public class StatsBugDUnitTest extends DistributedTestCase RegionAttributes attrs = factory.create(); Region region = cache.createRegion(REGION_NAME, attrs); region.registerInterest("ALL_KEYS"); - LogWriterSupport.getLogWriter().info("Client cache created"); + LogWriterUtils.getLogWriter().info("Client cache created"); } /** @@ -281,7 +281,7 @@ public class StatsBugDUnitTest extends DistributedTestCase RegionAttributes attrs = factory.create(); Region region = cache.createRegion(REGION_NAME, attrs); region.registerInterest("ALL_KEYS", false, false); - LogWriterSupport.getLogWriter().info("Client cache created"); + LogWriterUtils.getLogWriter().info("Client cache created"); } /** @@ -292,11 +292,11 @@ public class StatsBugDUnitTest extends DistributedTestCase public static void verifyNumInvalidates() { long invalidatesRecordedByStats = pool.getInvalidateCount(); - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "invalidatesRecordedByStats = " + invalidatesRecordedByStats); int expectedInvalidates = TOTAL_SERVERS * PUTS_PER_SERVER; - LogWriterSupport.getLogWriter().info("expectedInvalidates = " + expectedInvalidates); + LogWriterUtils.getLogWriter().info("expectedInvalidates = " + expectedInvalidates); if (invalidatesRecordedByStats != expectedInvalidates) { fail("Invalidates received by client(" + invalidatesRecordedByStats http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java index 89bc65f..934772c 100755 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java @@ -38,9 +38,9 @@ import com.gemstone.gemfire.internal.cache.TXRegionLockRequestImpl; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.Host; import com.gemstone.gemfire.test.dunit.Invoke; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; +import com.gemstone.gemfire.test.dunit.LogWriterUtils; import com.gemstone.gemfire.test.dunit.SerializableRunnable; -import com.gemstone.gemfire.test.dunit.Threads; +import com.gemstone.gemfire.test.dunit.ThreadUtils; /** * This class tests distributed ownership via the DistributedLockService api. 
@@ -126,7 +126,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { } public void disable_testTXRecoverGrantorMessageProcessor() throws Exception { - LogWriterSupport.getLogWriter().info("[testTXOriginatorRecoveryProcessor]"); + LogWriterUtils.getLogWriter().info("[testTXOriginatorRecoveryProcessor]"); TXLockService.createDTLS(); checkDLockRecoverGrantorMessageProcessor(); @@ -176,7 +176,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { dtls.release(txLockId); // check results to verify no locks were provided in reply - Threads.join(thread, 30 * 1000, LogWriterSupport.getLogWriter()); + ThreadUtils.join(thread, 30 * 1000); assertEquals("testTXRecoverGrantor_replyCode_PASS is false", true, testTXRecoverGrantor_replyCode_PASS); assertEquals("testTXRecoverGrantor_heldLocks_PASS is false", true, @@ -185,7 +185,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { protected static volatile TXLockId testTXLock_TXLockId; public void testTXLock() { - LogWriterSupport.getLogWriter().info("[testTXLock]"); + LogWriterUtils.getLogWriter().info("[testTXLock]"); final int grantorVM = 0; final int clientA = 1; final int clientB = 2; @@ -203,7 +203,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { )); // create grantor - LogWriterSupport.getLogWriter().info("[testTXLock] create grantor"); + LogWriterUtils.getLogWriter().info("[testTXLock] create grantor"); Host.getHost(0).getVM(grantorVM).invoke(new SerializableRunnable() { public void run() { @@ -213,7 +213,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { sleep(20); // create client and request txLock - LogWriterSupport.getLogWriter().info("[testTXLock] create clientA and request txLock"); + LogWriterUtils.getLogWriter().info("[testTXLock] create clientA and request txLock"); Host.getHost(0).getVM(clientA).invoke(new SerializableRunnable() { public void run() { @@ -231,7 +231,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { }); // create nuther client and request overlapping txLock... 
verify fails - LogWriterSupport.getLogWriter().info("[testTXLock] create clientB and fail txLock"); + LogWriterUtils.getLogWriter().info("[testTXLock] create clientB and fail txLock"); Host.getHost(0).getVM(clientB).invoke(new SerializableRunnable() { public void run() { @@ -261,7 +261,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { */ // release txLock - LogWriterSupport.getLogWriter().info("[testTXLock] clientA releases txLock"); + LogWriterUtils.getLogWriter().info("[testTXLock] clientA releases txLock"); Host.getHost(0).getVM(clientA).invoke( new SerializableRunnable("[testTXLock] clientA releases txLock") { @@ -273,7 +273,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { sleep(20); // try nuther client again and verify success - LogWriterSupport.getLogWriter().info("[testTXLock] clientB requests txLock"); + LogWriterUtils.getLogWriter().info("[testTXLock] clientB requests txLock"); Host.getHost(0).getVM(clientB).invoke( new SerializableRunnable("[testTXLock] clientB requests txLock") { @@ -285,7 +285,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { }); // release txLock - LogWriterSupport.getLogWriter().info("[testTXLock] clientB releases txLock"); + LogWriterUtils.getLogWriter().info("[testTXLock] clientB releases txLock"); Host.getHost(0).getVM(clientB).invoke( new SerializableRunnable("[testTXLock] clientB releases txLock") { @@ -298,7 +298,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { protected static volatile TXLockId testTXOriginatorRecoveryProcessor_TXLockId; public void testTXOriginatorRecoveryProcessor() { - LogWriterSupport.getLogWriter().info("[testTXOriginatorRecoveryProcessor]"); + LogWriterUtils.getLogWriter().info("[testTXOriginatorRecoveryProcessor]"); final int originatorVM = 0; final int grantorVM = 1; final int particpantA = 2; @@ -324,7 +324,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { } // create grantor - LogWriterSupport.getLogWriter().info("[testTXOriginatorRecoveryProcessor] grantorVM becomes grantor"); + LogWriterUtils.getLogWriter().info("[testTXOriginatorRecoveryProcessor] grantorVM becomes grantor"); Host.getHost(0).getVM(grantorVM).invoke(new SerializableRunnable() { public void run() { @@ -341,7 +341,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { Boolean.TRUE, isGrantor); // have a originatorVM get a txLock with three participants including grantor - LogWriterSupport.getLogWriter().info("[testTXOriginatorRecoveryProcessor] originatorVM requests txLock"); + LogWriterUtils.getLogWriter().info("[testTXOriginatorRecoveryProcessor] originatorVM requests txLock"); Host.getHost(0).getVM(originatorVM).invoke(new SerializableRunnable() { public void run() { @@ -425,14 +425,14 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { } public void testDTLSIsDistributed() { - LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed]"); + LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed]"); // have all vms lock and hold the same LTLS lock simultaneously final Host host = Host.getHost(0); int vmCount = host.getVMCount(); for (int vm = 0; vm < vmCount; vm++) { final int finalvm = vm; - LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed] testing vm " + finalvm); + LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed] testing vm " + finalvm); Host.getHost(0).getVM(finalvm).invoke(new SerializableRunnable() { public void run() { @@ -445,21 +445,21 @@ public class TXLockServiceDUnitTest 
extends DistributedTestCase { TXLockServiceDUnitTest.class, "isDistributed_DTLS", new Object[] {}); assertEquals("isDistributed should be true for DTLS", Boolean.TRUE, isDistributed); - LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed] isDistributed=" + isDistributed); + LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed] isDistributed=" + isDistributed); // lock a key... Boolean gotLock = (Boolean)host.getVM(finalvm).invoke( TXLockServiceDUnitTest.class, "lock_DTLS", new Object[] {"KEY"}); assertEquals("gotLock is false after calling lock_DTLS", Boolean.TRUE, gotLock); - LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed] gotLock=" + gotLock); + LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed] gotLock=" + gotLock); // unlock it... Boolean unlock = (Boolean)host.getVM(finalvm).invoke( TXLockServiceDUnitTest.class, "unlock_DTLS", new Object[] {"KEY"}); assertEquals("unlock is false after calling unlock_DTLS", Boolean.TRUE, unlock); - LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed] unlock=" + unlock); + LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed] unlock=" + unlock); } } @@ -670,14 +670,14 @@ public class TXLockServiceDUnitTest extends DistributedTestCase { Host host = Host.getHost(0); int vmCount = host.getVMCount(); for (int i=0; i<vmCount; i++) { - LogWriterSupport.getLogWriter().info("Invoking " + methodName + "on VM#" + i); + LogWriterUtils.getLogWriter().info("Invoking " + methodName + "on VM#" + i); host.getVM(i).invoke(this.getClass(), methodName, args); } } public Properties getDistributedSystemProperties() { Properties props = super.getDistributedSystemProperties(); - props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel()); + props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel()); return props; } http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java index 80f5df5..0371df7 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java @@ -111,7 +111,7 @@ public class Bug39356DUnitTest extends CacheTestCase { SerializableRunnable verifyBuckets = new SerializableRunnable("Verify buckets") { public void run() { - LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter(); + LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter(); Cache cache = getCache(); PartitionedRegion r = (PartitionedRegion) cache.getRegion(REGION_NAME); for(int i = 0; i < r.getAttributes().getPartitionAttributes().getTotalNumBuckets(); i++) { http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java index 379ab04..d7e6a16 100644 --- 
a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java @@ -36,7 +36,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion; import com.gemstone.gemfire.internal.cache.RegionEntry; import com.gemstone.gemfire.internal.i18n.LocalizedStrings; import com.gemstone.gemfire.test.dunit.DistributedTestCase; -import com.gemstone.gemfire.test.dunit.DistributedTestSupport; +import com.gemstone.gemfire.test.dunit.DistributedTestUtils; import com.gemstone.gemfire.test.dunit.Host; import com.gemstone.gemfire.test.dunit.IgnoredException; import com.gemstone.gemfire.test.dunit.VM; @@ -237,7 +237,7 @@ public class Bug43684DUnitTest extends DistributedTestCase { public static Integer createServerCache(Boolean isReplicated, Boolean isPrimaryEmpty) throws Exception { DistributedTestCase.disconnectFromDS(); Properties props = new Properties(); - props.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]"); + props.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]"); // props.setProperty("log-file", "server_" + OSProcess.getId() + ".log"); // props.setProperty("log-level", "fine"); props.setProperty("statistic-archive-file", "server_" + OSProcess.getId() http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java index 0b81cec..0f9a3b7 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java @@ -37,9 +37,9 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy; import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxyStats; import com.gemstone.gemfire.test.dunit.AsyncInvocation; import com.gemstone.gemfire.test.dunit.DistributedTestCase; -import com.gemstone.gemfire.test.dunit.DistributedTestSupport; +import com.gemstone.gemfire.test.dunit.DistributedTestUtils; import com.gemstone.gemfire.test.dunit.Host; -import com.gemstone.gemfire.test.dunit.NetworkSupport; +import com.gemstone.gemfire.test.dunit.NetworkUtils; import com.gemstone.gemfire.test.dunit.VM; /** @@ -94,7 +94,7 @@ public class Bug51400DUnitTest extends DistributedTestCase { public static Integer createServerCache(Integer mcastPort, Integer maxMessageCount) throws Exception { Properties props = new Properties(); - props.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]"); + props.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]"); // props.setProperty("log-file", "server_" + OSProcess.getId() + ".log"); // props.setProperty("log-level", "fine"); // props.setProperty("statistic-archive-file", "server_" + OSProcess.getId() @@ -177,7 +177,7 @@ public class Bug51400DUnitTest extends DistributedTestCase { "createServerCache", new Object[] { maxQSize }); client1.invoke(Bug51400DUnitTest.class, "createClientCache", - new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer[]{port1}, 
ackInterval}); + new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer[]{port1}, ackInterval}); // Do puts from server as well as from client on the same key. AsyncInvocation ai1 = server0.invokeAsync(Bug51400DUnitTest.class, http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java index 56ced6c..d74a94b 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java @@ -29,8 +29,7 @@ import com.gemstone.gemfire.internal.cache.DiskStoreImpl; import com.gemstone.gemfire.internal.cache.GemFireCacheImpl; import com.gemstone.gemfire.test.dunit.AsyncInvocation; import com.gemstone.gemfire.test.dunit.IgnoredException; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; -import com.gemstone.gemfire.test.dunit.Threads; +import com.gemstone.gemfire.test.dunit.ThreadUtils; import com.gemstone.gemfire.test.dunit.Host; import com.gemstone.gemfire.test.dunit.VM; import com.gemstone.gemfire.test.dunit.Wait; @@ -102,7 +101,7 @@ public class PersistPRKRFDUnitTest extends PersistentPartitionedRegionTestBase { } } }); - Threads.join(async1, MAX_WAIT, LogWriterSupport.getLogWriter()); + ThreadUtils.join(async1, MAX_WAIT); closeCache(vm0); // update @@ -145,7 +144,7 @@ public class PersistPRKRFDUnitTest extends PersistentPartitionedRegionTestBase { } } }); - Threads.join(async1, MAX_WAIT, LogWriterSupport.getLogWriter()); + ThreadUtils.join(async1, MAX_WAIT); closeCache(vm0); // destroy @@ -188,7 +187,7 @@ public class PersistPRKRFDUnitTest extends PersistentPartitionedRegionTestBase { } } }); - Threads.join(async1, MAX_WAIT, LogWriterSupport.getLogWriter()); + ThreadUtils.join(async1, MAX_WAIT); checkData(vm0, 0, 10, "a"); checkData(vm0, 10, 11, null); http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java index 4364b0f..abf546d 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java @@ -78,8 +78,8 @@ import com.gemstone.gemfire.test.dunit.Assert; import com.gemstone.gemfire.test.dunit.AsyncInvocation; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.IgnoredException; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; -import com.gemstone.gemfire.test.dunit.NetworkSupport; +import com.gemstone.gemfire.test.dunit.LogWriterUtils; +import com.gemstone.gemfire.test.dunit.NetworkUtils; import com.gemstone.gemfire.test.dunit.Host; import 
com.gemstone.gemfire.test.dunit.RMIException; import com.gemstone.gemfire.test.dunit.SerializableCallable; @@ -1387,7 +1387,7 @@ public class PersistentPartitionedRegionDUnitTest extends PersistentPartitionedR Cache cache = getCache(); PoolFactory pf = PoolManager.createFactory(); - pf.addServer(NetworkSupport.getServerHostName(host), serverPort); + pf.addServer(NetworkUtils.getServerHostName(host), serverPort); pf.setSubscriptionEnabled(true); pf.create("pool"); AttributesFactory af = new AttributesFactory(); @@ -1450,7 +1450,7 @@ public class PersistentPartitionedRegionDUnitTest extends PersistentPartitionedR DistributedTestCase.disconnectFromDS(); await().atMost(30, SECONDS).until(() -> {return (cache == null || cache.isClosed());}); - LogWriterSupport.getLogWriter().info("Cache is confirmed closed"); + LogWriterUtils.getLogWriter().info("Cache is confirmed closed"); } } }); @@ -1661,7 +1661,7 @@ public class PersistentPartitionedRegionDUnitTest extends PersistentPartitionedR } }); - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); createPR(vm0, 1, 0, 1); //Make sure we create a bucket @@ -1671,7 +1671,7 @@ public class PersistentPartitionedRegionDUnitTest extends PersistentPartitionedR IgnoredException ex = IgnoredException.addIgnoredException("PartitionOfflineException"); try { - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); createPR(vm1, 1, 0, 1); //Make sure get a partition offline exception http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java index 17cd552..3b89271 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java @@ -59,7 +59,7 @@ import com.gemstone.gemfire.internal.cache.persistence.PersistentMemberID; import com.gemstone.gemfire.test.dunit.Assert; import com.gemstone.gemfire.test.dunit.AsyncInvocation; import com.gemstone.gemfire.test.dunit.Invoke; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; +import com.gemstone.gemfire.test.dunit.LogWriterUtils; import com.gemstone.gemfire.test.dunit.SerializableCallable; import com.gemstone.gemfire.test.dunit.SerializableRunnable; import com.gemstone.gemfire.test.dunit.VM; @@ -203,7 +203,7 @@ public abstract class PersistentPartitionedRegionTestBase extends CacheTestCase protected void createData(VM vm, final int startKey, final int endKey, final String value) { - LogWriterSupport.getLogWriter().info("createData invoked. PR_REGION_NAME is " + PR_REGION_NAME); + LogWriterUtils.getLogWriter().info("createData invoked. 
PR_REGION_NAME is " + PR_REGION_NAME); createData(vm, startKey, endKey,value, PR_REGION_NAME); } @@ -213,7 +213,7 @@ public abstract class PersistentPartitionedRegionTestBase extends CacheTestCase public void run() { Cache cache = getCache(); - LogWriterSupport.getLogWriter().info("creating data in " + regionName); + LogWriterUtils.getLogWriter().info("creating data in " + regionName); Region region = cache.getRegion(regionName); for(int i =startKey; i < endKey; i++) { @@ -790,7 +790,7 @@ public abstract class PersistentPartitionedRegionTestBase extends CacheTestCase BufferedReader br = new BufferedReader(new InputStreamReader(is)); String line; while((line = br.readLine()) != null) { - LogWriterSupport.getLogWriter().fine("OUTPUT:" + line); + LogWriterUtils.getLogWriter().fine("OUTPUT:" + line); //TODO validate output }; http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java index e81ba89..90118d1 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java @@ -24,7 +24,7 @@ import com.gemstone.gemfire.internal.cache.TXManagerImpl; import com.gemstone.gemfire.test.dunit.AsyncInvocation; import com.gemstone.gemfire.test.dunit.Host; import com.gemstone.gemfire.test.dunit.Invoke; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; +import com.gemstone.gemfire.test.dunit.LogWriterUtils; import com.gemstone.gemfire.test.dunit.SerializableRunnable; import com.gemstone.gemfire.test.dunit.VM; @@ -131,12 +131,12 @@ public class PersistentPartitionedRegionWithTransactionDUnitTest extends Persist @Override protected void createData(VM vm, final int startKey, final int endKey, final String value, final String regionName) { - LogWriterSupport.getLogWriter().info("creating runnable to create data for region " + regionName); + LogWriterUtils.getLogWriter().info("creating runnable to create data for region " + regionName); SerializableRunnable createData = new SerializableRunnable() { public void run() { Cache cache = getCache(); - LogWriterSupport.getLogWriter().info("getting region " + regionName); + LogWriterUtils.getLogWriter().info("getting region " + regionName); Region region = cache.getRegion(regionName); for(int i =startKey; i < endKey; i++) { @@ -168,7 +168,7 @@ public class PersistentPartitionedRegionWithTransactionDUnitTest extends Persist public void run() { Cache cache = getCache(); - LogWriterSupport.getLogWriter().info("checking data in " + regionName); + LogWriterUtils.getLogWriter().info("checking data in " + regionName); Region region = cache.getRegion(regionName); for(int i =startKey; i < endKey; i++) { http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java ---------------------------------------------------------------------- diff --git 
a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java index bf6cad8..1b00b8c 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java @@ -73,7 +73,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy; import com.gemstone.gemfire.internal.cache.tier.sockets.Message; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.Host; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; +import com.gemstone.gemfire.test.dunit.LogWriterUtils; import com.gemstone.gemfire.test.dunit.VM; import com.gemstone.gemfire.test.dunit.Wait; import com.gemstone.gemfire.test.dunit.WaitCriterion; @@ -463,7 +463,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }"); + LogWriterUtils.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }"); } } @@ -487,7 +487,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }"); + LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }"); } } } @@ -514,7 +514,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Shipment :- { " + shipmentId + " : " + shipment + " }"); } } @@ -538,7 +538,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }"); + LogWriterUtils.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }"); } } @@ -562,7 +562,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }"); + LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }"); } } } @@ -589,7 +589,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Shipment :- { " + shipmentId + " : " + shipment + " }"); } } @@ -614,7 +614,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ", e); } - LogWriterSupport.getLogWriter() + LogWriterUtils.getLogWriter() .info("Customer :- { " + custid + " : " + customer + " }"); } } @@ -641,7 +641,7 @@ public class FixedPartitioningTestBase 
extends DistributedTestCase { "putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }"); + LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }"); } } } @@ -670,7 +670,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Shipment :- { " + shipmentId + " : " + shipment + " }"); } } @@ -696,7 +696,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ", e); } - LogWriterSupport.getLogWriter() + LogWriterUtils.getLogWriter() .info("Customer :- { " + custid + " : " + customer + " }"); } } @@ -723,7 +723,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }"); + LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }"); } } } @@ -752,7 +752,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { "putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ", e); } - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Shipment :- { " + shipmentId + " : " + shipment + " }"); } } @@ -853,7 +853,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { // assertNotNull(orderPartitionedregion.get(orderId)); if (custId.equals(orderId.getCustId())) { - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( orderId + "belongs to node " + idmForCustomer + " " + idmForOrder); assertEquals(idmForCustomer, idmForOrder); @@ -865,7 +865,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { ShipmentId shipmentId = (ShipmentId)shipmentIterator.next(); // assertNotNull(shipmentPartitionedregion.get(shipmentId)); if (orderId.equals(shipmentId.getOrderId())) { - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( shipmentId + "belongs to node " + idmForOrder + " " + idmForShipment); } @@ -1089,15 +1089,15 @@ public class FixedPartitioningTestBase extends DistributedTestCase { Integer primaryBuckets) { HashMap localBucket2RegionMap = (HashMap)region_FPR.getDataStore() .getSizeLocally(); - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Size of the " + region_FPR + " in this VM :- " + localBucket2RegionMap.size() + "List of buckets : " + localBucket2RegionMap.keySet()); assertEquals(numBuckets.intValue(), localBucket2RegionMap.size()); - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Size of primary buckets the " + region_FPR + " in this VM :- " + region_FPR.getDataStore().getNumberOfPrimaryBucketsManaged()); - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Lit of Primaries in this VM :- " + region_FPR.getDataStore().getAllLocalPrimaryBucketIds()); @@ -1109,15 +1109,15 @@ public class FixedPartitioningTestBase extends DistributedTestCase { Integer numBuckets, Integer primaryBuckets) { HashMap localBucket2RegionMap = (HashMap)region_FPR.getDataStore() .getSizeLocally(); - 
LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Size of the " + region_FPR + " in this VM :- " + localBucket2RegionMap.size() + "List of buckets : " + localBucket2RegionMap.keySet()); assertEquals(numBuckets.intValue(), localBucket2RegionMap.size()); - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Size of primary buckets the " + region_FPR + " in this VM :- " + region_FPR.getDataStore().getNumberOfPrimaryBucketsManaged()); - LogWriterSupport.getLogWriter().info( + LogWriterUtils.getLogWriter().info( "Lit of Primaries in this VM :- " + region_FPR.getDataStore().getAllLocalPrimaryBucketIds()); @@ -1307,7 +1307,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { } }; Wait.waitForCriterion(wc, 20000, 500, false); - LogWriterSupport.getLogWriter().info("end of beforeCalculatingStartingBucketId"); + LogWriterUtils.getLogWriter().info("end of beforeCalculatingStartingBucketId"); } }); } @@ -1364,7 +1364,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { throw e; } catch (Throwable t) { - LogWriterSupport.getLogWriter().error(t); + LogWriterUtils.getLogWriter().error(t); } } } @@ -1378,7 +1378,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { throw e; } catch (Throwable t) { - LogWriterSupport.getLogWriter().error("Error in closing the cache ", t); + LogWriterUtils.getLogWriter().error("Error in closing the cache ", t); } } @@ -1386,7 +1386,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase { try { cleanDiskDirs(); } catch(IOException e) { - LogWriterSupport.getLogWriter().error("Error cleaning disk dirs", e); + LogWriterUtils.getLogWriter().error("Error cleaning disk dirs", e); } } http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java index 5f62346..ecaa1ba 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java @@ -68,7 +68,7 @@ import com.gemstone.gemfire.internal.cache.versions.VersionTag; import com.gemstone.gemfire.test.dunit.AsyncInvocation; import com.gemstone.gemfire.test.dunit.IgnoredException; import com.gemstone.gemfire.test.dunit.Invoke; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; +import com.gemstone.gemfire.test.dunit.LogWriterUtils; import com.gemstone.gemfire.test.dunit.Host; import com.gemstone.gemfire.test.dunit.SerializableCallable; import com.gemstone.gemfire.test.dunit.SerializableRunnable; @@ -415,7 +415,7 @@ public class PersistentRVVRecoveryDUnitTest extends PersistentReplicatedTestBase Region.Entry entry = ((PartitionedRegion)region).getEntry("testKey", true /*Entry is destroyed*/); RegionEntry re = ((EntrySnapshot)entry).getRegionEntry(); - LogWriterSupport.getLogWriter().fine("RegionEntry for testKey: " + re.getKey() + " " + re.getValueInVM((LocalRegion) region)); + LogWriterUtils.getLogWriter().fine("RegionEntry for testKey: " + re.getKey() + " " + re.getValueInVM((LocalRegion) region)); 
assertTrue(re.getValueInVM((LocalRegion) region) instanceof Tombstone); VersionTag tag = re.getVersionStamp().asVersionTag(); @@ -839,7 +839,7 @@ public class PersistentRVVRecoveryDUnitTest extends PersistentReplicatedTestBase for(int i = 0; i < 3; i++) { NonTXEntry entry = (NonTXEntry) recoveredRegion.getEntry("key" + i); tagsFromKrf[i] = entry.getRegionEntry().getVersionStamp().asVersionTag(); - LogWriterSupport.getLogWriter().info("krfTag[" + i + "]="+ tagsFromKrf[i] + ",value=" + entry.getValue()); + LogWriterUtils.getLogWriter().info("krfTag[" + i + "]="+ tagsFromKrf[i] + ",value=" + entry.getValue()); } closeCache(); @@ -854,7 +854,7 @@ public class PersistentRVVRecoveryDUnitTest extends PersistentReplicatedTestBase for(int i = 0; i < 3; i++) { NonTXEntry entry = (NonTXEntry) recoveredRegion.getEntry("key" + i); tagsFromCrf[i] = entry.getRegionEntry().getVersionStamp().asVersionTag(); - LogWriterSupport.getLogWriter().info("crfTag[" + i + "]="+ tagsFromCrf[i] + ",value=" + entry.getValue()); + LogWriterUtils.getLogWriter().info("crfTag[" + i + "]="+ tagsFromCrf[i] + ",value=" + entry.getValue()); } //Make sure the version tags from the krf and the crf match. http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java index 0a7e64e..f20762c 100644 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java @@ -73,7 +73,7 @@ import com.gemstone.gemfire.internal.cache.versions.RegionVersionVector; import com.gemstone.gemfire.test.dunit.Assert; import com.gemstone.gemfire.test.dunit.AsyncInvocation; import com.gemstone.gemfire.test.dunit.IgnoredException; -import com.gemstone.gemfire.test.dunit.LogWriterSupport; +import com.gemstone.gemfire.test.dunit.LogWriterUtils; import com.gemstone.gemfire.test.dunit.Host; import com.gemstone.gemfire.test.dunit.SerializableCallable; import com.gemstone.gemfire.test.dunit.SerializableRunnable; @@ -111,31 +111,31 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); createPersistentRegion(vm0); - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); createPersistentRegion(vm1); putAnEntry(vm0); - LogWriterSupport.getLogWriter().info("closing region in vm0"); + LogWriterUtils.getLogWriter().info("closing region in vm0"); closeRegion(vm0); updateTheEntry(vm1); - LogWriterSupport.getLogWriter().info("closing region in vm1"); + LogWriterUtils.getLogWriter().info("closing region in vm1"); closeRegion(vm1); //This ought to wait for VM1 to come back - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); AsyncInvocation future = createPersistentRegionAsync(vm0); waitForBlockedInitialization(vm0); 
assertTrue(future.isAlive()); - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); createPersistentRegion(vm1); future.join(MAX_WAIT); @@ -164,9 +164,9 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); createPersistentRegion(vm0); - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); createPersistentRegion(vm1); putAnEntry(vm0); @@ -181,17 +181,17 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa } }); - LogWriterSupport.getLogWriter().info("closing region in vm0"); + LogWriterUtils.getLogWriter().info("closing region in vm0"); closeRegion(vm0); updateTheEntry(vm1); - LogWriterSupport.getLogWriter().info("closing region in vm1"); + LogWriterUtils.getLogWriter().info("closing region in vm1"); closeCache(vm1); //This ought to wait for VM1 to come back - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); AsyncInvocation future = createPersistentRegionAsync(vm0); waitForBlockedInitialization(vm0); @@ -209,7 +209,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa adminDS = AdminDistributedSystemFactory.getDistributedSystem(config); adminDS.connect(); Set<PersistentID> missingIds = adminDS.getMissingPersistentMembers(); - LogWriterSupport.getLogWriter().info("waiting members=" + missingIds); + LogWriterUtils.getLogWriter().info("waiting members=" + missingIds); assertEquals(1, missingIds.size()); PersistentID missingMember = missingIds.iterator().next(); adminDS.revokePersistentMember( @@ -252,7 +252,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa //Now, we should not be able to create a region //in vm1, because the this member was revoked - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); IgnoredException e = IgnoredException.addIgnoredException(RevokedPersistentDataException.class.getSimpleName(), vm1); try { createPersistentRegion(vm1); @@ -299,9 +299,9 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); createPersistentRegion(vm0); - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); createPersistentRegion(vm1); putAnEntry(vm0); @@ -316,12 +316,12 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa } }); - LogWriterSupport.getLogWriter().info("closing region in vm0"); + LogWriterUtils.getLogWriter().info("closing region in vm0"); closeRegion(vm0); updateTheEntry(vm1); - LogWriterSupport.getLogWriter().info("closing region in vm1"); + LogWriterUtils.getLogWriter().info("closing region in vm1"); closeRegion(vm1); final File dirToRevoke = getDiskDirForVM(vm1); @@ -347,7 +347,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa }); //This shouldn't wait, because we revoked the member - LogWriterSupport.getLogWriter().info("Creating 
region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); createPersistentRegion(vm0); checkForRecoveryStat(vm0, true); @@ -366,7 +366,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa //Now, we should not be able to create a region //in vm1, because the this member was revoked - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); IgnoredException e = IgnoredException.addIgnoredException(RevokedPersistentDataException.class.getSimpleName(), vm1); try { createPersistentRegion(vm1); @@ -392,9 +392,9 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa VM vm2 = host.getVM(2); VM vm3 = host.getVM(3); - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); createPersistentRegion(vm0); - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); createPersistentRegion(vm1); createPersistentRegion(vm2); @@ -410,28 +410,28 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa } }); - LogWriterSupport.getLogWriter().info("closing region in vm0"); + LogWriterUtils.getLogWriter().info("closing region in vm0"); closeRegion(vm0); updateTheEntry(vm1); - LogWriterSupport.getLogWriter().info("closing region in vm1"); + LogWriterUtils.getLogWriter().info("closing region in vm1"); closeRegion(vm1); updateTheEntry(vm2, "D"); - LogWriterSupport.getLogWriter().info("closing region in vm2"); + LogWriterUtils.getLogWriter().info("closing region in vm2"); closeRegion(vm2); //These ought to wait for VM2 to come back - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); AsyncInvocation future0 = createPersistentRegionAsync(vm0); waitForBlockedInitialization(vm0); assertTrue(future0.isAlive()); - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); final AsyncInvocation future1 = createPersistentRegionAsync(vm1); waitForBlockedInitialization(vm1); assertTrue(future1.isAlive()); @@ -447,7 +447,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa adminDS = AdminDistributedSystemFactory.getDistributedSystem(config); adminDS.connect(); Set<PersistentID> missingIds = adminDS.getMissingPersistentMembers(); - LogWriterSupport.getLogWriter().info("waiting members=" + missingIds); + LogWriterUtils.getLogWriter().info("waiting members=" + missingIds); assertEquals(1, missingIds.size()); } catch (AdminException e) { throw new RuntimeException(e); @@ -582,12 +582,12 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa restoreBackup(vm1); //This ought to wait for VM1 to come back - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); AsyncInvocation future = createPersistentRegionAsync(vm0); waitForBlockedInitialization(vm0); assertTrue(future.isAlive()); - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); createPersistentRegion(vm1); future.join(MAX_WAIT); @@ -1077,24 +1077,24 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); - 
LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); createPersistentRegion(vm0); - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); createPersistentRegion(vm1); putAnEntry(vm0); - LogWriterSupport.getLogWriter().info("closing region in vm0"); + LogWriterUtils.getLogWriter().info("closing region in vm0"); closeRegion(vm0); updateTheEntry(vm1); - LogWriterSupport.getLogWriter().info("closing region in vm1"); + LogWriterUtils.getLogWriter().info("closing region in vm1"); closeRegion(vm1); //This ought to wait for VM1 to come back - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); AsyncInvocation future = createPersistentRegionAsync(vm0); waitForBlockedInitialization(vm0); @@ -1180,7 +1180,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa final VM vm1 = host.getVM(1); final VM vm2 = host.getVM(2); - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); createPersistentRegion(vm0); //Add a hook which will disconnect from the distributed @@ -1386,10 +1386,10 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa Cache cache = getCache(); Region region = cache.getRegion(REGION_NAME); if (region == null) { - LogWriterSupport.getLogWriter().severe("removing listener for PersistentRecoveryOrderDUnitTest because region was not found: " + REGION_NAME); + LogWriterUtils.getLogWriter().severe("removing listener for PersistentRecoveryOrderDUnitTest because region was not found: " + REGION_NAME); Object old = DistributionMessageObserver.setInstance(null); if (old != this) { - LogWriterSupport.getLogWriter().severe("removed listener was not the invoked listener", new Exception("stack trace")); + LogWriterUtils.getLogWriter().severe("removed listener was not the invoked listener", new Exception("stack trace")); } return; } @@ -1673,17 +1673,17 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); createPersistentRegion(vm0); putAnEntry(vm0); - LogWriterSupport.getLogWriter().info("closing region in vm0"); + LogWriterUtils.getLogWriter().info("closing region in vm0"); closeCache(vm0); - LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); createPersistentRegion(vm1); putAnEntry(vm1); - LogWriterSupport.getLogWriter().info("Creating region in VM0"); + LogWriterUtils.getLogWriter().info("Creating region in VM0"); IgnoredException ex = IgnoredException.addIgnoredException("ConflictingPersistentDataException", vm0); try { //this should cause a conflict @@ -1697,7 +1697,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa ex.remove(); } - LogWriterSupport.getLogWriter().info("closing region in vm1"); + LogWriterUtils.getLogWriter().info("closing region in vm1"); closeCache(vm1); //This should work now @@ -1707,7 +1707,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa ex = IgnoredException.addIgnoredException("ConflictingPersistentDataException", vm1); //Now make sure vm1 gets a conflict - 
LogWriterSupport.getLogWriter().info("Creating region in VM1"); + LogWriterUtils.getLogWriter().info("Creating region in VM1"); try { //this should cause a conflict createPersistentRegion(vm1); @@ -1780,11 +1780,11 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa @Override public Properties getDistributedSystemProperties() { - LogWriterSupport.getLogWriter().info("Looking for ack-wait-threshold"); + LogWriterUtils.getLogWriter().info("Looking for ack-wait-threshold"); String s = System.getProperty("gemfire.ack-wait-threshold"); if (s != null) { SAVED_ACK_WAIT_THRESHOLD = s; - LogWriterSupport.getLogWriter().info("removing system property gemfire.ack-wait-threshold"); + LogWriterUtils.getLogWriter().info("removing system property gemfire.ack-wait-threshold"); System.getProperties().remove("gemfire.ack-wait-threshold"); } Properties props = super.getDistributedSystemProperties(); http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java ---------------------------------------------------------------------- diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java index e9fb1ed..a3976a7 100755 --- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java +++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java @@ -33,7 +33,7 @@ import com.gemstone.gemfire.internal.cache.tier.ConnectionProxy; import com.gemstone.gemfire.test.dunit.Assert; import com.gemstone.gemfire.test.dunit.DistributedTestCase; import com.gemstone.gemfire.test.dunit.Host; -import com.gemstone.gemfire.test.dunit.NetworkSupport; +import com.gemstone.gemfire.test.dunit.NetworkUtils; import com.gemstone.gemfire.test.dunit.VM; import com.gemstone.gemfire.cache.client.PoolManager; import com.gemstone.gemfire.cache.client.internal.ConnectionFactoryImpl; @@ -170,7 +170,7 @@ public class BackwardCompatibilityHigherVersionClientDUnitTest extends "setHandshakeVersionForTesting"); client1.invoke(BackwardCompatibilityHigherVersionClientDUnitTest.class, "createClientCache", new Object[] { - NetworkSupport.getServerHostName(server1.getHost()), port1 }); + NetworkUtils.getServerHostName(server1.getHost()), port1 }); client1.invoke(BackwardCompatibilityHigherVersionClientDUnitTest.class, "verifyConnectionToServerFailed"); }
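
For reference, the sketch below shows how a DUnit test would call the renamed helpers after this change. It is an illustrative example only, not code from this commit: the test class and method names are hypothetical, but the LogWriterUtils.getLogWriter() and NetworkUtils.getServerHostName(...) calls mirror the replacements applied in the diffs above.

import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;

// Hypothetical sketch, not part of this commit: demonstrates the renamed
// helper classes LogWriterUtils and NetworkUtils in a DUnit-style test.
public class RenamedHelpersExampleDUnitTest extends DistributedTestCase {

  public RenamedHelpersExampleDUnitTest(String name) {
    super(name);
  }

  public void testLoggingAndHostLookup() {
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);

    // LogWriterUtils replaces the old LogWriterSupport entry point.
    LogWriterUtils.getLogWriter().info("running example in VM0");

    // NetworkUtils replaces the old NetworkSupport entry point.
    String hostName = NetworkUtils.getServerHostName(vm0.getHost());
    LogWriterUtils.getLogWriter().fine("server host name: " + hostName);
  }
}

Callers that previously imported com.gemstone.gemfire.test.dunit.LogWriterSupport or NetworkSupport only need the import and class-name swap shown above; the method signatures are unchanged in these diffs.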
