Revision: 13183
Author:   [email protected]
Date:     Mon Dec 10 07:14:20 2012
Log:      Make unit tests resilient against GC Stress.

[email protected]
TEST=cctest --gc-interval=500 --stress-compaction

Review URL: https://codereview.chromium.org/11498012
http://code.google.com/p/v8/source/detail?r=13183

Modified:
 /branches/bleeding_edge/test/cctest/test-alloc.cc
 /branches/bleeding_edge/test/cctest/test-heap.cc

=======================================
--- /branches/bleeding_edge/test/cctest/test-alloc.cc Wed Nov 21 02:01:05 2012
+++ /branches/bleeding_edge/test/cctest/test-alloc.cc Mon Dec 10 07:14:20 2012
@@ -40,18 +40,7 @@
   Heap* heap = Isolate::Current()->heap();

   // New space.
-  NewSpace* new_space = heap->new_space();
-  static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
-  while (new_space->Available() > kNewSpaceFillerSize) {
-    int available_before = static_cast<int>(new_space->Available());
-    CHECK(!heap->AllocateByteArray(0)->IsFailure());
-    if (available_before == new_space->Available()) {
-      // It seems that we are avoiding new space allocations when
-      // allocation is forced, so no need to fill up new space
-      // in order to make the test harder.
-      break;
-    }
-  }
+  SimulateFullSpace(heap->new_space());
   CHECK(!heap->AllocateByteArray(100)->IsFailure());
   CHECK(!heap->AllocateFixedArray(100, NOT_TENURED)->IsFailure());

@@ -90,6 +79,7 @@
   CHECK(!heap->AllocateMap(JS_OBJECT_TYPE, instance_size)->IsFailure());

   // Test that we can allocate in old pointer space and code space.
+  SimulateFullSpace(heap->code_space());
   CHECK(!heap->AllocateFixedArray(100, TENURED)->IsFailure());
   CHECK(!heap->CopyCode(Isolate::Current()->builtins()->builtin(
       Builtins::kIllegal))->IsFailure());
=======================================
--- /branches/bleeding_edge/test/cctest/test-heap.cc Fri Dec 7 05:01:38 2012
+++ /branches/bleeding_edge/test/cctest/test-heap.cc Mon Dec 10 07:14:20 2012
@@ -26,8 +26,10 @@
 // Go through all incremental marking steps in one swoop.
 static void SimulateIncrementalMarking() {
   IncrementalMarking* marking = HEAP->incremental_marking();
-  CHECK(marking->IsStopped());
-  marking->Start();
+  CHECK(marking->IsMarking() || marking->IsStopped());
+  if (marking->IsStopped()) {
+    marking->Start();
+  }
   CHECK(marking->IsMarking());
   while (!marking->IsComplete()) {
     marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
@@ -413,9 +415,10 @@
     h2 = global_handles->Create(*u);
   }

+  // Make sure the objects are promoted.
   HEAP->CollectGarbage(OLD_POINTER_SPACE);
   HEAP->CollectGarbage(NEW_SPACE);
-  // Make sure the object is promoted.
+  CHECK(!HEAP->InNewSpace(*h1) && !HEAP->InNewSpace(*h2));

   global_handles->MakeWeak(h2.location(),
                            reinterpret_cast<void*>(1234),
@@ -423,7 +426,8 @@
   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
   CHECK(!GlobalHandles::IsNearDeath(h2.location()));

-  HEAP->CollectGarbage(OLD_POINTER_SPACE);
+ // Incremental marking potentially marked handles before they turned weak.
+  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

   CHECK((*h1)->IsString());

@@ -1035,7 +1039,6 @@
   // Simulate several GCs that use incremental marking.
   const int kAgingThreshold = 6;
   for (int i = 0; i < kAgingThreshold; i++) {
-    HEAP->incremental_marking()->Abort();
     SimulateIncrementalMarking();
     HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   }
@@ -1050,7 +1053,6 @@
   // Simulate several GCs that use incremental marking but make sure
   // the loop breaks once the function is enqueued as a candidate.
   for (int i = 0; i < kAgingThreshold; i++) {
-    HEAP->incremental_marking()->Abort();
     SimulateIncrementalMarking();
     if (!function->next_function_link()->IsUndefined()) break;
     HEAP->CollectAllGarbage(Heap::kNoGCFlags);
@@ -1225,6 +1227,10 @@
 TEST(TestInternalWeakLists) {
   v8::V8::Initialize();

+  // Some flags turn Scavenge collections into Mark-sweep collections
+  // and hence are incompatible with this test case.
+  if (FLAG_gc_global || FLAG_stress_compaction) return;
+
   static const int kNumTestContexts = 10;

   v8::HandleScope scope;
@@ -1946,6 +1952,7 @@
   i::FLAG_allow_natives_syntax = true;
   InitializeVM();
   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
+  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope;

   SimulateFullSpace(HEAP->new_space());
@@ -2121,7 +2128,7 @@
   // Triggering one GC will cause a lot of garbage to be discovered but
   // even spread across all allocated pages.
   HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
-  CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());

   // Triggering subsequent GCs should cause at least half of the pages
   // to be released to the OS after at most two cycles.

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev

Reply via email to