Reviewers: Hannes Payer, Michael Starzinger,
Message:
PTAL. This is the quick fix with regression test we discussed.
Description:
Skip write barriers when updating the weak hash table.
The write barrier on the weak hash table makes all of its pointers strong,
which can cause a memory leak.
BUG=359401
TEST=cctest/test-heap/NoWeakHashTableLeakWithIncrementalMarking
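
For context, here is a minimal illustrative sketch of the leak mechanism, assuming a
Dijkstra-style insertion barrier for incremental marking. The Object, IncrementalMarker,
and StoreEntry names below are hypothetical stand-ins for this note only (they are not
V8 types); the actual change simply passes SKIP_WRITE_BARRIER to the table's set()
calls, as shown in the diff.

// Illustrative sketch only; hypothetical types, not V8 classes.
struct Object { bool marked = false; };

struct IncrementalMarker {
  bool active = false;
  // A Dijkstra-style insertion barrier greys the newly stored value so the
  // incremental marker cannot lose it.
  void RecordWrite(Object* value) {
    if (active && value != nullptr) value->marked = true;
  }
};

// Storing through the barrier marks the value, so a "weak" table entry is
// kept alive as if it were strong for the rest of the cycle. Skipping the
// barrier leaves the value's fate to the weak-clearing pass instead.
void StoreEntry(Object** slot, Object* value, IncrementalMarker* marker,
                bool skip_write_barrier) {
  *slot = value;
  if (!skip_write_barrier) marker->RecordWrite(value);
}

With the barrier skipped, entries in the weak hash table survive only if marking reaches
them through some other strong path; dead entries are cleared (and now counted via
ElementRemoved) in MarkCompactCollector::ClearNonLiveReferences.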
Please review this at https://codereview.chromium.org/284773004/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files (+58, -6 lines):
M src/heap.cc
M src/mark-compact.cc
M src/objects.cc
M test/cctest/test-heap.cc
Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index f0c9154b5e9aa15cb45f00df5b5e43533b966979..17f31ff6904e32b68b00a8768a5528d99b51fba3 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -5460,6 +5460,8 @@ void Heap::AddWeakObjectToCodeDependency(Handle<Object> obj,
Handle<DependentCode> dep) {
ASSERT(!InNewSpace(*obj));
ASSERT(!InNewSpace(*dep));
+ // This handle scope keeps the table handle local to this function, which
+ // allows us to safely skip write barriers in table update operations.
HandleScope scope(isolate());
Handle<WeakHashTable> table(WeakHashTable::cast(weak_object_to_code_table_),
isolate());
Index: src/mark-compact.cc
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index ff6d2e304937c71dec099fb5f8fd1de5be207315..91039100f938895a4ede3b482f62907c1e278c9c 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -2634,6 +2634,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
ClearDependentCode(DependentCode::cast(value));
table->set(key_index, heap_->the_hole_value());
table->set(value_index, heap_->the_hole_value());
+ table->ElementRemoved();
}
}
}
Index: src/objects.cc
diff --git a/src/objects.cc b/src/objects.cc
index f9a52e59c7455d2986ade2ac70296c2dd5db5034..9d52a49491f17e619f35ab968a1795cdb642f659 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -16168,7 +16168,7 @@ Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table,
int entry = table->FindEntry(key);
// Key is already in table, just overwrite value.
if (entry != kNotFound) {
- table->set(EntryToValueIndex(entry), *value);
+ table->set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
return table;
}
@@ -16184,8 +16184,8 @@ void WeakHashTable::AddEntry(int entry,
Handle<Object> key,
Handle<Object> value) {
DisallowHeapAllocation no_allocation;
- set(EntryToIndex(entry), *key);
- set(EntryToValueIndex(entry), *value);
+ set(EntryToIndex(entry), *key, SKIP_WRITE_BARRIER);
+ set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
ElementAdded();
}
Index: test/cctest/test-heap.cc
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 22d2458ceb98f02b155c36e608a5add4c0a97b09..78bb0cb48413e5490e0e2f2d92fac818b5c1eb82 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -40,9 +40,7 @@
using namespace v8::internal;
-
-// Go through all incremental marking steps in one swoop.
-static void SimulateIncrementalMarking() {
+static void StartIncrementalMarking() {
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
IncrementalMarking* marking = CcTest::heap()->incremental_marking();
if (collector->IsConcurrentSweepingInProgress()) {
@@ -53,6 +51,12 @@ static void SimulateIncrementalMarking() {
marking->Start();
}
CHECK(marking->IsMarking());
+}
+
+
+static void CompleteIncrementalMarking() {
+ IncrementalMarking* marking = CcTest::heap()->incremental_marking();
+ CHECK(marking->IsMarking());
while (!marking->IsComplete()) {
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
}
@@ -60,6 +64,13 @@ static void SimulateIncrementalMarking() {
}
+// Go through all incremental marking steps in one swoop.
+static void SimulateIncrementalMarking() {
+ StartIncrementalMarking();
+ CompleteIncrementalMarking();
+}
+
+
static void CheckMap(Map* map, int type, int instance_size) {
CHECK(map->IsHeapObject());
#ifdef DEBUG
@@ -3888,6 +3899,44 @@ TEST(ObjectsInOptimizedCodeAreWeak) {
}
+TEST(NoWeakHashTableLeakWithIncrementalMarking) {
+ if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
+ if (!i::FLAG_incremental_marking) return;
+ i::FLAG_weak_embedded_objects_in_optimized_code = true;
+ i::FLAG_allow_natives_syntax = true;
+ i::FLAG_compilation_cache = false;
+ CcTest::InitializeVM();
+ Isolate* isolate = CcTest::i_isolate();
+ v8::internal::Heap* heap = CcTest::heap();
+
+ if (!isolate->use_crankshaft()) return;
+ HandleScope outer_scope(heap->isolate());
+ for (int i = 0; i < 3; i++) {
+ StartIncrementalMarking();
+ {
+ LocalContext context;
+ HandleScope scope(heap->isolate());
+ EmbeddedVector<char, 256> source;
+ OS::SNPrintF(source,
+ "function bar%d() {"
+ " return foo%d(1);"
+ "};"
+ "function foo%d(x) { with (x) { return 1 + x; } };"
+ "bar%d();"
+ "bar%d();"
+ "bar%d();"
+ "%%OptimizeFunctionOnNextCall(bar%d);"
+ "bar%d();", i, i, i, i, i, i, i, i);
+ CompileRun(source.start());
+ }
+ StartIncrementalMarking();
+ CompleteIncrementalMarking();
+ heap->CollectAllGarbage(i::Heap::kNoGCFlags);
+ }
+ WeakHashTable* table =
+ WeakHashTable::cast(heap->weak_object_to_code_table());
+ CHECK_EQ(0, table->NumberOfElements());
+}
+
static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
EmbeddedVector<char, 256> source;