Revision: 13584
Author: [email protected]
Date: Mon Feb 4 02:56:50 2013
Log: Fix clearing of dead dependent codes and verify weak embedded
maps on full GC.
BUG=172488,172489
[email protected]
Review URL: https://chromiumcodereview.appspot.com/12094036
http://code.google.com/p/v8/source/detail?r=13584
Modified:
/branches/bleeding_edge/src/heap-inl.h
/branches/bleeding_edge/src/heap.cc
/branches/bleeding_edge/src/heap.h
/branches/bleeding_edge/src/lithium.cc
/branches/bleeding_edge/src/mark-compact.cc
/branches/bleeding_edge/src/mark-compact.h
/branches/bleeding_edge/src/objects-debug.cc
/branches/bleeding_edge/src/objects-visiting-inl.h
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/src/objects.h
=======================================
--- /branches/bleeding_edge/src/heap-inl.h Wed Jan 16 04:52:27 2013
+++ /branches/bleeding_edge/src/heap-inl.h Mon Feb 4 02:56:50 2013
@@ -779,6 +779,18 @@
HEAP->always_allocate_scope_depth_--;
ASSERT(HEAP->always_allocate_scope_depth_ == 0);
}
+
+
+#ifdef VERIFY_HEAP
+NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() {
+ HEAP->no_weak_embedded_maps_verification_scope_depth_++;
+}
+
+
+NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() {
+ HEAP->no_weak_embedded_maps_verification_scope_depth_--;
+}
+#endif
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
=======================================
--- /branches/bleeding_edge/src/heap.cc Wed Jan 30 04:19:32 2013
+++ /branches/bleeding_edge/src/heap.cc Mon Feb 4 02:56:50 2013
@@ -157,6 +157,9 @@
ms_count_at_last_idle_notification_(0),
gc_count_at_last_idle_gc_(0),
scavenges_since_last_idle_round_(kIdleScavengeThreshold),
+#ifdef VERIFY_HEAP
+ no_weak_embedded_maps_verification_scope_depth_(0),
+#endif
promotion_queue_(this),
configured_(false),
chunks_queued_for_free_(NULL),
=======================================
--- /branches/bleeding_edge/src/heap.h Wed Jan 30 04:19:32 2013
+++ /branches/bleeding_edge/src/heap.h Mon Feb 4 02:56:50 2013
@@ -1322,6 +1322,11 @@
#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
void Verify();
+
+
+ bool weak_embedded_maps_verification_enabled() {
+ return no_weak_embedded_maps_verification_scope_depth_ == 0;
+ }
#endif
#ifdef DEBUG
@@ -2214,6 +2219,10 @@
unsigned int gc_count_at_last_idle_gc_;
int scavenges_since_last_idle_round_;
+#ifdef VERIFY_HEAP
+ int no_weak_embedded_maps_verification_scope_depth_;
+#endif
+
static const int kMaxMarkSweepsInIdleRound = 7;
static const int kIdleScavengeThreshold = 5;
@@ -2243,6 +2252,9 @@
friend class MarkCompactCollector;
friend class MarkCompactMarkingVisitor;
friend class MapCompact;
+#ifdef VERIFY_HEAP
+ friend class NoWeakEmbeddedMapsVerificationScope;
+#endif
DISALLOW_COPY_AND_ASSIGN(Heap);
};
@@ -2303,6 +2315,14 @@
DisallowAllocationFailure disallow_allocation_failure_;
};
+#ifdef VERIFY_HEAP
+class NoWeakEmbeddedMapsVerificationScope {
+ public:
+ inline NoWeakEmbeddedMapsVerificationScope();
+ inline ~NoWeakEmbeddedMapsVerificationScope();
+};
+#endif
+
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
=======================================
--- /branches/bleeding_edge/src/lithium.cc Mon Jan 28 02:25:38 2013
+++ /branches/bleeding_edge/src/lithium.cc Mon Feb 4 02:56:50 2013
@@ -452,6 +452,12 @@
}
}
}
+#ifdef VERIFY_HEAP
+ // This disables verification of weak embedded maps after full GC.
+ // AddDependentCode can cause a GC, which would observe the state where
+ // this code is not yet in the depended code lists of the embedded maps.
+ NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps;
+#endif
for (int i = 0; i < maps.length(); i++) {
maps.at(i)->AddDependentCode(code);
}
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Thu Jan 31 06:23:36 2013
+++ /branches/bleeding_edge/src/mark-compact.cc Mon Feb 4 02:56:50 2013
@@ -87,7 +87,7 @@
void VisitEmbeddedPointer(RelocInfo* rinfo) {
ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
- if (!FLAG_weak_embedded_maps_in_optimized_code ||
+ if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
!rinfo->target_object()->IsMap() ||
!Map::cast(rinfo->target_object())->CanTransition()) {
@@ -414,6 +414,13 @@
}
#endif
+#ifdef VERIFY_HEAP
+ if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code &&
+ heap()->weak_embedded_maps_verification_enabled()) {
+ VerifyWeakEmbeddedMapsInOptimizedCode();
+ }
+#endif
+
Finish();
if (marking_parity_ == EVEN_MARKING_PARITY) {
@@ -465,6 +472,19 @@
CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
}
}
+
+
+void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
+ HeapObjectIterator code_iterator(heap()->code_space());
+ for (HeapObject* obj = code_iterator.Next();
+ obj != NULL;
+ obj = code_iterator.Next()) {
+ Code* code = Code::cast(obj);
+ if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
+ if (code->marked_for_deoptimization()) continue;
+ code->VerifyEmbeddedMapsDependency();
+ }
+}
#endif // VERIFY_HEAP
@@ -889,6 +909,7 @@
}
#endif
}
+
class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
public:
@@ -2367,10 +2388,10 @@
if (IsMarked(code) && !code->marked_for_deoptimization()) {
if (new_number_of_codes != i) {
codes->set_code_at(new_number_of_codes, code);
- Object** slot = codes->code_slot_at(new_number_of_codes);
- RecordSlot(slot, slot, code);
- new_number_of_codes++;
}
+ Object** slot = codes->code_slot_at(new_number_of_codes);
+ RecordSlot(slot, slot, code);
+ new_number_of_codes++;
}
}
for (int i = new_number_of_codes; i < number_of_codes; i++) {
=======================================
--- /branches/bleeding_edge/src/mark-compact.h Thu Jan 31 06:23:36 2013
+++ /branches/bleeding_edge/src/mark-compact.h Mon Feb 4 02:56:50 2013
@@ -607,6 +607,7 @@
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
+ void VerifyWeakEmbeddedMapsInOptimizedCode();
#endif
// Sweep a single page from the given space conservatively.
=======================================
--- /branches/bleeding_edge/src/objects-debug.cc Mon Jan 14 07:17:56 2013
+++ /branches/bleeding_edge/src/objects-debug.cc Mon Feb 4 02:56:50 2013
@@ -30,6 +30,7 @@
#include "disassembler.h"
#include "disasm.h"
#include "jsregexp.h"
+#include "macro-assembler.h"
#include "objects-visiting.h"
namespace v8 {
@@ -593,6 +594,21 @@
}
}
}
+
+
+void Code::VerifyEmbeddedMapsDependency() {
+ int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
+ for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
+ RelocInfo::Mode mode = it.rinfo()->rmode();
+ if (mode == RelocInfo::EMBEDDED_OBJECT &&
+ it.rinfo()->target_object()->IsMap()) {
+ Map* map = Map::cast(it.rinfo()->target_object());
+ if (map->CanTransition()) {
+ CHECK(map->dependent_codes()->Contains(this));
+ }
+ }
+ }
+}
void JSArray::JSArrayVerify() {
=======================================
--- /branches/bleeding_edge/src/objects-visiting-inl.h Thu Jan 31 02:50:42 2013
+++ /branches/bleeding_edge/src/objects-visiting-inl.h Mon Feb 4 02:56:50 2013
@@ -175,8 +175,8 @@
ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
ASSERT(!rinfo->target_object()->IsConsString());
HeapObject* object = HeapObject::cast(rinfo->target_object());
- if (!FLAG_weak_embedded_maps_in_optimized_code ||
- !FLAG_collect_maps || rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
+ if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
+ rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
!object->IsMap() || !Map::cast(object)->CanTransition()) {
heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
StaticVisitor::MarkObject(heap, object);
=======================================
--- /branches/bleeding_edge/src/objects.cc Thu Jan 31 02:50:42 2013
+++ /branches/bleeding_edge/src/objects.cc Mon Feb 4 02:56:50 2013
@@ -9499,6 +9499,15 @@
codes->set_number_of_codes(append_index + 1);
return codes;
}
+
+
+bool DependentCodes::Contains(Code* code) {
+ int limit = number_of_codes();
+ for (int i = 0; i < limit; i++) {
+ if (code_at(i) == code) return true;
+ }
+ return false;
+}
MaybeObject* JSReceiver::SetPrototype(Object* value,
=======================================
--- /branches/bleeding_edge/src/objects.h Thu Jan 31 02:50:42 2013
+++ /branches/bleeding_edge/src/objects.h Mon Feb 4 02:56:50 2013
@@ -4547,6 +4547,10 @@
void PrintDeoptLocation(int bailout_id);
+#ifdef VERIFY_HEAP
+ void VerifyEmbeddedMapsDependency();
+#endif
+
// Max loop nesting marker used to postpose OSR. We don't take loop
// nesting that is deeper than 5 levels into account.
static const int kMaxLoopNestingMarker = 6;
@@ -4693,6 +4697,7 @@
static Handle<DependentCodes> Append(Handle<DependentCodes> codes,
Handle<Code> value);
static inline DependentCodes* cast(Object* object);
+ bool Contains(Code* code);
private:
static const int kNumberOfCodesIndex = 0;
static const int kCodesIndex = 1;
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.