Revision: 8817
Author: [email protected]
Date: Wed Aug 3 05:48:30 2011
Log: Prototype of mark-and-compact support for Harmony weak maps.
[email protected]
BUG=v8:1565
TEST=cctest/test-weakmaps
Review URL: http://codereview.chromium.org/7553012
http://code.google.com/p/v8/source/detail?r=8817
Added:
/branches/bleeding_edge/test/cctest/test-weakmaps.cc
Modified:
/branches/bleeding_edge/src/heap.cc
/branches/bleeding_edge/src/heap.h
/branches/bleeding_edge/src/mark-compact.cc
/branches/bleeding_edge/src/mark-compact.h
/branches/bleeding_edge/src/objects-inl.h
/branches/bleeding_edge/src/objects-visiting.cc
/branches/bleeding_edge/src/objects-visiting.h
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/src/objects.h
/branches/bleeding_edge/src/runtime.cc
/branches/bleeding_edge/test/cctest/SConscript
/branches/bleeding_edge/test/cctest/cctest.status
/branches/bleeding_edge/test/mjsunit/harmony/weakmaps.js
=======================================
--- /dev/null
+++ /branches/bleeding_edge/test/cctest/test-weakmaps.cc Wed Aug 3 05:48:30 2011
@@ -0,0 +1,149 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "global-handles.h"
+#include "snapshot.h"
+#include "cctest.h"
+
+using namespace v8::internal;
+
+
+static Handle<JSWeakMap> AllocateJSWeakMap() {
+ Handle<Map> map = FACTORY->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize);
+ Handle<JSObject> weakmap_obj = FACTORY->NewJSObjectFromMap(map);
+ Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj));
+ // Do not use handles for the hash table, it would make entries strong.
+ Object* table_obj = ObjectHashTable::Allocate(1)->ToObjectChecked();
+ ObjectHashTable* table = ObjectHashTable::cast(table_obj);
+ weakmap->set_table(table);
+ weakmap->set_next(Smi::FromInt(0));
+ return weakmap;
+}
+
+static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
+ Handle<JSObject> key,
+ int value) {
+ Handle<ObjectHashTable> table = PutIntoObjectHashTable(
+ Handle<ObjectHashTable>(weakmap->table()),
+ Handle<JSObject>(JSObject::cast(*key)),
+ Handle<Smi>(Smi::FromInt(value)));
+ weakmap->set_table(*table);
+}
+
+static int NumberOfWeakCalls = 0;
+static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
+ ASSERT(id == reinterpret_cast<void*>(1234));
+ NumberOfWeakCalls++;
+ handle.Dispose();
+}
+
+
+TEST(Weakness) {
+ LocalContext context;
+ v8::HandleScope scope;
+ Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+ GlobalHandles* global_handles = Isolate::Current()->global_handles();
+
+ // Keep global reference to the key.
+ Handle<Object> key;
+ {
+ v8::HandleScope scope;
+  Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+ Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
+ key = global_handles->Create(*object);
+ }
+ CHECK(!global_handles->IsWeak(key.location()));
+
+ // Put entry into weak map.
+ {
+ v8::HandleScope scope;
+ PutIntoWeakMap(weakmap, Handle<JSObject>(JSObject::cast(*key)), 23);
+ }
+ CHECK_EQ(1, weakmap->table()->NumberOfElements());
+
+ // Force a full GC.
+ HEAP->CollectAllGarbage(false);
+ CHECK_EQ(0, NumberOfWeakCalls);
+ CHECK_EQ(1, weakmap->table()->NumberOfElements());
+ CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
+
+ // Make the global reference to the key weak.
+ {
+ v8::HandleScope scope;
+ global_handles->MakeWeak(key.location(),
+ reinterpret_cast<void*>(1234),
+ &WeakPointerCallback);
+ }
+ CHECK(global_handles->IsWeak(key.location()));
+
+ // Force a full GC.
+ // Perform two consecutive GCs because the first one will only clear
+ // weak references whereas the second one will also clear weak maps.
+ HEAP->CollectAllGarbage(false);
+ CHECK_EQ(1, NumberOfWeakCalls);
+ CHECK_EQ(1, weakmap->table()->NumberOfElements());
+ CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
+ HEAP->CollectAllGarbage(false);
+ CHECK_EQ(1, NumberOfWeakCalls);
+ CHECK_EQ(0, weakmap->table()->NumberOfElements());
+ CHECK_EQ(1, weakmap->table()->NumberOfDeletedElements());
+}
+
+
+TEST(Shrinking) {
+ LocalContext context;
+ v8::HandleScope scope;
+ Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+
+ // Check initial capacity.
+ CHECK_EQ(32, weakmap->table()->Capacity());
+
+ // Fill up weak map to trigger capacity change.
+ {
+ v8::HandleScope scope;
+  Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+ for (int i = 0; i < 32; i++) {
+ Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
+ PutIntoWeakMap(weakmap, object, i);
+ }
+ }
+
+ // Check increased capacity.
+ CHECK_EQ(128, weakmap->table()->Capacity());
+
+ // Force a full GC.
+ CHECK_EQ(32, weakmap->table()->NumberOfElements());
+ CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
+ HEAP->CollectAllGarbage(false);
+ CHECK_EQ(0, weakmap->table()->NumberOfElements());
+ CHECK_EQ(32, weakmap->table()->NumberOfDeletedElements());
+
+ // Check shrunk capacity.
+ CHECK_EQ(32, weakmap->table()->Capacity());
+}
=======================================
--- /branches/bleeding_edge/src/heap.cc Wed Jul 27 08:08:50 2011
+++ /branches/bleeding_edge/src/heap.cc Wed Aug 3 05:48:30 2011
@@ -1292,6 +1292,10 @@
&ObjectEvacuationStrategy<POINTER_OBJECT>::
template
VisitSpecialized<SharedFunctionInfo::kSize>);
+ table_.Register(kVisitJSWeakMap,
+ &ObjectEvacuationStrategy<POINTER_OBJECT>::
+ Visit);
+
table_.Register(kVisitJSRegExp,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
=======================================
--- /branches/bleeding_edge/src/heap.h Wed Jul 27 08:08:50 2011
+++ /branches/bleeding_edge/src/heap.h Wed Aug 3 05:48:30 2011
@@ -1646,6 +1646,7 @@
friend class Page;
friend class Isolate;
friend class MarkCompactCollector;
+ friend class StaticMarkingVisitor;
friend class MapCompact;
DISALLOW_COPY_AND_ASSIGN(Heap);
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Wed Jul 13 06:29:10 2011
+++ /branches/bleeding_edge/src/mark-compact.cc Wed Aug 3 05:48:30 2011
@@ -64,13 +64,15 @@
live_bytes_(0),
#endif
heap_(NULL),
- code_flusher_(NULL) { }
+ code_flusher_(NULL),
+ encountered_weak_maps_(NULL) { }
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
ASSERT(state_ == PREPARE_GC);
+ ASSERT(encountered_weak_maps_ == Smi::FromInt(0));
// Prepare has selected whether to compact the old generation or not.
// Tell the tracer.
@@ -80,6 +82,8 @@
if (FLAG_collect_maps) ClearNonLiveTransitions();
+ ClearWeakMaps();
+
SweepLargeObjectSpace();
if (IsCompacting()) {
@@ -407,6 +411,8 @@
table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
+ table_.Register(kVisitJSWeakMap, &VisitJSWeakMap);
+
table_.Register(kVisitOddball,
&FixedBodyVisitor<StaticMarkingVisitor,
Oddball::BodyDescriptor,
@@ -556,6 +562,34 @@
StructBodyDescriptor,
void> StructObjectVisitor;
+ static void VisitJSWeakMap(Map* map, HeapObject* object) {
+ MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+ JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);
+
+ // Enqueue weak map in linked list of encountered weak maps.
+ ASSERT(weak_map->next() == Smi::FromInt(0));
+ weak_map->set_next(collector->encountered_weak_maps());
+ collector->set_encountered_weak_maps(weak_map);
+
+ // Skip visiting the backing hash table containing the mappings.
+ int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
+ BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
+ map->heap(),
+ object,
+ JSWeakMap::BodyDescriptor::kStartOffset,
+ JSWeakMap::kTableOffset);
+ BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
+ map->heap(),
+ object,
+ JSWeakMap::kTableOffset + kPointerSize,
+ object_size);
+
+ // Mark the backing hash table without pushing it on the marking stack.
+ ASSERT(!weak_map->unchecked_table()->IsMarked());
+ ASSERT(weak_map->unchecked_table()->map()->IsMarked());
+ collector->SetMark(weak_map->unchecked_table());
+ }
+
static void VisitCode(Map* map, HeapObject* object) {
reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
map->heap());
@@ -1369,20 +1403,26 @@
// marking stack have been marked, or are overflowed in the heap.
void MarkCompactCollector::EmptyMarkingStack() {
while (!marking_stack_.is_empty()) {
- HeapObject* object = marking_stack_.Pop();
- ASSERT(object->IsHeapObject());
- ASSERT(heap()->Contains(object));
- ASSERT(object->IsMarked());
- ASSERT(!object->IsOverflowed());
-
- // Because the object is marked, we have to recover the original map
- // pointer and use it to mark the object's body.
- MapWord map_word = object->map_word();
- map_word.ClearMark();
- Map* map = map_word.ToMap();
- MarkObject(map);
-
- StaticMarkingVisitor::IterateBody(map, object);
+ while (!marking_stack_.is_empty()) {
+ HeapObject* object = marking_stack_.Pop();
+ ASSERT(object->IsHeapObject());
+ ASSERT(heap()->Contains(object));
+ ASSERT(object->IsMarked());
+ ASSERT(!object->IsOverflowed());
+
+ // Because the object is marked, we have to recover the original map
+ // pointer and use it to mark the object's body.
+ MapWord map_word = object->map_word();
+ map_word.ClearMark();
+ Map* map = map_word.ToMap();
+ MarkObject(map);
+
+ StaticMarkingVisitor::IterateBody(map, object);
+ }
+
+ // Process encountered weak maps, mark objects only reachable by those
+ // weak maps and repeat until fix-point is reached.
+ ProcessWeakMaps();
}
}
@@ -1734,6 +1774,45 @@
}
}
}
+
+
+void MarkCompactCollector::ProcessWeakMaps() {
+ Object* weak_map_obj = encountered_weak_maps();
+ while (weak_map_obj != Smi::FromInt(0)) {
+ ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
+ JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
+ ObjectHashTable* table = weak_map->unchecked_table();
+ for (int i = 0; i < table->Capacity(); i++) {
+ if (HeapObject::cast(table->KeyAt(i))->IsMarked()) {
+ Object* value = table->get(table->EntryToValueIndex(i));
+ StaticMarkingVisitor::MarkObjectByPointer(heap(), &value);
+ table->set_unchecked(heap(),
+ table->EntryToValueIndex(i),
+ value,
+ UPDATE_WRITE_BARRIER);
+ }
+ }
+ weak_map_obj = weak_map->next();
+ }
+}
+
+
+void MarkCompactCollector::ClearWeakMaps() {
+ Object* weak_map_obj = encountered_weak_maps();
+ while (weak_map_obj != Smi::FromInt(0)) {
+ ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
+ JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
+ ObjectHashTable* table = weak_map->unchecked_table();
+ for (int i = 0; i < table->Capacity(); i++) {
+ if (!HeapObject::cast(table->KeyAt(i))->IsMarked()) {
+ table->RemoveEntry(i, heap());
+ }
+ }
+ weak_map_obj = weak_map->next();
+ weak_map->set_next(Smi::FromInt(0));
+ }
+ set_encountered_weak_maps(Smi::FromInt(0));
+}
// -------------------------------------------------------------------------
// Phase 2: Encode forwarding addresses.
=======================================
--- /branches/bleeding_edge/src/mark-compact.h Fri Jul 1 05:47:18 2011
+++ /branches/bleeding_edge/src/mark-compact.h Wed Aug 3 05:48:30 2011
@@ -192,6 +192,11 @@
CodeFlusher* code_flusher() { return code_flusher_; }
inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
void EnableCodeFlushing(bool enable);
+
+ inline Object* encountered_weak_maps() { return encountered_weak_maps_; }
+ inline void set_encountered_weak_maps(Object* weak_map) {
+ encountered_weak_maps_ = weak_map;
+ }
private:
MarkCompactCollector();
@@ -329,6 +334,16 @@
// We replace them with a null descriptor, with the same key.
void ClearNonLiveTransitions();
+ // Mark all values associated with reachable keys in weak maps encountered
+ // so far. This might push new object or even new weak maps onto the
+ // marking stack.
+ void ProcessWeakMaps();
+
+ // After all reachable objects have been marked those weak map entries
+ // with an unreachable key are removed from all encountered weak maps.
+ // The linked list of all encountered weak maps is destroyed.
+ void ClearWeakMaps();
+
// -----------------------------------------------------------------------
// Phase 2: Sweeping to clear mark bits and free non-live objects for
// a non-compacting collection, or else computing and encoding
@@ -499,6 +514,7 @@
Heap* heap_;
MarkingStack marking_stack_;
CodeFlusher* code_flusher_;
+ Object* encountered_weak_maps_;
friend class Heap;
friend class OverflowedObjectsScanner;
=======================================
--- /branches/bleeding_edge/src/objects-inl.h Wed Aug 3 04:55:13 2011
+++ /branches/bleeding_edge/src/objects-inl.h Wed Aug 3 05:48:30 2011
@@ -3861,6 +3861,12 @@
ACCESSORS(JSWeakMap, table, ObjectHashTable, kTableOffset)
+ACCESSORS_GCSAFE(JSWeakMap, next, Object, kNextOffset)
+
+
+ObjectHashTable* JSWeakMap::unchecked_table() {
+ return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
+}
Address Foreign::address() {
@@ -4469,6 +4475,11 @@
MaybeObject* ObjectHashTableShape::AsObject(JSObject* key) {
return key;
}
+
+
+void ObjectHashTable::RemoveEntry(int entry) {
+ RemoveEntry(entry, GetHeap());
+}
void Map::ClearCodeCache(Heap* heap) {
=======================================
--- /branches/bleeding_edge/src/objects-visiting.cc Wed Aug 3 04:55:13 2011
+++ /branches/bleeding_edge/src/objects-visiting.cc Wed Aug 3 05:48:30 2011
@@ -88,6 +88,9 @@
case JS_GLOBAL_PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
+ case JS_WEAK_MAP_TYPE:
+ return kVisitJSWeakMap;
+
case JS_REGEXP_TYPE:
return kVisitJSRegExp;
@@ -115,7 +118,6 @@
case JS_GLOBAL_OBJECT_TYPE:
case JS_BUILTINS_OBJECT_TYPE:
case JS_MESSAGE_OBJECT_TYPE:
- case JS_WEAK_MAP_TYPE:
return GetVisitorIdForSize(kVisitJSObject,
kVisitJSObjectGeneric,
instance_size);
=======================================
--- /branches/bleeding_edge/src/objects-visiting.h Mon Jul 11 02:12:17 2011
+++ /branches/bleeding_edge/src/objects-visiting.h Wed Aug 3 05:48:30 2011
@@ -121,6 +121,7 @@
kVisitPropertyCell,
kVisitSharedFunctionInfo,
kVisitJSFunction,
+ kVisitJSWeakMap,
kVisitJSRegExp,
kVisitorIdCount,
@@ -317,7 +318,9 @@
SharedFunctionInfo::BodyDescriptor,
int>::Visit);
- table_.Register(kVisitJSRegExp, &VisitJSRegExp);
+ table_.Register(kVisitJSWeakMap, &VisitJSObject);
+
+ table_.Register(kVisitJSRegExp, &VisitJSObject);
table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);
@@ -355,15 +358,15 @@
int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
return FixedDoubleArray::SizeFor(length);
}
+
+ static inline int VisitJSObject(Map* map, HeapObject* object) {
+ return JSObjectVisitor::Visit(map, object);
+ }
static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
return SeqAsciiString::cast(object)->
SeqAsciiStringSize(map->instance_type());
}
-
- static inline int VisitJSRegExp(Map* map, HeapObject* object) {
- return JSObjectVisitor::Visit(map, object);
- }
static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
return SeqTwoByteString::cast(object)->
=======================================
--- /branches/bleeding_edge/src/objects.cc Wed Aug 3 05:42:04 2011
+++ /branches/bleeding_edge/src/objects.cc Wed Aug 3 05:48:30 2011
@@ -11640,10 +11640,9 @@
}
-void ObjectHashTable::RemoveEntry(int entry) {
- Object* null_value = GetHeap()->null_value();
- set(EntryToIndex(entry), null_value);
- set(EntryToIndex(entry) + 1, null_value);
+void ObjectHashTable::RemoveEntry(int entry, Heap* heap) {
+ set_null(heap, EntryToIndex(entry));
+ set_null(heap, EntryToIndex(entry) + 1);
ElementRemoved();
}
=======================================
--- /branches/bleeding_edge/src/objects.h Wed Aug 3 05:42:04 2011
+++ /branches/bleeding_edge/src/objects.h Wed Aug 3 05:48:30 2011
@@ -2984,8 +2984,16 @@
MUST_USE_RESULT MaybeObject* Put(JSObject* key, Object* value);
private:
+ friend class MarkCompactCollector;
+
void AddEntry(int entry, JSObject* key, Object* value);
- void RemoveEntry(int entry);
+ void RemoveEntry(int entry, Heap* heap);
+ inline void RemoveEntry(int entry);
+
+ // Returns the index to the value of an entry.
+ static inline int EntryToValueIndex(int entry) {
+ return EntryToIndex(entry) + 1;
+ }
};
@@ -6642,6 +6650,12 @@
// [table]: the backing hash table mapping keys to values.
DECL_ACCESSORS(table, ObjectHashTable)
+ // [next]: linked list of encountered weak maps during GC.
+ DECL_ACCESSORS(next, Object)
+
+ // Unchecked accessors to be used during GC.
+ inline ObjectHashTable* unchecked_table();
+
// Casting.
static inline JSWeakMap* cast(Object* obj);
@@ -6656,7 +6670,8 @@
#endif
static const int kTableOffset = JSObject::kHeaderSize;
- static const int kSize = kTableOffset + kPointerSize;
+ static const int kNextOffset = kTableOffset + kPointerSize;
+ static const int kSize = kNextOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSWeakMap);
=======================================
--- /branches/bleeding_edge/src/runtime.cc Wed Aug 3 05:44:17 2011
+++ /branches/bleeding_edge/src/runtime.cc Wed Aug 3 05:48:30 2011
@@ -642,6 +642,7 @@
ASSERT(weakmap->map()->inobject_properties() == 0);
Handle<ObjectHashTable> table =
isolate->factory()->NewObjectHashTable(0);
weakmap->set_table(*table);
+ weakmap->set_next(Smi::FromInt(0));
return *weakmap;
}
=======================================
--- /branches/bleeding_edge/test/cctest/SConscript Thu Jul 28 10:21:22 2011
+++ /branches/bleeding_edge/test/cctest/SConscript Wed Aug 3 05:48:30 2011
@@ -96,7 +96,8 @@
'test-threads.cc',
'test-unbound-queue.cc',
'test-utils.cc',
- 'test-version.cc'
+ 'test-version.cc',
+ 'test-weakmaps.cc'
],
'arch:arm': [
'test-assembler-arm.cc',
=======================================
--- /branches/bleeding_edge/test/cctest/cctest.status Wed Jun 22 13:23:48 2011
+++ /branches/bleeding_edge/test/cctest/cctest.status Wed Aug 3 05:48:30 2011
@@ -38,6 +38,9 @@
test-serialize/TestThatAlwaysFails: FAIL
test-serialize/DependentTestThatAlwaysFails: FAIL
+# We do not yet shrink weak maps after they have been emptied by the GC
+test-weakmaps/Shrinking: FAIL
+
##############################################################################
[ $arch == arm ]
=======================================
--- /branches/bleeding_edge/test/mjsunit/harmony/weakmaps.js Wed Aug 3 04:55:13 2011
+++ /branches/bleeding_edge/test/mjsunit/harmony/weakmaps.js Wed Aug 3 05:48:30 2011
@@ -112,3 +112,9 @@
assertTrue(m instanceof WeakMap);
assertTrue(WeakMap.prototype.set instanceof Function)
assertTrue(WeakMap.prototype.get instanceof Function)
+
+
+// Stress Test
+// There is a proposed stress-test available at the es-discuss mailing list
+// which cannot be reasonably automated. Check it out by hand if you like:
+// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev