Revision: 11909
Author: [email protected]
Date: Fri Jun 22 06:44:28 2012
Log: Version 3.12.2
Made near-jump check more strict in LoadNamedFieldPolymorphic on ia32/x64.
(Chromium issue 134055)
Fixed lazy sweeping heuristics to prevent old-space expansion. (issue 2194)
Performance and stability improvements on all platforms.
http://code.google.com/p/v8/source/detail?r=11909
Added:
/trunk/test/mjsunit/regress/regress-crbug-134055.js
Modified:
/trunk/ChangeLog
/trunk/src/arm/lithium-codegen-arm.cc
/trunk/src/arm/stub-cache-arm.cc
/trunk/src/bootstrapper.cc
/trunk/src/debug.cc
/trunk/src/debug.h
/trunk/src/flag-definitions.h
/trunk/src/heap.cc
/trunk/src/hydrogen.cc
/trunk/src/ia32/lithium-codegen-ia32.cc
/trunk/src/ia32/stub-cache-ia32.cc
/trunk/src/ic.cc
/trunk/src/mark-compact.cc
/trunk/src/mips/lithium-codegen-mips.cc
/trunk/src/mips/stub-cache-mips.cc
/trunk/src/objects.cc
/trunk/src/property.h
/trunk/src/runtime.cc
/trunk/src/version.cc
/trunk/src/x64/lithium-codegen-x64.cc
/trunk/src/x64/stub-cache-x64.cc
/trunk/test/cctest/test-alloc.cc
/trunk/test/cctest/test-compiler.cc
/trunk/test/cctest/test-decls.cc
/trunk/test/cctest/test-heap.cc
=======================================
--- /dev/null
+++ /trunk/test/mjsunit/regress/regress-crbug-134055.js Fri Jun 22 06:44:28 2012
@@ -0,0 +1,63 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function crash(obj) {
+ return obj.foo;
+}
+
+function base(number_of_properties) {
+ var result = new Array();
+ for (var i = 0; i < number_of_properties; i++) {
+ result["property" + i] = "value" + i;
+ }
+ result.foo = number_of_properties;
+ return result;
+}
+
+var a = base(12);
+var b = base(13);
+var c = base(14);
+var d = base(15);
+
+crash(a); // Premonomorphic.
+crash(a);
+crash(b);
+crash(c);
+crash(d); // Polymorphic, degree 4.
+
+// Prepare ElementsKind transition map chain.
+var x = base(13);
+x[0] = "object";
+x = base(14);
+x[0] = "object";
+x = base(15);
+x[0] = "object";
+
+%OptimizeFunctionOnNextCall(crash);
+crash(a);
=======================================
--- /trunk/ChangeLog Thu Jun 21 04:16:20 2012
+++ /trunk/ChangeLog Fri Jun 22 06:44:28 2012
@@ -1,3 +1,14 @@
+2012-06-22: Version 3.12.2
+
+ Made near-jump check more strict in LoadNamedFieldPolymorphic on
+ ia32/x64. (Chromium issue 134055)
+
+ Fixed lazy sweeping heuristics to prevent old-space expansion.
+ (issue 2194)
+
+ Performance and stability improvements on all platforms.
+
+
2012-06-21: Version 3.12.1
Performance and stability improvements on all platforms.
=======================================
--- /trunk/src/arm/lithium-codegen-arm.cc Thu Jun 21 04:16:20 2012
+++ /trunk/src/arm/lithium-codegen-arm.cc Fri Jun 22 06:44:28 2012
@@ -2579,7 +2579,7 @@
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
ASSERT(lookup.IsFound() || lookup.IsCacheable());
- if (lookup.IsFound() && lookup.type() == FIELD) {
+ if (lookup.IsField()) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2591,7 +2591,7 @@
__ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
__ ldr(result, FieldMemOperand(result, offset +
FixedArray::kHeaderSize));
}
- } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
+ } else if (lookup.IsConstantFunction()) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
} else {
=======================================
--- /trunk/src/arm/stub-cache-arm.cc Wed Jun 13 04:51:58 2012
+++ /trunk/src/arm/stub-cache-arm.cc Fri Jun 22 06:44:28 2012
@@ -1303,7 +1303,7 @@
// later.
bool compile_followup_inline = false;
if (lookup->IsFound() && lookup->IsCacheable()) {
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
@@ -1377,7 +1377,7 @@
miss);
}
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
// We found FIELD property in prototype chain of interceptor's holder.
// Retrieve a field from field's holder.
GenerateFastPropertyLoad(masm(), r0, holder_reg,
=======================================
--- /trunk/src/bootstrapper.cc Wed Jun 13 04:51:58 2012
+++ /trunk/src/bootstrapper.cc Fri Jun 22 06:44:28 2012
@@ -1080,11 +1080,11 @@
#ifdef DEBUG
LookupResult lookup(isolate);
result->LocalLookup(heap->callee_symbol(), &lookup);
- ASSERT(lookup.IsFound() && (lookup.type() == FIELD));
+ ASSERT(lookup.IsField());
ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsCalleeIndex);
result->LocalLookup(heap->length_symbol(), &lookup);
- ASSERT(lookup.IsFound() && (lookup.type() == FIELD));
+ ASSERT(lookup.IsField());
ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex);
ASSERT(result->map()->inobject_properties() >
Heap::kArgumentsCalleeIndex);
@@ -1178,7 +1178,7 @@
#ifdef DEBUG
LookupResult lookup(isolate);
result->LocalLookup(heap->length_symbol(), &lookup);
- ASSERT(lookup.IsFound() && (lookup.type() == FIELD));
+ ASSERT(lookup.IsField());
ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex);
ASSERT(result->map()->inobject_properties() >
Heap::kArgumentsLengthIndex);
=======================================
--- /trunk/src/debug.cc Wed Jun 20 04:29:00 2012
+++ /trunk/src/debug.cc Fri Jun 22 06:44:28 2012
@@ -896,8 +896,18 @@
Address c_entry_fp,
Address last_fp,
Address larger_fp,
+ Address last_in_fp,
+ Address last_out_fp,
int count,
int end) {
+ OS::PrintError("start: %d\n", start);
+ OS::PrintError("c_entry_fp: %p\n", static_cast<void*>(c_entry_fp));
+ OS::PrintError("last_fp: %p\n", static_cast<void*>(last_fp));
+ OS::PrintError("larger_fp: %p\n", static_cast<void*>(larger_fp));
+ OS::PrintError("last_in_fp: %p\n", static_cast<void*>(last_in_fp));
+ OS::PrintError("last_out_fp: %p\n", static_cast<void*>(last_out_fp));
+ OS::PrintError("count: %d\n", count);
+ OS::PrintError("end: %d\n", end);
OS::Abort();
}
@@ -1010,6 +1020,8 @@
frame->fp(),
thread_local_.last_fp_,
NULL,
+ thread_local_.step_into_fp_,
+ thread_local_.step_out_fp_,
count,
0xFEEEEEEE);
} else if (it.frame()->fp() != thread_local_.last_fp_) {
@@ -1018,6 +1030,8 @@
frame->fp(),
thread_local_.last_fp_,
it.frame()->fp(),
+ thread_local_.step_into_fp_,
+ thread_local_.step_out_fp_,
count,
0xFEEEEEEE);
}
=======================================
--- /trunk/src/debug.h Wed Jun 20 04:29:00 2012
+++ /trunk/src/debug.h Fri Jun 22 06:44:28 2012
@@ -236,6 +236,8 @@
Address c_entry_fp,
Address last_fp,
Address larger_fp,
+ Address last_in_fp,
+ Address last_out_fp,
int count,
int end));
Object* Break(Arguments args);
=======================================
--- /trunk/src/flag-definitions.h Wed Jun 20 04:29:00 2012
+++ /trunk/src/flag-definitions.h Fri Jun 22 06:44:28 2012
@@ -209,7 +209,7 @@
DEFINE_bool(lookup_sample_by_shared, true,
            "when picking a function to optimize, watch for shared function "
            "info, not JSFunction itself")
-DEFINE_bool(cache_optimized_code, true,
+DEFINE_bool(cache_optimized_code, false,
"cache optimized code for closures")
DEFINE_bool(inline_construct, true, "inline constructor calls")
DEFINE_bool(inline_arguments, true, "inline functions with arguments object")
=======================================
--- /trunk/src/heap.cc Wed Jun 20 04:29:00 2012
+++ /trunk/src/heap.cc Fri Jun 22 06:44:28 2012
@@ -5014,7 +5014,11 @@
bool Heap::IdleNotification(int hint) {
+ // Hints greater than this value indicate that
+ // the embedder is requesting a lot of GC work.
const int kMaxHint = 1000;
+ // Minimal hint that allows to do full GC.
+ const int kMinHintForFullGC = 100;
intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4;
// The size factor is in range [5..250]. The numbers here are chosen from
// experiments. If you changes them, make sure to test with
@@ -5082,16 +5086,30 @@
mark_sweeps_since_idle_round_started_ += new_mark_sweeps;
ms_count_at_last_idle_notification_ = ms_count_;
- if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
+ int remaining_mark_sweeps = kMaxMarkSweepsInIdleRound -
+ mark_sweeps_since_idle_round_started_;
+
+ if (remaining_mark_sweeps <= 0) {
FinishIdleRound();
return true;
}
if (incremental_marking()->IsStopped()) {
- incremental_marking()->Start();
- }
-
- AdvanceIdleIncrementalMarking(step_size);
+ // If there are no more than two GCs left in this idle round and we are
+    // allowed to do a full GC, then make those GCs full in order to compact
+ // the code space.
+ // TODO(ulan): Once we enable code compaction for incremental marking,
+    // we can get rid of this special case and always start incremental marking.
+ if (remaining_mark_sweeps <= 2 && hint >= kMinHintForFullGC) {
+ CollectAllGarbage(kReduceMemoryFootprintMask,
+ "idle notification: finalize idle round");
+ } else {
+ incremental_marking()->Start();
+ }
+ }
+ if (!incremental_marking()->IsStopped()) {
+ AdvanceIdleIncrementalMarking(step_size);
+ }
return false;
}
=======================================
--- /trunk/src/hydrogen.cc Wed Jun 20 04:29:00 2012
+++ /trunk/src/hydrogen.cc Fri Jun 22 06:44:28 2012
@@ -4536,8 +4536,7 @@
}
Handle<GlobalObject> global(info()->global_object());
global->Lookup(*var->name(), lookup);
- if (!lookup->IsFound() ||
- lookup->type() != NORMAL ||
+ if (!lookup->IsNormal() ||
(is_store && lookup->IsReadOnly()) ||
lookup->holder() != *global) {
return kUseGeneric;
@@ -4916,9 +4915,8 @@
LookupResult* lookup,
bool is_store) {
type->LookupInDescriptors(NULL, *name, lookup);
- if (!lookup->IsFound()) return false;
- if (lookup->type() == FIELD) return true;
- return is_store && (lookup->type() == MAP_TRANSITION) &&
+ if (lookup->IsField()) return true;
+ return is_store && lookup->IsMapTransition() &&
(type->unused_property_fields() > 0);
}
@@ -4926,8 +4924,8 @@
static int ComputeLoadStoreFieldIndex(Handle<Map> type,
Handle<String> name,
LookupResult* lookup) {
- ASSERT(lookup->type() == FIELD || lookup->type() == MAP_TRANSITION);
- if (lookup->type() == FIELD) {
+ ASSERT(lookup->IsField() || lookup->type() == MAP_TRANSITION);
+ if (lookup->IsField()) {
return lookup->GetLocalFieldIndexFromMap(*type);
} else {
Map* transition = lookup->GetTransitionMapFromMap(*type);
@@ -5626,13 +5624,13 @@
Handle<String> name) {
LookupResult lookup(isolate());
map->LookupInDescriptors(NULL, *name, &lookup);
- if (lookup.IsFound() && lookup.type() == FIELD) {
+ if (lookup.IsField()) {
return BuildLoadNamedField(obj,
expr,
map,
&lookup,
true);
- } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
+ } else if (lookup.IsConstantFunction()) {
AddInstruction(new(zone()) HCheckNonSmi(obj));
AddInstruction(HCheckMaps::NewWithTransitions(obj, map, zone()));
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*map));
@@ -8116,9 +8114,7 @@
Handle<GlobalObject> global(info()->global_object());
LookupResult lookup(isolate());
global->Lookup(*name, &lookup);
- if (lookup.IsFound() &&
- lookup.type() == NORMAL &&
- lookup.GetValue()->IsJSFunction()) {
+ if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
// If the function is in new space we assume it's more likely to
// change and thus prefer the general IC code.
=======================================
--- /trunk/src/ia32/lithium-codegen-ia32.cc Thu Jun 21 04:16:20 2012
+++ /trunk/src/ia32/lithium-codegen-ia32.cc Fri Jun 22 06:44:28 2012
@@ -2412,7 +2412,7 @@
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
ASSERT(lookup.IsFound() || lookup.IsCacheable());
- if (lookup.IsFound() && lookup.type() == FIELD) {
+ if (lookup.IsField()) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2424,7 +2424,7 @@
__ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
__ mov(result, FieldOperand(result, offset +
FixedArray::kHeaderSize));
}
- } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
+ } else if (lookup.IsConstantFunction()) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
} else {
@@ -2464,13 +2464,17 @@
// Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
// prototype chain, which causes unbounded code generation.
-static bool CompactEmit(
- SmallMapList* list, Handle<String> name, int i, Isolate* isolate) {
- LookupResult lookup(isolate);
+static bool CompactEmit(SmallMapList* list,
+ Handle<String> name,
+ int i,
+ Isolate* isolate) {
Handle<Map> map = list->at(i);
+ // If the map has ElementsKind transitions, we will generate map checks
+ // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
+ if (map->elements_transition_map() != NULL) return false;
+ LookupResult lookup(isolate);
map->LookupInDescriptors(NULL, *name, &lookup);
- return lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION);
+ return lookup.IsField() || lookup.IsConstantFunction();
}
=======================================
--- /trunk/src/ia32/stub-cache-ia32.cc Wed Jun 13 04:51:58 2012
+++ /trunk/src/ia32/stub-cache-ia32.cc Fri Jun 22 06:44:28 2012
@@ -1157,7 +1157,7 @@
// later.
bool compile_followup_inline = false;
if (lookup->IsFound() && lookup->IsCacheable()) {
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
@@ -1242,7 +1242,7 @@
miss);
}
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
// We found FIELD property in prototype chain of interceptor's holder.
// Retrieve a field from field's holder.
GenerateFastPropertyLoad(masm(), eax, holder_reg,
=======================================
--- /trunk/src/ic.cc Thu Jun 14 02:26:57 2012
+++ /trunk/src/ic.cc Fri Jun 22 06:44:28 2012
@@ -435,9 +435,7 @@
// Besides normal conditions (property not found or it's not
// an interceptor), bail out if lookup is not cacheable: we won't
// be able to IC it anyway and regular lookup should work fine.
- if (!lookup->IsFound()
- || (lookup->type() != INTERCEPTOR)
- || !lookup->IsCacheable()) {
+ if (!lookup->IsInterceptor() || !lookup->IsCacheable()) {
return;
}
@@ -448,7 +446,7 @@
holder->LocalLookupRealNamedProperty(*name, lookup);
if (lookup->IsProperty()) {
- ASSERT(lookup->type() != INTERCEPTOR);
+ ASSERT(!lookup->IsInterceptor());
return;
}
@@ -554,7 +552,7 @@
Object::GetProperty(object, object, &lookup, name, &attr);
RETURN_IF_EMPTY_HANDLE(isolate(), result);
- if (lookup.type() == INTERCEPTOR && attr == ABSENT) {
+ if (lookup.IsInterceptor() && attr == ABSENT) {
// If the object does not have the requested property, check which
// exception we need to throw.
return IsContextual(object)
@@ -915,8 +913,7 @@
}
PropertyAttributes attr;
- if (lookup.IsFound() &&
- (lookup.type() == INTERCEPTOR || lookup.type() == HANDLER)) {
+ if (lookup.IsInterceptor() || lookup.IsHandler()) {
// Get the property.
Handle<Object> result =
Object::GetProperty(object, object, &lookup, name, &attr);
@@ -1177,7 +1174,7 @@
}
PropertyAttributes attr;
- if (lookup.IsFound() && lookup.type() == INTERCEPTOR) {
+ if (lookup.IsInterceptor()) {
// Get the property.
Handle<Object> result =
Object::GetProperty(object, object, &lookup, name, &attr);
@@ -1321,7 +1318,7 @@
return false;
}
- if (lookup->type() == INTERCEPTOR &&
+ if (lookup->IsInterceptor() &&
receiver->GetNamedInterceptor()->setter()->IsUndefined()) {
receiver->LocalLookupRealNamedProperty(*name, lookup);
return StoreICableLookup(lookup);
@@ -1438,7 +1435,7 @@
ASSERT(!receiver->IsJSGlobalProxy());
ASSERT(StoreICableLookup(lookup));
// These are not cacheable, so we never see such LookupResults here.
- ASSERT(lookup->type() != HANDLER);
+ ASSERT(!lookup->IsHandler());
// We get only called for properties or transitions, see StoreICableLookup.
ASSERT(lookup->type() != NULL_DESCRIPTOR);
@@ -1940,7 +1937,7 @@
ASSERT(!receiver->IsJSGlobalProxy());
ASSERT(StoreICableLookup(lookup));
// These are not cacheable, so we never see such LookupResults here.
- ASSERT(lookup->type() != HANDLER);
+ ASSERT(!lookup->IsHandler());
// We get only called for properties or transitions, see StoreICableLookup.
ASSERT(lookup->type() != NULL_DESCRIPTOR);
@@ -2116,7 +2113,7 @@
// The length property has to be a writable callback property.
LookupResult debug_lookup(isolate);
receiver->LocalLookup(isolate->heap()->length_symbol(), &debug_lookup);
- ASSERT(debug_lookup.type() == CALLBACKS && !debug_lookup.IsReadOnly());
+ ASSERT(debug_lookup.IsCallbacks() && !debug_lookup.IsReadOnly());
#endif
Object* result;
=======================================
--- /trunk/src/mark-compact.cc Wed Jun 20 04:29:00 2012
+++ /trunk/src/mark-compact.cc Fri Jun 22 06:44:28 2012
@@ -500,12 +500,10 @@
space->identity() == OLD_DATA_SPACE ||
space->identity() == CODE_SPACE);
+ static const int kMaxMaxEvacuationCandidates = 1000;
int number_of_pages = space->CountTotalPages();
-
- const int kMaxMaxEvacuationCandidates = 1000;
- int max_evacuation_candidates = Min(
- kMaxMaxEvacuationCandidates,
- static_cast<int>(sqrt(static_cast<double>(number_of_pages / 2)) + 1));
+ int max_evacuation_candidates =
+ static_cast<int>(sqrt(static_cast<double>(number_of_pages / 2)) + 1);
if (FLAG_stress_compaction || FLAG_always_compact) {
max_evacuation_candidates = kMaxMaxEvacuationCandidates;
@@ -535,17 +533,27 @@
intptr_t over_reserved = reserved - space->SizeOfObjects();
static const intptr_t kFreenessThreshold = 50;
- if (over_reserved >= 2 * space->AreaSize() &&
- reduce_memory_footprint_) {
- mode = REDUCE_MEMORY_FOOTPRINT;
-
-    // We expect that empty pages are easier to compact so slightly bump the
- // limit.
- max_evacuation_candidates += 2;
-
- if (FLAG_trace_fragmentation) {
-      PrintF("Estimated over reserved memory: %.1f MB (setting threshold %d)\n",
+ if (over_reserved >= 2 * space->AreaSize()) {
+ // If reduction of memory footprint was requested, we are aggressive
+ // about choosing pages to free. We expect that half-empty pages
+ // are easier to compact so slightly bump the limit.
+ if (reduce_memory_footprint_) {
+ mode = REDUCE_MEMORY_FOOTPRINT;
+ max_evacuation_candidates += 2;
+ }
+
+ // If over-usage is very high (more than a third of the space), we
+ // try to free all mostly empty pages. We expect that almost empty
+ // pages are even easier to compact so bump the limit even more.
+ if (over_reserved > reserved / 3) {
+ mode = REDUCE_MEMORY_FOOTPRINT;
+ max_evacuation_candidates *= 2;
+ }
+
+ if (FLAG_trace_fragmentation && mode == REDUCE_MEMORY_FOOTPRINT) {
+        PrintF("Estimated over reserved memory: %.1f / %.1f MB (threshold %d)\n",
static_cast<double>(over_reserved) / MB,
+ static_cast<double>(reserved) / MB,
static_cast<int>(kFreenessThreshold));
}
}
@@ -554,6 +562,9 @@
Candidate candidates[kMaxMaxEvacuationCandidates];
+ max_evacuation_candidates =
+ Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates);
+
int count = 0;
int fragmentation = 0;
Candidate* least = NULL;
@@ -3812,12 +3823,6 @@
bool lazy_sweeping_active = false;
bool unused_page_present = false;
- intptr_t old_space_size = heap()->PromotedSpaceSizeOfObjects();
- intptr_t space_left =
- Min(heap()->OldGenLimit(old_space_size, Heap::kMinPromotionLimit),
- heap()->OldGenLimit(old_space_size, Heap::kMinAllocationLimit)) -
- old_space_size;
-
while (it.has_next()) {
Page* p = it.next();
@@ -3877,7 +3882,7 @@
}
freed_bytes += SweepConservatively(space, p);
pages_swept++;
- if (space_left + freed_bytes > newspace_size) {
+ if (freed_bytes > 2 * newspace_size) {
space->SetPagesToSweep(p->next_page());
lazy_sweeping_active = true;
} else {
=======================================
--- /trunk/src/mips/lithium-codegen-mips.cc Thu Jun 21 04:16:20 2012
+++ /trunk/src/mips/lithium-codegen-mips.cc Fri Jun 22 06:44:28 2012
@@ -2323,7 +2323,7 @@
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
ASSERT(lookup.IsFound() || lookup.IsCacheable());
- if (lookup.IsFound() && lookup.type() == FIELD) {
+ if (lookup.IsField()) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2335,7 +2335,7 @@
__ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
__ lw(result, FieldMemOperand(result, offset +
FixedArray::kHeaderSize));
}
- } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
+ } else if (lookup.IsConstantFunction()) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
} else {
=======================================
--- /trunk/src/mips/stub-cache-mips.cc Wed Jun 13 04:51:58 2012
+++ /trunk/src/mips/stub-cache-mips.cc Fri Jun 22 06:44:28 2012
@@ -1318,7 +1318,7 @@
// later.
bool compile_followup_inline = false;
if (lookup->IsFound() && lookup->IsCacheable()) {
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
@@ -1391,7 +1391,7 @@
miss);
}
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
// We found FIELD property in prototype chain of interceptor's holder.
// Retrieve a field from field's holder.
GenerateFastPropertyLoad(masm(), v0, holder_reg,
=======================================
--- /trunk/src/objects.cc Wed Jun 20 04:29:00 2012
+++ /trunk/src/objects.cc Fri Jun 22 06:44:28 2012
@@ -415,7 +415,10 @@
break;
}
- default:
+ case HANDLER:
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case NONEXISTENT:
UNREACHABLE();
}
}
@@ -2417,10 +2420,11 @@
// We return all of these result types because
// LocalLookupRealNamedProperty is used when setting properties
// where map transitions and null descriptors are handled.
- ASSERT(result->holder() == this && result->type() != NORMAL);
+ ASSERT(result->holder() == this && result->IsFastPropertyType());
// Disallow caching for uninitialized constants. These can only
// occur as fields.
- if (result->IsReadOnly() && result->type() == FIELD &&
+ if (result->IsField() &&
+ result->IsReadOnly() &&
FastPropertyAt(result->GetFieldIndex())->IsTheHole()) {
result->DisallowCaching();
}
@@ -2537,7 +2541,7 @@
PropertyAttributes attributes,
StrictModeFlag strict_mode,
JSReceiver::StoreFromKeyed
store_mode) {
- if (result->IsFound() && result->type() == HANDLER) {
+ if (result->IsHandler()) {
return result->proxy()->SetPropertyWithHandler(
this, key, value, attributes, strict_mode);
} else {
@@ -3908,7 +3912,7 @@
return isolate->heap()->false_value();
}
// Check for interceptor.
- if (result.type() == INTERCEPTOR) {
+ if (result.IsInterceptor()) {
// Skip interceptor if forcing a deletion.
if (mode == FORCE_DELETION) {
return DeletePropertyPostInterceptor(name, mode);
@@ -4267,7 +4271,7 @@
current != heap->null_value() && current->IsJSObject();
current = JSObject::cast(current)->GetPrototype()) {
JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
- if (result->IsFound() && result->type() == CALLBACKS) return;
+ if (result->IsCallbacks()) return;
}
result->NotFound();
}
@@ -4370,7 +4374,7 @@
MaybeObject* JSObject::CreateAccessorPairFor(String* name) {
LookupResult result(GetHeap()->isolate());
LocalLookupRealNamedProperty(name, &result);
- if (result.IsProperty() && result.type() == CALLBACKS) {
+ if (result.IsProperty() && result.IsCallbacks()) {
// Note that the result can actually have IsDontDelete() == true when we
// e.g. have to fall back to the slow case while adding a setter after
// successfully reusing a map transition for a getter. Nevertheless, this is
@@ -4840,7 +4844,7 @@
JSObject::cast(obj)->LocalLookup(name, &result);
if (result.IsProperty()) {
if (result.IsReadOnly()) return heap->undefined_value();
- if (result.type() == CALLBACKS) {
+ if (result.IsCallbacks()) {
Object* obj = result.GetCallbackObject();
if (obj->IsAccessorPair()) {
return AccessorPair::cast(obj)->GetComponent(component);
@@ -7788,9 +7792,7 @@
LookupResult result(heap->isolate());
String* name = GetThisPropertyAssignmentName(i);
js_object->LocalLookupRealNamedProperty(name, &result);
- if (result.IsFound() && result.type() == CALLBACKS) {
- return false;
- }
+ if (result.IsCallbacks()) return false;
}
}
@@ -10385,7 +10387,7 @@
LookupResult result(isolate);
LocalLookupRealNamedProperty(key, &result);
- return result.IsProperty() && (result.type() != INTERCEPTOR);
+ return result.IsProperty() && !result.IsInterceptor();
}
@@ -10465,7 +10467,7 @@
LookupResult result(isolate);
LocalLookupRealNamedProperty(key, &result);
- return result.IsFound() && (result.type() == CALLBACKS);
+ return result.IsCallbacks();
}
=======================================
--- /trunk/src/property.h Wed Jun 13 04:51:58 2012
+++ /trunk/src/property.h Fri Jun 22 06:44:28 2012
@@ -189,7 +189,7 @@
lookup_type_(NOT_FOUND),
holder_(NULL),
cacheable_(true),
- details_(NONE, NORMAL) {
+ details_(NONE, NONEXISTENT) {
isolate->SetTopLookupResult(this);
}
@@ -237,6 +237,7 @@
void NotFound() {
lookup_type_ = NOT_FOUND;
+ details_ = PropertyDetails(NONE, NONEXISTENT);
holder_ = NULL;
}
@@ -264,12 +265,47 @@
return details_;
}
- bool IsReadOnly() { return details_.IsReadOnly(); }
+ bool IsFastPropertyType() {
+ ASSERT(IsFound());
+ return type() != NORMAL;
+ }
+
+ bool IsReadOnly() {
+ ASSERT(IsFound());
+ return details_.IsReadOnly();
+ }
+
+ bool IsCallbacks() {
+ ASSERT(!(details_.type() == CALLBACKS && !IsFound()));
+ return details_.type() == CALLBACKS;
+ }
+
+ bool IsField() {
+ ASSERT(!(details_.type() == FIELD && !IsFound()));
+ return details_.type() == FIELD;
+ }
+
+ bool IsNormal() {
+ ASSERT(!(details_.type() == NORMAL && !IsFound()));
+ return details_.type() == NORMAL;
+ }
+
+ bool IsConstantFunction() {
+ ASSERT(!(details_.type() == CONSTANT_FUNCTION && !IsFound()));
+ return details_.type() == CONSTANT_FUNCTION;
+ }
+
+ bool IsMapTransition() {
+ ASSERT(!(details_.type() == MAP_TRANSITION && !IsFound()));
+ return details_.type() == MAP_TRANSITION;
+ }
+
bool IsDontDelete() { return details_.IsDontDelete(); }
bool IsDontEnum() { return details_.IsDontEnum(); }
bool IsDeleted() { return details_.IsDeleted(); }
bool IsFound() { return lookup_type_ != NOT_FOUND; }
bool IsHandler() { return lookup_type_ == HANDLER_TYPE; }
+ bool IsInterceptor() { return lookup_type_ == INTERCEPTOR_TYPE; }
// Is the result a property excluding transitions and the null descriptor?
bool IsProperty() {
@@ -297,7 +333,6 @@
return Smi::FromInt(0);
}
}
-
Map* GetTransitionMap() {
ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
@@ -314,13 +349,13 @@
int GetFieldIndex() {
ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
- ASSERT(type() == FIELD);
+ ASSERT(IsField());
return Descriptor::IndexFromValue(GetValue());
}
int GetLocalFieldIndexFromMap(Map* map) {
ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
- ASSERT(type() == FIELD);
+ ASSERT(IsField());
return Descriptor::IndexFromValue(
map->instance_descriptors()->GetValue(number_)) -
map->inobject_properties();
=======================================
--- /trunk/src/runtime.cc Wed Jun 20 04:29:00 2012
+++ /trunk/src/runtime.cc Fri Jun 22 06:44:28 2012
@@ -1115,7 +1115,7 @@
elms->set(ENUMERABLE_INDEX, heap->ToBoolean(!result.IsDontEnum()));
elms->set(CONFIGURABLE_INDEX, heap->ToBoolean(!result.IsDontDelete()));
- bool is_js_accessor = (result.type() == CALLBACKS) &&
+ bool is_js_accessor = result.IsCallbacks() &&
(result.GetCallbackObject()->IsAccessorPair());
if (is_js_accessor) {
@@ -1318,8 +1318,9 @@
Object* obj = *global;
do {
JSObject::cast(obj)->LocalLookup(*name, &lookup);
+ if (lookup.IsProperty()) break;
obj = obj->GetPrototype();
- } while (!lookup.IsFound() && obj->IsJSObject() &&
+ } while (obj->IsJSObject() &&
JSObject::cast(obj)->map()->is_hidden_prototype());
} else {
global->Lookup(*name, &lookup);
@@ -1327,7 +1328,7 @@
if (lookup.IsProperty()) {
// We found an existing property. Unless it was an interceptor
// that claims the property is absent, skip this declaration.
- if (lookup.type() != INTERCEPTOR) continue;
+ if (!lookup.IsInterceptor()) continue;
PropertyAttributes attributes =
global->GetPropertyAttribute(*name);
if (attributes != ABSENT) continue;
// Fall-through and introduce the absent property by using
@@ -1365,7 +1366,7 @@
// as required for function declarations.
if (lookup.IsProperty() && lookup.IsDontDelete()) {
if (lookup.IsReadOnly() || lookup.IsDontEnum() ||
- lookup.type() == CALLBACKS) {
+ lookup.IsCallbacks()) {
return ThrowRedeclarationError(
isolate, is_function ? "function" : "module", name);
}
@@ -1474,7 +1475,7 @@
!object->IsJSContextExtensionObject()) {
LookupResult lookup(isolate);
object->Lookup(*name, &lookup);
- if (lookup.IsFound() && (lookup.type() == CALLBACKS)) {
+ if (lookup.IsCallbacks()) {
return ThrowRedeclarationError(isolate, "const", name);
}
}
@@ -1527,7 +1528,7 @@
JSObject::cast(object)->map()->is_hidden_prototype()) {
JSObject* raw_holder = JSObject::cast(object);
raw_holder->LocalLookup(*name, &lookup);
- if (lookup.IsFound() && lookup.type() == INTERCEPTOR) {
+ if (lookup.IsInterceptor()) {
HandleScope handle_scope(isolate);
Handle<JSObject> holder(raw_holder);
PropertyAttributes intercepted = holder->GetPropertyAttribute(*name);
@@ -1605,14 +1606,13 @@
// constant. For now, we determine this by checking if the
// current value is the hole.
// Strict mode handling not needed (const is disallowed in strict mode).
- PropertyType type = lookup.type();
- if (type == FIELD) {
+ if (lookup.IsField()) {
FixedArray* properties = global->properties();
int index = lookup.GetFieldIndex();
if (properties->get(index)->IsTheHole() || !lookup.IsReadOnly()) {
properties->set(index, *value);
}
- } else if (type == NORMAL) {
+ } else if (lookup.IsNormal()) {
if (global->GetNormalizedProperty(&lookup)->IsTheHole() ||
!lookup.IsReadOnly()) {
global->SetNormalizedProperty(&lookup, *value);
@@ -1620,7 +1620,7 @@
} else {
// Ignore re-initialization of constants that have already been
// assigned a function value.
- ASSERT(lookup.IsReadOnly() && type == CONSTANT_FUNCTION);
+ ASSERT(lookup.IsReadOnly() && lookup.IsConstantFunction());
}
// Use the set value as the result of the operation.
@@ -1696,14 +1696,13 @@
ASSERT(lookup.IsFound()); // the property was declared
ASSERT(lookup.IsReadOnly()); // and it was declared as read-only
- PropertyType type = lookup.type();
- if (type == FIELD) {
+ if (lookup.IsField()) {
FixedArray* properties = object->properties();
int index = lookup.GetFieldIndex();
if (properties->get(index)->IsTheHole()) {
properties->set(index, *value);
}
- } else if (type == NORMAL) {
+ } else if (lookup.IsNormal()) {
if (object->GetNormalizedProperty(&lookup)->IsTheHole()) {
object->SetNormalizedProperty(&lookup, *value);
}
@@ -4372,7 +4371,7 @@
// appropriate.
LookupResult result(isolate);
receiver->LocalLookup(key, &result);
- if (result.IsFound() && result.type() == FIELD) {
+ if (result.IsField()) {
int offset = result.GetFieldIndex();
keyed_lookup_cache->Update(receiver_map, key, offset);
return receiver->FastPropertyAt(offset);
@@ -4484,7 +4483,7 @@
js_object->LocalLookupRealNamedProperty(*name, &result);
// Special case for callback properties.
- if (result.IsFound() && result.type() == CALLBACKS) {
+ if (result.IsCallbacks()) {
Object* callback = result.GetCallbackObject();
// To be compatible with Safari we do not change the value on API objects
// in Object.defineProperty(). Firefox disagrees here, and actually changes
@@ -4512,7 +4511,7 @@
// correctly in the case where a property is a field and is reset with
// new attributes.
if (result.IsProperty() &&
- (attr != result.GetAttributes() || result.type() == CALLBACKS)) {
+ (attr != result.GetAttributes() || result.IsCallbacks())) {
// New attributes - normalize to avoid writing to instance descriptor
if (js_object->IsJSGlobalProxy()) {
// Since the result is a property, the prototype will exist so
@@ -10364,9 +10363,8 @@
// LookupResult is not GC safe as it holds raw object pointers.
// GC can happen later in this code so put the required fields into
// local variables using handles when required for later use.
- PropertyType result_type = result.type();
Handle<Object> result_callback_obj;
- if (result_type == CALLBACKS) {
+ if (result.IsCallbacks()) {
result_callback_obj = Handle<Object>(result.GetCallbackObject(),
isolate);
}
@@ -10384,7 +10382,7 @@
// If the callback object is a fixed array then it contains JavaScript
// getter and/or setter.
- bool hasJavaScriptAccessors = result_type == CALLBACKS &&
+ bool hasJavaScriptAccessors = result.IsCallbacks() &&
result_callback_obj->IsAccessorPair();
Handle<FixedArray> details =
isolate->factory()->NewFixedArray(hasJavaScriptAccessors ? 5 : 2);
=======================================
--- /trunk/src/version.cc Thu Jun 21 04:16:20 2012
+++ /trunk/src/version.cc Fri Jun 22 06:44:28 2012
@@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 12
-#define BUILD_NUMBER 1
+#define BUILD_NUMBER 2
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
=======================================
--- /trunk/src/x64/lithium-codegen-x64.cc Thu Jun 21 04:16:20 2012
+++ /trunk/src/x64/lithium-codegen-x64.cc Fri Jun 22 06:44:28 2012
@@ -2286,7 +2286,7 @@
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
ASSERT(lookup.IsFound() || lookup.IsCacheable());
- if (lookup.IsFound() && lookup.type() == FIELD) {
+ if (lookup.IsField()) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2298,7 +2298,7 @@
__ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
__ movq(result, FieldOperand(result, offset +
FixedArray::kHeaderSize));
}
- } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
+ } else if (lookup.IsConstantFunction()) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
} else {
@@ -2321,13 +2321,17 @@
// Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
// prototype chain, which causes unbounded code generation.
-static bool CompactEmit(
- SmallMapList* list, Handle<String> name, int i, Isolate* isolate) {
- LookupResult lookup(isolate);
+static bool CompactEmit(SmallMapList* list,
+ Handle<String> name,
+ int i,
+ Isolate* isolate) {
Handle<Map> map = list->at(i);
+ // If the map has ElementsKind transitions, we will generate map checks
+ // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
+ if (map->elements_transition_map() != NULL) return false;
+ LookupResult lookup(isolate);
map->LookupInDescriptors(NULL, *name, &lookup);
- return lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION);
+ return lookup.IsField() || lookup.IsConstantFunction();
}
=======================================
--- /trunk/src/x64/stub-cache-x64.cc Wed Jun 13 04:51:58 2012
+++ /trunk/src/x64/stub-cache-x64.cc Fri Jun 22 06:44:28 2012
@@ -1143,7 +1143,7 @@
// later.
bool compile_followup_inline = false;
if (lookup->IsFound() && lookup->IsCacheable()) {
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
@@ -1221,7 +1221,7 @@
miss);
}
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
// We found FIELD property in prototype chain of interceptor's holder.
// Retrieve a field from field's holder.
GenerateFastPropertyLoad(masm(), rax, holder_reg,
=======================================
--- /trunk/test/cctest/test-alloc.cc Mon Apr 30 05:34:39 2012
+++ /trunk/test/cctest/test-alloc.cc Fri Jun 22 06:44:28 2012
@@ -34,7 +34,8 @@
using namespace v8::internal;
-static inline void SimulateFullSpace(PagedSpace* space) {
+// Also used in test-heap.cc test cases.
+void SimulateFullSpace(PagedSpace* space) {
int old_linear_size = static_cast<int>(space->limit() - space->top());
space->Free(space->top(), old_linear_size);
space->SetTop(space->limit(), space->limit());
=======================================
--- /trunk/test/cctest/test-compiler.cc Wed Jun 20 04:29:00 2012
+++ /trunk/test/cctest/test-compiler.cc Fri Jun 22 06:44:28 2012
@@ -355,6 +355,9 @@
// Test that optimized code for different closures is actually shared
// immediately by the FastNewClosureStub when run in the same context.
TEST(OptimizedCodeSharing) {
+ // Skip test if --cache-optimized-code is not activated by default because
+ // FastNewClosureStub that is baked into the snapshot is incorrect.
+ if (!FLAG_cache_optimized_code) return;
FLAG_allow_natives_syntax = true;
InitializeVM();
v8::HandleScope scope;
=======================================
--- /trunk/test/cctest/test-decls.cc Mon Apr 23 05:57:22 2012
+++ /trunk/test/cctest/test-decls.cc Fri Jun 22 06:44:28 2012
@@ -71,6 +71,10 @@
virtual v8::Handle<Integer> Query(Local<String> key);
void InitializeIfNeeded();
+
+ // Perform optional initialization steps on the context after it has
+ // been created. Defaults to none but may be overwritten.
+ virtual void PostInitializeContext(Handle<Context> context) {}
// Get the holder for the interceptor. Default to the instance template
// but may be overwritten.
@@ -120,6 +124,7 @@
context_ = Context::New(0, function->InstanceTemplate(), Local<Value>());
context_->Enter();
is_initialized_ = true;
+ PostInitializeContext(context_);
}
@@ -536,9 +541,9 @@
{ ExistsInPrototypeContext context;
context.Check("var x; x",
- 0, // get
0,
- 0, // declaration
+ 0,
+ 0,
EXPECT_RESULT, Undefined());
}
@@ -546,7 +551,7 @@
context.Check("var x = 0; x",
0,
0,
- 0, // declaration
+ 0,
EXPECT_RESULT, Number::New(0));
}
@@ -554,7 +559,7 @@
context.Check("const x; x",
0,
0,
- 0, // declaration
+ 0,
EXPECT_RESULT, Undefined());
}
@@ -562,7 +567,7 @@
context.Check("const x = 0; x",
0,
0,
- 0, // declaration
+ 0,
EXPECT_RESULT, Number::New(0));
}
}
@@ -591,7 +596,88 @@
context.Check("if (false) { var x = 0; }; x",
0,
0,
- 0, // declaration
+ 0,
EXPECT_RESULT, Undefined());
}
}
+
+
+
+class ExistsInHiddenPrototypeContext: public DeclarationContext {
+ public:
+ ExistsInHiddenPrototypeContext() {
+ hidden_proto_ = FunctionTemplate::New();
+ hidden_proto_->SetHiddenPrototype(true);
+ }
+
+ protected:
+ virtual v8::Handle<Integer> Query(Local<String> key) {
+ // Let it seem that the property exists in the hidden prototype object.
+ return Integer::New(v8::None);
+ }
+
+ // Install the hidden prototype after the global object has been created.
+ virtual void PostInitializeContext(Handle<Context> context) {
+ Local<Object> global_object = context->Global();
+ Local<Object> hidden_proto =
+     hidden_proto_->GetFunction()->NewInstance();
+ context->DetachGlobal();
+ context->Global()->SetPrototype(hidden_proto);
+ context->ReattachGlobal(global_object);
+ }
+
+ // Use the hidden prototype as the holder for the interceptors.
+ virtual Local<ObjectTemplate> GetHolder(Local<FunctionTemplate>
+     function) {
+ return hidden_proto_->InstanceTemplate();
+ }
+
+ private:
+ Local<FunctionTemplate> hidden_proto_;
+};
+
+
+TEST(ExistsInHiddenPrototype) {
+ i::FLAG_es52_globals = true;
+ HandleScope scope;
+
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("var x; x",
+ 1, // access
+ 0,
+ 2, // declaration + initialization
+ EXPECT_EXCEPTION); // x is not defined!
+ }
+
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("var x = 0; x",
+ 1, // access
+ 1, // initialization
+ 2, // declaration + initialization
+ EXPECT_RESULT, Number::New(0));
+ }
+
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("function x() { }; x",
+ 0,
+ 0,
+ 0,
+ EXPECT_RESULT);
+ }
+
+ // TODO(mstarzinger): The semantics of global const is vague.
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("const x; x",
+ 0,
+ 0,
+ 1, // (re-)declaration
+ EXPECT_RESULT, Undefined());
+ }
+
+ // TODO(mstarzinger): The semantics of global const is vague.
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("const x = 0; x",
+ 0,
+ 0,
+ 1, // (re-)declaration
+ EXPECT_RESULT, Number::New(0));
+ }
+}
=======================================
--- /trunk/test/cctest/test-heap.cc Wed Jun 20 04:29:00 2012
+++ /trunk/test/cctest/test-heap.cc Fri Jun 22 06:44:28 2012
@@ -1899,3 +1899,42 @@
CHECK(root->IsJSObject());
CHECK(root->map()->IsMap());
}
+
+
+// Implemented in the test-alloc.cc test suite.
+void SimulateFullSpace(PagedSpace* space);
+
+
+TEST(ReleaseOverReservedPages) {
+ i::FLAG_trace_gc = true;
+ InitializeVM();
+ v8::HandleScope scope;
+ static const int number_of_test_pages = 20;
+
+ // Prepare many pages with low live-bytes count.
+ PagedSpace* old_pointer_space = HEAP->old_pointer_space();
+ CHECK_EQ(1, old_pointer_space->CountTotalPages());
+ for (int i = 0; i < number_of_test_pages; i++) {
+ AlwaysAllocateScope always_allocate;
+ SimulateFullSpace(old_pointer_space);
+ FACTORY->NewFixedArray(1, TENURED);
+ }
+ CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+
+ // Triggering one GC will cause a lot of garbage to be discovered but
+ // even spread across all allocated pages.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
+ CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+
+ // Triggering subsequent GCs should cause at least half of the pages
+ // to be released to the OS after at most two cycles.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
+ CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
+ CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
+
+ // Triggering a last-resort GC should cause all pages to be released
+ // to the OS so that other processes can seize the memory.
+ HEAP->CollectAllAvailableGarbage("triggered really hard");
+ CHECK_EQ(1, old_pointer_space->CountTotalPages());
+}
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev