Revision: 14540
Author: hpa...@chromium.org
Date: Fri May 3 03:36:16 2013
Log: Pretenure ASCII cons string in high promotion mode.
BUG=
Review URL: https://codereview.chromium.org/14451003
http://code.google.com/p/v8/source/detail?r=14540
Modified:
/branches/bleeding_edge/src/arm/code-stubs-arm.cc
/branches/bleeding_edge/src/arm/macro-assembler-arm.cc
/branches/bleeding_edge/src/assembler.cc
/branches/bleeding_edge/src/assembler.h
/branches/bleeding_edge/src/heap.cc
/branches/bleeding_edge/src/heap.h
/branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
/branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc
/branches/bleeding_edge/src/runtime.cc
/branches/bleeding_edge/src/serialize.cc
/branches/bleeding_edge/src/x64/code-stubs-x64.cc
/branches/bleeding_edge/src/x64/macro-assembler-x64.cc
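(The same pattern repeats in each back end below. A rough, standalone C++
sketch of the decision, using hypothetical names -- the real logic lives in
the stub and macro-assembler hunks that follow:)

  #include <cstdint>

  // Hypothetical, simplified model of the decision this change adds.
  enum PretenureFlag { NOT_TENURED, TENURED };

  struct HeapModel {
    // Kept word-sized so generated code can load and test it at a fixed
    // address (see the heap.h hunk widening the flag from bool to intptr_t).
    intptr_t new_space_high_promotion_mode_active_ = 0;

    PretenureFlag GetPretenureMode() const {
      return new_space_high_promotion_mode_active_ ? TENURED : NOT_TENURED;
    }
  };

  // Cons strings allocated while the mode is active go to old pointer space,
  // so their two pointer fields need write barriers; the new-space fast path
  // still skips the barrier.
  bool ConsStringNeedsWriteBarrier(const HeapModel& heap) {
    return heap.GetPretenureMode() == TENURED;
  }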
=======================================
--- /branches/bleeding_edge/src/arm/code-stubs-arm.cc Thu May 2 09:32:47 2013
+++ /branches/bleeding_edge/src/arm/code-stubs-arm.cc Fri May 3 03:36:16 2013
@@ -5958,8 +5958,36 @@
__ AllocateAsciiConsString(r7, r6, r4, r5, &call_runtime);
__ bind(&allocated);
// Fill the fields of the cons string.
+ Label skip_write_barrier, after_writing;
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(masm->isolate());
+ __ mov(r4, Operand(high_promotion_mode));
+ __ ldr(r4, MemOperand(r4, 0));
+ __ cmp(r4, Operand::Zero());
+ __ b(eq, &skip_write_barrier);
+
__ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
+ __ RecordWriteField(r7,
+ ConsString::kFirstOffset,
+ r0,
+ r4,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs);
__ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
+ __ RecordWriteField(r7,
+ ConsString::kSecondOffset,
+ r1,
+ r4,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs);
+ __ jmp(&after_writing);
+
+ __ bind(&skip_write_barrier);
+ __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
+ __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
+
+ __ bind(&after_writing);
+
__ mov(r0, Operand(r7));
__ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
@@ -6805,6 +6833,9 @@
{ REG(r5), REG(r0), REG(r6), EMIT_REMEMBERED_SET },
// FastNewClosureStub::Generate
{ REG(r2), REG(r4), REG(r1), EMIT_REMEMBERED_SET },
+ // StringAddStub::Generate
+ { REG(r7), REG(r1), REG(r4), EMIT_REMEMBERED_SET },
+ { REG(r7), REG(r0), REG(r4), EMIT_REMEMBERED_SET },
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
=======================================
--- /branches/bleeding_edge/src/arm/macro-assembler-arm.cc Fri Apr 26 08:30:41 2013
+++ /branches/bleeding_edge/src/arm/macro-assembler-arm.cc Fri May 3 03:36:16 2013
@@ -1933,8 +1933,34 @@
Register scratch1,
Register scratch2,
Label* gc_required) {
- Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
- TAG_OBJECT);
+ Label allocate_new_space, install_map;
+ AllocationFlags flags = TAG_OBJECT;
+
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(isolate());
+ mov(scratch1, Operand(high_promotion_mode));
+ ldr(scratch1, MemOperand(scratch1, 0));
+ cmp(scratch1, Operand::Zero());
+ b(eq, &allocate_new_space);
+
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+
+ jmp(&install_map);
+
+ bind(&allocate_new_space);
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ flags);
+
+ bind(&install_map);
InitializeNewString(result,
length,
=======================================
--- /branches/bleeding_edge/src/assembler.cc Wed Apr 24 00:39:35 2013
+++ /branches/bleeding_edge/src/assembler.cc Fri May 3 03:36:16 2013
@@ -1201,6 +1201,13 @@
return ExternalReference(
isolate->heap()->OldDataSpaceAllocationLimitAddress());
}
+
+
+ExternalReference ExternalReference::
+ new_space_high_promotion_mode_active_address(Isolate* isolate) {
+ return ExternalReference(
+ isolate->heap()->NewSpaceHighPromotionModeActiveAddress());
+}
ExternalReference ExternalReference::handle_scope_level_address(
=======================================
--- /branches/bleeding_edge/src/assembler.h Wed Apr 24 07:05:37 2013
+++ /branches/bleeding_edge/src/assembler.h Fri May 3 03:36:16 2013
@@ -757,6 +757,8 @@
Isolate* isolate);
static ExternalReference old_data_space_allocation_limit_address(
Isolate* isolate);
+ static ExternalReference new_space_high_promotion_mode_active_address(
+ Isolate* isolate);
static ExternalReference double_fp_operation(Token::Value operation,
Isolate* isolate);
=======================================
--- /branches/bleeding_edge/src/heap.cc Fri Apr 26 08:30:41 2013
+++ /branches/bleeding_edge/src/heap.cc Fri May 3 03:36:16 2013
@@ -3176,7 +3176,8 @@
MaybeObject* Heap::NumberToString(Object* number,
- bool check_number_string_cache) {
+ bool check_number_string_cache,
+ PretenureFlag pretenure) {
isolate_->counters()->number_to_string_runtime()->Increment();
if (check_number_string_cache) {
Object* cached = GetNumberStringCache(number);
@@ -3197,7 +3198,8 @@
}
Object* js_string;
- MaybeObject* maybe_js_string = AllocateStringFromOneByte(CStrVector(str));
+ MaybeObject* maybe_js_string =
+ AllocateStringFromOneByte(CStrVector(str), pretenure);
if (maybe_js_string->ToObject(&js_string)) {
SetNumberStringCache(number, String::cast(js_string));
}
=======================================
--- /branches/bleeding_edge/src/heap.h Thu May 2 13:28:02 2013
+++ /branches/bleeding_edge/src/heap.h Fri May 3 03:36:16 2013
@@ -1530,6 +1530,14 @@
inline bool ShouldGloballyPretenure() {
return new_space_high_promotion_mode_active_;
}
+
+ inline PretenureFlag GetPretenureMode() {
+ return new_space_high_promotion_mode_active_ ? TENURED : NOT_TENURED;
+ }
+
+ inline Address* NewSpaceHighPromotionModeActiveAddress() {
+ return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
+ }
inline intptr_t PromotedTotalSize() {
return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
@@ -1609,7 +1617,8 @@
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
MUST_USE_RESULT MaybeObject* NumberToString(
- Object* number, bool check_number_string_cache = true);
+ Object* number, bool check_number_string_cache = true,
+ PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT MaybeObject* Uint32ToString(
uint32_t value, bool check_number_string_cache = true);
@@ -1976,7 +1985,8 @@
// Indicates that the new space should be kept small due to high promotion
// rates caused by the mutator allocating a lot of long-lived objects.
- bool new_space_high_promotion_mode_active_;
+ // TODO(hpayer): change to bool if no longer accessed from generated code
+ intptr_t new_space_high_promotion_mode_active_;
// Limit that triggers a global GC on the next (normally caused) GC. This
// is checked when we have already decided to do a GC to help determine
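
(Note on the heap.h hunk above: the flag is widened to intptr_t so that stub
code can read it through the new external reference as a full machine word. A
hedged sketch of what the generated checks amount to, with an illustrative
helper name:)

  #include <cstdint>

  // Illustrative only: the stubs load the word at the external reference's
  // address and branch on it (ARM compares it against zero, ia32/x64 test it
  // against 1); nonzero means high promotion mode is active.
  inline bool HighPromotionModeActive(const intptr_t* flag_address) {
    return *flag_address != 0;
  }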
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Thu May 2 09:32:47 2013
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Fri May 3 03:36:16 2013
@@ -5838,8 +5838,33 @@
__ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
__ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
Immediate(String::kEmptyHashField));
+
+ Label skip_write_barrier, after_writing;
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(masm->isolate());
+ __ test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
+ __ j(zero, &skip_write_barrier);
+
__ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
+ __ RecordWriteField(ecx,
+ ConsString::kFirstOffset,
+ eax,
+ ebx,
+ kDontSaveFPRegs);
__ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
+ __ RecordWriteField(ecx,
+ ConsString::kSecondOffset,
+ edx,
+ ebx,
+ kDontSaveFPRegs);
+ __ jmp(&after_writing);
+
+ __ bind(&skip_write_barrier);
+ __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
+ __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
+
+ __ bind(&after_writing);
+
__ mov(eax, ecx);
__ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
@@ -7371,8 +7396,10 @@
{ REG(edx), REG(eax), REG(edi), EMIT_REMEMBERED_SET},
// StoreArrayLiteralElementStub::Generate
{ REG(ebx), REG(eax), REG(ecx), EMIT_REMEMBERED_SET},
- // FastNewClosureStub
+ // FastNewClosureStub and StringAddStub::Generate
{ REG(ecx), REG(edx), REG(ebx), EMIT_REMEMBERED_SET},
+ // StringAddStub::Generate
+ { REG(ecx), REG(eax), REG(ebx), EMIT_REMEMBERED_SET},
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
=======================================
--- /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc Fri Apr 26 08:30:41 2013
+++ /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc Fri May 3 03:36:16 2013
@@ -1603,10 +1603,32 @@
Register scratch1,
Register scratch2,
Label* gc_required) {
- // Allocate heap number in new space.
- Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
- TAG_OBJECT);
+ Label allocate_new_space, install_map;
+ AllocationFlags flags = TAG_OBJECT;
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(isolate());
+
+ test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
+ j(zero, &allocate_new_space);
+
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+ jmp(&install_map);
+
+ bind(&allocate_new_space);
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ flags);
+
+ bind(&install_map);
// Set the map. The other fields are left uninitialized.
mov(FieldOperand(result, HeapObject::kMapOffset),
Immediate(isolate()->factory()->cons_ascii_string_map()));
=======================================
--- /branches/bleeding_edge/src/runtime.cc Fri May 3 02:59:50 2013
+++ /branches/bleeding_edge/src/runtime.cc Fri May 3 03:36:16 2013
@@ -234,7 +234,9 @@
constant_properties,
&is_result_from_cache);
- Handle<JSObject> boilerplate = isolate->factory()->NewJSObjectFromMap(map);
+ Handle<JSObject> boilerplate =
+ isolate->factory()->NewJSObjectFromMap(
+ map, isolate->heap()->GetPretenureMode());
// Normalize the elements of the boilerplate to save space if needed.
if (!should_have_fast_elements) JSObject::NormalizeElements(boilerplate);
@@ -338,8 +340,10 @@
// Create the JSArray.
Handle<JSFunction> constructor(
JSFunction::NativeContextFromLiterals(*literals)->array_function());
- Handle<JSArray> object =
- Handle<JSArray>::cast(isolate->factory()->NewJSObject(constructor));
+
+ Handle<JSArray> object = Handle<JSArray>::cast(
+ isolate->factory()->NewJSObject(
+ constructor, isolate->heap()->GetPretenureMode()));
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(elements->get(0))->value());
@@ -6135,7 +6139,8 @@
Object* number = args[0];
RUNTIME_ASSERT(number->IsNumber());
- return isolate->heap()->NumberToString(number, false);
+ return isolate->heap()->NumberToString(
+ number, false, isolate->heap()->GetPretenureMode());
}
=======================================
--- /branches/bleeding_edge/src/serialize.cc Wed Apr 24 00:39:35 2013
+++ /branches/bleeding_edge/src/serialize.cc Fri May 3 03:36:16 2013
@@ -558,6 +558,11 @@
UNCLASSIFIED,
58,
"Runtime::AllocateInOldPointerSpace");
+ Add(ExternalReference::new_space_high_promotion_mode_active_address(isolate).
+ address(),
+ UNCLASSIFIED,
+ 59,
+ "Heap::NewSpaceAllocationLimitAddress");
// Add a small set of deopt entry addresses to encoder without generating the
// deopt table code, which isn't possible at deserialization time.
@@ -568,7 +573,7 @@
entry,
Deoptimizer::LAZY,
Deoptimizer::CALCULATE_ENTRY_ADDRESS);
- Add(address, LAZY_DEOPTIMIZATION, 59 + entry, "lazy_deopt");
+ Add(address, LAZY_DEOPTIMIZATION, 60 + entry, "lazy_deopt");
}
}
=======================================
--- /branches/bleeding_edge/src/x64/code-stubs-x64.cc Thu May 2 09:32:47 2013
+++ /branches/bleeding_edge/src/x64/code-stubs-x64.cc Fri May 3 03:36:16 2013
@@ -4924,8 +4924,34 @@
__ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
__ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
Immediate(String::kEmptyHashField));
+
+ Label skip_write_barrier, after_writing;
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(masm->isolate());
+ __ Load(rbx, high_promotion_mode);
+ __ testb(rbx, Immediate(1));
+ __ j(zero, &skip_write_barrier);
+
__ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
+ __ RecordWriteField(rcx,
+ ConsString::kFirstOffset,
+ rax,
+ rbx,
+ kDontSaveFPRegs);
__ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
+ __ RecordWriteField(rcx,
+ ConsString::kSecondOffset,
+ rdx,
+ rbx,
+ kDontSaveFPRegs);
+ __ jmp(&after_writing);
+
+ __ bind(&skip_write_barrier);
+ __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
+ __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
+
+ __ bind(&after_writing);
+
__ movq(rax, rcx);
__ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
@@ -6364,8 +6390,11 @@
{ REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET},
// StoreArrayLiteralElementStub::Generate
{ REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET},
- // FastNewClosureStub::Generate
+ // FastNewClosureStub::Generate and
+ // StringAddStub::Generate
{ REG(rcx), REG(rdx), REG(rbx), EMIT_REMEMBERED_SET},
+ // StringAddStub::Generate
+ { REG(rcx), REG(rax), REG(rbx), EMIT_REMEMBERED_SET},
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
=======================================
--- /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Fri Apr 26 08:30:41 2013
+++ /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Fri May 3 03:36:16 2013
@@ -4066,9 +4066,33 @@
Register scratch1,
Register scratch2,
Label* gc_required) {
- // Allocate heap number in new space.
- Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
- TAG_OBJECT);
+ Label allocate_new_space, install_map;
+ AllocationFlags flags = TAG_OBJECT;
+
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(isolate());
+
+ Load(scratch1, high_promotion_mode);
+ testb(scratch1, Immediate(1));
+ j(zero, &allocate_new_space);
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+
+ jmp(&install_map);
+
+ bind(&allocate_new_space);
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ flags);
+
+ bind(&install_map);
// Set the map. The other fields are left uninitialized.
LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);