Revision: 4579
Author: [email protected]
Date: Tue May 4 06:23:58 2010
Log: X64: Faster push/pop implementation.
Also snuck in an intended optimization for fast api call preparation and a
few indentation fixes.
Review URL: http://codereview.chromium.org/1689010
http://code.google.com/p/v8/source/detail?r=4579
Modified:
/branches/bleeding_edge/src/ia32/stub-cache-ia32.cc
/branches/bleeding_edge/src/x64/codegen-x64.cc
/branches/bleeding_edge/src/x64/macro-assembler-x64.cc
/branches/bleeding_edge/src/x64/macro-assembler-x64.h
/branches/bleeding_edge/src/x64/stub-cache-x64.cc
/branches/bleeding_edge/test/mjsunit/array-pop.js
=======================================
--- /branches/bleeding_edge/src/ia32/stub-cache-ia32.cc Wed Apr 28 07:06:35 2010
+++ /branches/bleeding_edge/src/ia32/stub-cache-ia32.cc Tue May 4 06:23:58 2010
@@ -1179,7 +1179,7 @@
__ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
- __ add(Operand(eax), Immediate(argc << 1));
+ __ add(Operand(eax), Immediate(Smi::FromInt(argc)));
// Get the element's length into ecx.
__ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
@@ -1232,7 +1232,7 @@
__ j(not_equal, &call_builtin);
__ add(Operand(ecx), Immediate(kAllocationDelta * kPointerSize));
__ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
- __ j(greater, &call_builtin);
+ __ j(above, &call_builtin);
// We fit and could grow elements.
__ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
@@ -1298,7 +1298,7 @@
return Heap::undefined_value();
}
- Label miss, empty_array, call_builtin;
+ Label miss, return_undefined, call_builtin;
// Get the receiver from the stack.
const int argc = arguments().immediate();
@@ -1307,7 +1307,6 @@
// Check that the receiver isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss);
-
CheckPrototypes(JSObject::cast(object), edx,
holder, ebx,
eax, name, &miss);
@@ -1323,7 +1322,7 @@
// Get the array's length into ecx and calculate new length.
__ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
__ sub(Operand(ecx), Immediate(Smi::FromInt(1)));
- __ j(negative, &empty_array);
+ __ j(negative, &return_undefined);
// Get the last element.
STATIC_ASSERT(kSmiTagSize == 1);
@@ -1344,12 +1343,11 @@
Immediate(Factory::the_hole_value()));
__ ret((argc + 1) * kPointerSize);
- __ bind(&empty_array);
+ __ bind(&return_undefined);
__ mov(eax, Immediate(Factory::undefined_value()));
__ ret((argc + 1) * kPointerSize);
__ bind(&call_builtin);
-
__ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
argc + 1,
1);
=======================================
--- /branches/bleeding_edge/src/x64/codegen-x64.cc Tue May 4 05:05:55 2010
+++ /branches/bleeding_edge/src/x64/codegen-x64.cc Tue May 4 06:23:58 2010
@@ -8515,6 +8515,7 @@
|| (cc == greater) || (cc == greater_equal));
return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}
+
void CompareStub::Generate(MacroAssembler* masm) {
Label call_builtin, done;
=======================================
--- /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Tue May 4 04:06:59 2010
+++ /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Tue May 4 06:23:58 2010
@@ -101,15 +101,17 @@
// If the bit offset lies beyond the normal remembered set range, it is in
// the extra remembered set area of a large object.
cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
- j(less, &fast);
+ j(below, &fast);
+
+ // We have a large object containing pointers. It must be a FixedArray.
// Adjust 'page_start' so that addressing using 'pointer_offset' hits the
// extra remembered set after the large object.
// Load the array length into 'scratch'.
movl(scratch,
- Operand(page_start,
- Page::kObjectStartOffset + FixedArray::kLengthOffset));
+ Operand(page_start,
+ Page::kObjectStartOffset + FixedArray::kLengthOffset));
Register array_length = scratch;
// Extra remembered set starts right after the large object (a FixedArray), at
@@ -119,9 +121,9 @@
// extra RSet to 'page_start', so that addressing the bit using
// 'pointer_offset' hits the extra RSet words.
lea(page_start,
- Operand(page_start, array_length, times_pointer_size,
- Page::kObjectStartOffset + FixedArray::kHeaderSize
- - Page::kRSetEndOffset));
+ Operand(page_start, array_length, times_pointer_size,
+ Page::kObjectStartOffset + FixedArray::kHeaderSize
+ - Page::kRSetEndOffset));
// NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
// to limit code size. We should probably evaluate this decision by
@@ -130,22 +132,6 @@
bind(&fast);
bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}
-
-
-void MacroAssembler::InNewSpace(Register object,
- Register scratch,
- Condition cc,
- Label* branch) {
- ASSERT(cc == equal || cc == not_equal);
- if (!scratch.is(object)) {
- movq(scratch, object);
- }
- ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
- and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
- movq(kScratchRegister, ExternalReference::new_space_start());
- cmpq(scratch, kScratchRegister);
- j(cc, branch);
-}
// Set the remembered set bit for [object+offset].
@@ -213,11 +199,11 @@
// We make sure that an offset is inside the right limits whether it is
// tagged or untagged.
if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) {
- // Compute the bit offset in the remembered set, leave it in 'value'.
+ // Compute the bit offset in the remembered set, leave it in 'scratch'.
lea(scratch, Operand(object, offset));
ASSERT(is_int32(Page::kPageAlignmentMask));
and_(scratch,
Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
- shr(scratch, Immediate(kObjectAlignmentBits));
+ shr(scratch, Immediate(kPointerSizeLog2));
// Compute the page address from the heap object pointer, leave it in
// 'object' (immediate value is sign extended).
@@ -236,10 +222,10 @@
// array access: calculate the destination address in the same manner as
// KeyedStoreIC::GenerateGeneric.
SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
- lea(dst, Operand(object,
- index.reg,
- index.scale,
- FixedArray::kHeaderSize - kHeapObjectTag));
+ lea(dst, FieldOperand(object,
+ index.reg,
+ index.scale,
+ FixedArray::kHeaderSize));
}
// If we are already generating a shared stub, not inlining the
// record write code isn't going to save us any memory.
@@ -261,6 +247,41 @@
movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
}
}
+
+
+void MacroAssembler::InNewSpace(Register object,
+ Register scratch,
+ Condition cc,
+ Label* branch) {
+  if (Serializer::enabled()) {
+    // Can't do arithmetic on external references if it might get serialized.
+    // The mask isn't really an address. We load it as an external reference in
+    // case the size of the new space is different between the snapshot maker
+    // and the running system.
+ if (scratch.is(object)) {
+ movq(kScratchRegister, ExternalReference::new_space_mask());
+ and_(scratch, kScratchRegister);
+ } else {
+ movq(scratch, ExternalReference::new_space_mask());
+ and_(scratch, object);
+ }
+ movq(kScratchRegister, ExternalReference::new_space_start());
+ cmpq(scratch, kScratchRegister);
+ j(cc, branch);
+ } else {
+ ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
+    intptr_t new_space_start =
+        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
+ movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
+ if (scratch.is(object)) {
+ addq(scratch, kScratchRegister);
+ } else {
+ lea(scratch, Operand(object, kScratchRegister, times_1, 0));
+ }
+ and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
+ j(cc, branch);
+ }
+}
void MacroAssembler::Assert(Condition cc, const char* msg) {
@@ -2161,7 +2182,7 @@
int depth = 0;
if (save_at_depth == depth) {
- movq(Operand(rsp, kPointerSize), reg);
+ movq(Operand(rsp, kPointerSize), object_reg);
}
// Check the maps in the prototype chain.
=======================================
--- /branches/bleeding_edge/src/x64/macro-assembler-x64.h Tue May 4 04:06:59 2010
+++ /branches/bleeding_edge/src/x64/macro-assembler-x64.h Tue May 4 06:23:58 2010
@@ -102,7 +102,6 @@
Register value,
Register scratch);
-
#ifdef ENABLE_DEBUGGER_SUPPORT
// ---------------------------------------------------------------------------
// Debugger Support
=======================================
--- /branches/bleeding_edge/src/x64/stub-cache-x64.cc Thu Apr 29 06:58:39 2010
+++ /branches/bleeding_edge/src/x64/stub-cache-x64.cc Tue May 4 06:23:58 2010
@@ -564,12 +564,14 @@
// -- rsp[0] : return address
// -- rsp[8] : last argument in the internal frame of the caller
// -----------------------------------
- __ pop(scratch);
- __ Push(Smi::FromInt(0));
- __ Push(Smi::FromInt(0));
- __ Push(Smi::FromInt(0));
- __ Push(Smi::FromInt(0));
- __ push(scratch);
+ __ movq(scratch, Operand(rsp, 0));
+ __ subq(rsp, Immediate(4 * kPointerSize));
+ __ movq(Operand(rsp, 0), scratch);
+ __ Move(scratch, Smi::FromInt(0));
+ __ movq(Operand(rsp, 1 * kPointerSize), scratch);
+ __ movq(Operand(rsp, 2 * kPointerSize), scratch);
+ __ movq(Operand(rsp, 3 * kPointerSize), scratch);
+ __ movq(Operand(rsp, 4 * kPointerSize), scratch);
}
@@ -582,9 +584,9 @@
// -- rsp[32] : first fast api call extra argument
// -- rsp[40] : last argument in the internal frame
// -----------------------------------
- __ pop(scratch);
- __ Drop(4);
- __ push(scratch);
+ __ movq(scratch, Operand(rsp, 0));
+ __ movq(Operand(rsp, 4 * kPointerSize), scratch);
+ __ addq(rsp, Immediate(kPointerSize * 4));
}
@@ -853,129 +855,6 @@
#define __ ACCESS_MASM((masm()))
-
-Object* CallStubCompiler::CompileArrayPushCall(Object* object,
- JSObject* holder,
- JSFunction* function,
- String* name,
- CheckType check) {
- // ----------- S t a t e -------------
- // rcx : function name
- // rsp[0] : return address
- // rsp[8] : argument argc
- // rsp[16] : argument argc - 1
- // ...
- // rsp[argc * 8] : argument 1
- // rsp[(argc + 1) * 8] : argument 0 = receiver
- // -----------------------------------
-
- // If object is not an array, bail out to regular call.
- if (!object->IsJSArray()) {
- return Heap::undefined_value();
- }
-
- // TODO(639): faster implementation.
- ASSERT(check == RECEIVER_MAP_CHECK);
-
- Label miss;
-
- // Get the receiver from the stack.
- const int argc = arguments().immediate();
- __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
-
- // Check that the receiver isn't a smi.
- __ JumpIfSmi(rdx, &miss);
-
- // Check that the maps haven't changed.
- CheckPrototypes(JSObject::cast(object), rdx, holder,
- rbx, rax, name, &miss);
-
- // Patch the receiver on the stack with the global proxy if
- // necessary.
- if (object->IsGlobalObject()) {
- __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
- __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
- }
-
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
- argc + 1,
- 1);
-
- // Handle call cache miss.
- __ bind(&miss);
- Handle<Code> ic = ComputeCallMiss(arguments().immediate());
- __ Jump(ic, RelocInfo::CODE_TARGET);
-
- // Return the generated code.
- String* function_name = NULL;
- if (function->shared()->name()->IsString()) {
- function_name = String::cast(function->shared()->name());
- }
- return GetCode(CONSTANT_FUNCTION, function_name);
-}
-
-
-Object* CallStubCompiler::CompileArrayPopCall(Object* object,
- JSObject* holder,
- JSFunction* function,
- String* name,
- CheckType check) {
- // ----------- S t a t e -------------
- // rcx : function name
- // rsp[0] : return address
- // rsp[8] : argument argc
- // rsp[16] : argument argc - 1
- // ...
- // rsp[argc * 8] : argument 1
- // rsp[(argc + 1) * 8] : argument 0 = receiver
- // -----------------------------------
-
- // If object is not an array, bail out to regular call.
- if (!object->IsJSArray()) {
- return Heap::undefined_value();
- }
-
- // TODO(642): faster implementation.
- ASSERT(check == RECEIVER_MAP_CHECK);
-
- Label miss;
-
- // Get the receiver from the stack.
- const int argc = arguments().immediate();
- __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
-
- // Check that the receiver isn't a smi.
- __ JumpIfSmi(rdx, &miss);
-
- // Check that the maps haven't changed.
- CheckPrototypes(JSObject::cast(object), rdx, holder,
- rbx, rax, name, &miss);
-
- // Patch the receiver on the stack with the global proxy if
- // necessary.
- if (object->IsGlobalObject()) {
- __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
- __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
- }
-
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
- argc + 1,
- 1);
-
- // Handle call cache miss.
- __ bind(&miss);
- Handle<Code> ic = ComputeCallMiss(arguments().immediate());
- __ Jump(ic, RelocInfo::CODE_TARGET);
-
- // Return the generated code.
- String* function_name = NULL;
- if (function->shared()->name()->IsString()) {
- function_name = String::cast(function->shared()->name());
- }
- return GetCode(CONSTANT_FUNCTION, function_name);
-}
-
-
Object* CallStubCompiler::CompileCallConstant(Object* object,
JSObject* holder,
JSFunction* function,
@@ -1188,6 +1067,257 @@
// Return the generated code.
return GetCode(FIELD, name);
}
+
+
+Object* CallStubCompiler::CompileArrayPushCall(Object* object,
+ JSObject* holder,
+ JSFunction* function,
+ String* name,
+ CheckType check) {
+ // ----------- S t a t e -------------
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
+ // -- ...
+ // -- rsp[(argc + 1) * 8] : receiver
+ // -----------------------------------
+ ASSERT(check == RECEIVER_MAP_CHECK);
+
+ // If object is not an array, bail out to regular call.
+ if (!object->IsJSArray()) {
+ return Heap::undefined_value();
+ }
+
+ Label miss;
+
+ // Get the receiver from the stack.
+ const int argc = arguments().immediate();
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // Check that the receiver isn't a smi.
+ __ JumpIfSmi(rdx, &miss);
+
+ CheckPrototypes(JSObject::cast(object),
+ rdx,
+ holder,
+ rbx,
+ rax,
+ name,
+ &miss);
+
+ if (argc == 0) {
+ // Noop, return the length.
+ __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
+ __ ret((argc + 1) * kPointerSize);
+ } else {
+ // Get the elements array of the object.
+ __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode (not dictionary).
+ __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+ Factory::fixed_array_map());
+ __ j(not_equal, &miss);
+
+ if (argc == 1) { // Otherwise fall through to call builtin.
+ Label call_builtin, exit, with_rset_update, attempt_to_grow_elements;
+
+ // Get the array's length into rax and calculate new length.
+ __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
+ STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
+ __ SmiAddConstant(rax, rax, Smi::FromInt(argc));
+
+ // Get the element's length into rcx.
+ __ movl(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
+ __ Integer32ToSmi(rcx, rcx);
+
+ // Check if we could survive without allocation.
+ __ SmiCompare(rax, rcx);
+ __ j(greater, &attempt_to_grow_elements);
+
+ // Save new length.
+ __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
+
+ // Push the element.
+ __ movq(rcx, Operand(rsp, argc * kPointerSize));
+ SmiIndex index =
+ masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size);
+ __ lea(rdx, FieldOperand(rbx,
+ index.reg, index.scale,
+                                 FixedArray::kHeaderSize - argc * kPointerSize));
+ __ movq(Operand(rdx, 0), rcx);
+
+ // Check if value is a smi.
+ __ JumpIfNotSmi(rcx, &with_rset_update);
+
+ __ bind(&exit);
+ __ ret((argc + 1) * kPointerSize);
+
+ __ bind(&with_rset_update);
+
+ __ InNewSpace(rbx, rcx, equal, &exit);
+
+ RecordWriteStub stub(rbx, rdx, rcx);
+ __ CallStub(&stub);
+ __ ret((argc + 1) * kPointerSize);
+
+ __ bind(&attempt_to_grow_elements);
+ ExternalReference new_space_allocation_top =
+ ExternalReference::new_space_allocation_top_address();
+ ExternalReference new_space_allocation_limit =
+ ExternalReference::new_space_allocation_limit_address();
+
+ const int kAllocationDelta = 4;
+ // Load top.
+ __ movq(rcx, new_space_allocation_top);
+ __ movq(rcx, Operand(rcx, 0));
+
+ // Check if it's the end of elements.
+        index = masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size);
+ __ lea(rdx, FieldOperand(rbx,
+ index.reg, index.scale,
+                                 FixedArray::kHeaderSize - argc * kPointerSize));
+ __ cmpq(rdx, rcx);
+ __ j(not_equal, &call_builtin);
+ __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
+ __ movq(kScratchRegister, new_space_allocation_limit);
+ __ cmpq(rcx, Operand(kScratchRegister, 0));
+ __ j(above, &call_builtin);
+
+ // We fit and could grow elements.
+ __ movq(kScratchRegister, new_space_allocation_top);
+ __ movq(Operand(kScratchRegister, 0), rcx);
+ __ movq(rcx, Operand(rsp, argc * kPointerSize));
+
+ // Push the argument...
+ __ movq(Operand(rdx, 0), rcx);
+ // ... and fill the rest with holes.
+ __ Move(kScratchRegister, Factory::the_hole_value());
+ for (int i = 1; i < kAllocationDelta; i++) {
+ __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
+ }
+
+ // Restore receiver to rdx as finish sequence assumes it's here.
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // Increment element's and array's sizes.
+ __ addq(FieldOperand(rbx, FixedArray::kLengthOffset),
+ Immediate(kAllocationDelta));
+ __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
+
+        // Elements are in new space, so no remembered set updates are
+        // necessary.
+ __ ret((argc + 1) * kPointerSize);
+
+ __ bind(&call_builtin);
+ }
+
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
+ argc + 1,
+ 1);
+ }
+
+ __ bind(&miss);
+
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ String* function_name = NULL;
+ if (function->shared()->name()->IsString()) {
+ function_name = String::cast(function->shared()->name());
+ }
+ return GetCode(CONSTANT_FUNCTION, function_name);
+}
+
+
+Object* CallStubCompiler::CompileArrayPopCall(Object* object,
+ JSObject* holder,
+ JSFunction* function,
+ String* name,
+ CheckType check) {
+ // ----------- S t a t e -------------
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- esp[(argc + 1) * 4] : receiver
+ // -----------------------------------
+ ASSERT(check == RECEIVER_MAP_CHECK);
+
+ // If object is not an array, bail out to regular call.
+ if (!object->IsJSArray()) {
+ return Heap::undefined_value();
+ }
+
+ Label miss, return_undefined, call_builtin;
+
+ // Get the receiver from the stack.
+ const int argc = arguments().immediate();
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // Check that the receiver isn't a smi.
+ __ JumpIfSmi(rdx, &miss);
+
+ CheckPrototypes(JSObject::cast(object), rdx,
+ holder, rbx,
+ rax, name, &miss);
+
+ // Get the elements array of the object.
+ __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode (not dictionary).
+  __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+         Factory::fixed_array_map());
+ __ j(not_equal, &miss);
+
+ // Get the array's length into rcx and calculate new length.
+ __ movq(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
+ __ SmiSubConstant(rcx, rcx, Smi::FromInt(1));
+ __ SmiTest(rcx);
+ __ j(negative, &return_undefined);
+
+ // Get the last element.
+ __ Move(r9, Factory::the_hole_value());
+ SmiIndex index =
+ masm()->SmiToIndex(r8, rcx, times_pointer_size);
+ __ movq(rax, FieldOperand(rbx,
+ index.reg, index.scale,
+ FixedArray::kHeaderSize));
+ // Check if element is already the hole.
+ __ cmpq(rax, r9);
+ __ j(equal, &call_builtin);
+
+ // Set the array's length.
+ __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
+
+  // Fill with the hole and return original value.
+ __ movq(FieldOperand(rbx,
+ index.reg, index.scale,
+ FixedArray::kHeaderSize),
+ r9);
+ __ ret((argc + 1) * kPointerSize);
+
+ __ bind(&return_undefined);
+
+ __ Move(rax, Factory::undefined_value());
+ __ ret((argc + 1) * kPointerSize);
+
+ __ bind(&call_builtin);
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
+ argc + 1,
+ 1);
+ __ bind(&miss);
+
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ String* function_name = NULL;
+ if (function->shared()->name()->IsString()) {
+ function_name = String::cast(function->shared()->name());
+ }
+ return GetCode(CONSTANT_FUNCTION, function_name);
+}
+
+
Object* CallStubCompiler::CompileCallInterceptor(JSObject* object,
@@ -2043,8 +2173,13 @@
Label* miss) {
// Check that the maps haven't changed.
Register result =
- __ CheckMaps(object, object_reg, holder, holder_reg, scratch,
- save_at_depth, miss);
+ masm()->CheckMaps(object,
+ object_reg,
+ holder,
+ holder_reg,
+ scratch,
+ save_at_depth,
+ miss);
// If we've skipped any global objects, it's not enough to verify
// that their maps haven't changed. We also need to check that the
=======================================
--- /branches/bleeding_edge/test/mjsunit/array-pop.js Mon Apr 26 08:08:07 2010
+++ /branches/bleeding_edge/test/mjsunit/array-pop.js Tue May 4 06:23:58 2010
@@ -58,6 +58,29 @@
assertEquals(undefined, a.pop(1, 2, 3), "9th pop");
assertEquals(0, a.length, "length 9th pop");
}
+
+ // Check that pop works on inherited properties.
+ for (var i = 0; i < 10 ;i++) { // Ensure ICs are stabilized.
+ Array.prototype[1] = 1;
+ Array.prototype[3] = 3;
+ Array.prototype[5] = 5;
+ Array.prototype[7] = 7;
+ Array.prototype[9] = 9;
+ a = [0,1,2,,4,,6,7,8,,];
+ assertEquals(10, a.length, "inherit-initial-length");
+ for (var j = 9; j >= 0; j--) {
+ assertEquals(j + 1, a.length, "inherit-pre-length-" + j);
+ assertTrue(j in a, "has property " + j);
+ var own = a.hasOwnProperty(j);
+ var inherited = Array.prototype.hasOwnProperty(j);
+ assertEquals(j, a.pop(), "inherit-pop");
+ assertEquals(j, a.length, "inherit-post-length");
+ assertFalse(a.hasOwnProperty(j), "inherit-deleted-own-" + j);
+ assertEquals(inherited, Array.prototype.hasOwnProperty(j),
+ "inherit-not-deleted-inherited" + j);
+ }
+ Array.prototype.length = 0; // Clean-up.
+ }
})();
// Test the case of not JSArray receiver.
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev