Author: [email protected]
Date: Mon Jun 29 01:04:39 2009
New Revision: 2287
Modified:
branches/bleeding_edge/src/objects-inl.h
branches/bleeding_edge/src/x64/builtins-x64.cc
branches/bleeding_edge/src/x64/codegen-x64.cc
branches/bleeding_edge/src/x64/ic-x64.cc
Log:
X64 Implementation: Implement Generate_Function(Call,Apply)
Review URL: http://codereview.chromium.org/147205
Modified: branches/bleeding_edge/src/objects-inl.h
==============================================================================
--- branches/bleeding_edge/src/objects-inl.h (original)
+++ branches/bleeding_edge/src/objects-inl.h Mon Jun 29 01:04:39 2009
@@ -654,6 +654,12 @@
#define WRITE_INT_FIELD(p, offset, value) \
(*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
+#define READ_INTPTR_FIELD(p, offset) \
+ (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
+
+#define WRITE_INTPTR_FIELD(p, offset, value) \
+ (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
+
#define READ_UINT32_FIELD(p, offset) \
(*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
@@ -2303,12 +2309,12 @@
Address Proxy::proxy() {
- return AddressFrom<Address>(READ_INT_FIELD(this, kProxyOffset));
+ return AddressFrom<Address>(READ_INTPTR_FIELD(this, kProxyOffset));
}
void Proxy::set_proxy(Address value) {
- WRITE_INT_FIELD(this, kProxyOffset, OffsetFrom(value));
+ WRITE_INTPTR_FIELD(this, kProxyOffset, OffsetFrom(value));
}
Modified: branches/bleeding_edge/src/x64/builtins-x64.cc
==============================================================================
--- branches/bleeding_edge/src/x64/builtins-x64.cc (original)
+++ branches/bleeding_edge/src/x64/builtins-x64.cc Mon Jun 29 01:04:39 2009
@@ -168,14 +168,292 @@
}
-void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
- masm->int3(); // UNIMPLEMENTED.
- masm->movq(kScratchRegister, Immediate(0xBEFA)); // Debugging aid.
+void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
+ // Stack Layout:
+ // rsp: return address
+ // +1: Argument n
+ // +2: Argument n-1
+ // ...
+ // +n: Argument 1 = receiver
+ // +n+1: Argument 0 = function to call
+ //
+ // rax contains the number of arguments, n, not counting the function.
+ //
+ // 1. Make sure we have at least one argument.
+ { Label done;
+ __ testq(rax, rax);
+ __ j(not_zero, &done);
+ __ pop(rbx);
+ __ Push(Factory::undefined_value());
+ __ push(rbx);
+ __ incq(rax);
+ __ bind(&done);
+ }
+
+ // 2. Get the function to call from the stack.
+ { Label done, non_function, function;
+ // The function to call is at position n+1 on the stack.
+ __ movq(rdi, Operand(rsp, rax, times_pointer_size, +1 * kPointerSize));
+ __ testl(rdi, Immediate(kSmiTagMask));
+ __ j(zero, &non_function);
+ __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
+ __ j(equal, &function);
+
+ // Non-function called: Clear the function to force exception.
+ __ bind(&non_function);
+ __ xor_(rdi, rdi);
+ __ jmp(&done);
+
+  // Function called: Change context eagerly to get the right global object.
+ __ bind(&function);
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+
+ __ bind(&done);
+ }
+
+ // 3. Make sure first argument is an object; convert if necessary.
+ { Label call_to_object, use_global_receiver, patch_receiver, done;
+ __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
+
+ __ testl(rbx, Immediate(kSmiTagMask));
+ __ j(zero, &call_to_object);
+
+ __ Cmp(rbx, Factory::null_value());
+ __ j(equal, &use_global_receiver);
+ __ Cmp(rbx, Factory::undefined_value());
+ __ j(equal, &use_global_receiver);
+
+ __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
+ __ j(below, &call_to_object);
+ __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
+ __ j(below_equal, &done);
+
+ __ bind(&call_to_object);
+ __ EnterInternalFrame(); // preserves rax, rbx, rdi
+
+ // Store the arguments count on the stack (smi tagged).
+ ASSERT(kSmiTag == 0);
+ __ shl(rax, Immediate(kSmiTagSize));
+ __ push(rax);
+
+    __ push(rdi); // save rdi across the call
+ __ push(rbx);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ movq(rbx, rax);
+    __ pop(rdi); // restore rdi after the call
+
+ // Get the arguments count and untag it.
+ __ pop(rax);
+ __ shr(rax, Immediate(kSmiTagSize));
+
+ __ LeaveInternalFrame();
+ __ jmp(&patch_receiver);
+
+  // Use the global receiver object from the called function as the receiver.
+ __ bind(&use_global_receiver);
+ const int kGlobalIndex =
+ Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
+ __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
+
+ __ bind(&patch_receiver);
+ __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);
+
+ __ bind(&done);
+ }
+
+ // 4. Shift stuff one slot down the stack.
+ { Label loop;
+ __ lea(rcx, Operand(rax, +1)); // +1 ~ copy receiver too
+ __ bind(&loop);
+ __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
+ __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
+ __ decq(rcx);
+ __ j(not_zero, &loop);
+ }
+
+ // 5. Remove TOS (copy of last arguments), but keep return address.
+ __ pop(rbx);
+ __ pop(rcx);
+ __ push(rbx);
+ __ decq(rax);
+
+ // 6. Check that function really was a function and get the code to
+ // call from the function and check that the number of expected
+ // arguments matches what we're providing.
+ { Label invoke, trampoline;
+ __ testq(rdi, rdi);
+ __ j(not_zero, &invoke);
+ __ xor_(rbx, rbx);
+ __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
+ __ bind(&trampoline);
+ __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+ RelocInfo::CODE_TARGET);
+
+ __ bind(&invoke);
+ __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+    __ movsxlq(rbx,
+               FieldOperand(rdx,
+                            SharedFunctionInfo::kFormalParameterCountOffset));
+ __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
+ __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
+ __ cmpq(rax, rbx);
+ __ j(not_equal, &trampoline);
+ }
+
+  // 7. Jump (tail-call) to the code in register rdx without checking arguments.
+ ParameterCount expected(0);
+ __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
}
-void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
- masm->int3(); // UNIMPLEMENTED.
- masm->movq(kScratchRegister, Immediate(0xBEFC)); // Debugging aid.
+
+void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
+ // Stack at entry:
+ // rsp: return address
+ // rsp+8: arguments
+ // rsp+16: receiver ("this")
+ // rsp+24: function
+ __ EnterInternalFrame();
+ // Stack frame:
+ // rbp: Old base pointer
+ // rbp[1]: return address
+ // rbp[2]: function arguments
+ // rbp[3]: receiver
+ // rbp[4]: function
+ static const int kArgumentsOffset = 2 * kPointerSize;
+ static const int kReceiverOffset = 3 * kPointerSize;
+ static const int kFunctionOffset = 4 * kPointerSize;
+ __ push(Operand(rbp, kFunctionOffset));
+ __ push(Operand(rbp, kArgumentsOffset));
+ __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
+
+ if (FLAG_check_stack) {
+    // We need to catch preemptions right here, otherwise an unlucky preemption
+ // could show up as a failed apply.
+ Label retry_preemption;
+ Label no_preemption;
+ __ bind(&retry_preemption);
+ ExternalReference stack_guard_limit =
+ ExternalReference::address_of_stack_guard_limit();
+ __ movq(kScratchRegister, stack_guard_limit);
+ __ movq(rcx, rsp);
+ __ subq(rcx, Operand(kScratchRegister, 0));
+    // rcx contains the difference between the stack limit and the stack top.
+    // We use it below to check that there is enough room for the arguments.
+ __ j(above, &no_preemption);
+
+ // Preemption!
+    // Because runtime functions always remove the receiver from the stack, we
+ // have to fake one to avoid underflowing the stack.
+ __ push(rax);
+ __ push(Immediate(Smi::FromInt(0)));
+
+ // Do call to runtime routine.
+ __ CallRuntime(Runtime::kStackGuard, 1);
+ __ pop(rax);
+ __ jmp(&retry_preemption);
+
+ __ bind(&no_preemption);
+
+ Label okay;
+    // Make rdx the space we need for the array when it is unrolled onto the
+ // stack.
+ __ movq(rdx, rax);
+ __ shl(rdx, Immediate(kPointerSizeLog2 - kSmiTagSize));
+ __ cmpq(rcx, rdx);
+ __ j(greater, &okay);
+
+ // Too bad: Out of stack space.
+ __ push(Operand(rbp, kFunctionOffset));
+ __ push(rax);
+ __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
+ __ bind(&okay);
+ }
+
+ // Push current index and limit.
+ const int kLimitOffset =
+ StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
+ const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
+ __ push(rax); // limit
+ __ push(Immediate(0)); // index
+
+ // Change context eagerly to get the right global object if
+ // necessary.
+ __ movq(rdi, Operand(rbp, kFunctionOffset));
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+
+ // Compute the receiver.
+ Label call_to_object, use_global_receiver, push_receiver;
+ __ movq(rbx, Operand(rbp, kReceiverOffset));
+ __ testl(rbx, Immediate(kSmiTagMask));
+ __ j(zero, &call_to_object);
+ __ Cmp(rbx, Factory::null_value());
+ __ j(equal, &use_global_receiver);
+ __ Cmp(rbx, Factory::undefined_value());
+ __ j(equal, &use_global_receiver);
+
+ // If given receiver is already a JavaScript object then there's no
+ // reason for converting it.
+ __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
+ __ j(less, &call_to_object);
+ __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
+ __ j(less_equal, &push_receiver);
+
+ // Convert the receiver to an object.
+ __ bind(&call_to_object);
+ __ push(rbx);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ movq(rbx, rax);
+ __ jmp(&push_receiver);
+
+ // Use the current global receiver object as the receiver.
+ __ bind(&use_global_receiver);
+ const int kGlobalOffset =
+ Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
+ __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
+
+ // Push the receiver.
+ __ bind(&push_receiver);
+ __ push(rbx);
+
+ // Copy all arguments from the array to the stack.
+ Label entry, loop;
+ __ movq(rax, Operand(rbp, kIndexOffset));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ movq(rcx, Operand(rbp, kArgumentsOffset)); // load arguments
+ __ push(rcx);
+ __ push(rax);
+
+ // Use inline caching to speed up access to arguments.
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ __ Call(ic, RelocInfo::CODE_TARGET);
+ // It is important that we do not have a test instruction after the
+ // call. A test instruction after the call is used to indicate that
+ // we have generated an inline version of the keyed load. In this
+ // case, we know that we are not generating a test instruction next.
+
+ // Remove IC arguments from the stack and push the nth argument.
+ __ addq(rsp, Immediate(2 * kPointerSize));
+ __ push(rax);
+
+ // Update the index on the stack and in register rax.
+ __ movq(rax, Operand(rbp, kIndexOffset));
+ __ addq(rax, Immediate(Smi::FromInt(1)));
+ __ movq(Operand(rbp, kIndexOffset), rax);
+
+ __ bind(&entry);
+ __ cmpq(rax, Operand(rbp, kLimitOffset));
+ __ j(not_equal, &loop);
+
+ // Invoke the function.
+ ParameterCount actual(rax);
+ __ shr(rax, Immediate(kSmiTagSize));
+ __ movq(rdi, Operand(rbp, kFunctionOffset));
+ __ InvokeFunction(rdi, actual, CALL_FUNCTION);
+
+ __ LeaveInternalFrame();
+ __ ret(3 * kPointerSize); // remove function, receiver, and arguments
}
Modified: branches/bleeding_edge/src/x64/codegen-x64.cc
==============================================================================
--- branches/bleeding_edge/src/x64/codegen-x64.cc (original)
+++ branches/bleeding_edge/src/x64/codegen-x64.cc Mon Jun 29 01:04:39 2009
@@ -2301,7 +2301,8 @@
// the target, with an implicit promise that it will be written to again
// before it is read.
// TODO(X64): Implement TakeValue optimization.
- if (false && literal != NULL || (right_var != NULL && right_var != var)) {
+ if (false) {
+ // if (literal != NULL || (right_var != NULL && right_var != var)) {
// target.TakeValue(NOT_INSIDE_TYPEOF);
} else {
target.GetValue(NOT_INSIDE_TYPEOF);
@@ -3342,7 +3343,9 @@
void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* a) {
- UNIMPLEMENTED();
+ // TODO(X64): Implement this function.
+ // Ignore arguments and return undefined, to signal failure.
+ frame_->Push(Factory::undefined_value());
}
@@ -3371,10 +3374,27 @@
}
-void CodeGenerator::GenerateLog(ZoneList<Expression*>* a) {
- UNIMPLEMENTED();
+void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
+ // Conditionally generate a log call.
+ // Args:
+ // 0 (literal string): The type of logging (corresponds to the flags).
+ // This is used to determine whether or not to generate the log call.
+ // 1 (string): Format string. Access the string at argument index 2
+ // with '%2s' (see Logger::LogRuntime for all the formats).
+ // 2 (array): Arguments to the format string.
+ ASSERT_EQ(args->length(), 3);
+#ifdef ENABLE_LOGGING_AND_PROFILING
+ if (ShouldGenerateLog(args->at(0))) {
+ Load(args->at(1));
+ Load(args->at(2));
+ frame_->CallRuntime(Runtime::kLog, 2);
+ }
+#endif
+ // Finally, we're expected to leave a value on the top of the stack.
+ frame_->Push(Factory::undefined_value());
}
+
void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
@@ -3446,8 +3466,28 @@
void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
- UNIMPLEMENTED();
+ ASSERT(args->length() == 1);
+ JumpTarget leave;
+ Load(args->at(0)); // Load the object.
+ frame_->Dup();
+ Result object = frame_->Pop();
+ object.ToRegister();
+ ASSERT(object.is_valid());
+ // if (object->IsSmi()) return object.
+ __ testl(object.reg(), Immediate(kSmiTagMask));
+ leave.Branch(zero);
+ // It is a heap object - get map.
+ Result temp = allocator()->Allocate();
+ ASSERT(temp.is_valid());
+ // if (!object->IsJSValue()) return object.
+ __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
+ leave.Branch(not_equal);
+ __ movq(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
+ object.Unuse();
+ frame_->SetElementAt(0, &temp);
+ leave.Bind();
}
+
// -----------------------------------------------------------------------------
// CodeGenerator implementation of Expressions
Modified: branches/bleeding_edge/src/x64/ic-x64.cc
==============================================================================
--- branches/bleeding_edge/src/x64/ic-x64.cc (original)
+++ branches/bleeding_edge/src/x64/ic-x64.cc Mon Jun 29 01:04:39 2009
@@ -79,10 +79,12 @@
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC0AB)); // Debugging aid.
}
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC1AB)); // Debugging aid.
}
bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
@@ -162,10 +164,12 @@
void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC2AB)); // Debugging aid.
}
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC3AB)); // Debugging aid.
}
Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
@@ -265,26 +269,32 @@
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC4AB)); // Debugging aid.
}
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC5AB)); // Debugging aid.
}
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC6AB)); // Debugging aid.
}
void LoadIC::GenerateMiss(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC7AB)); // Debugging aid.
}
void LoadIC::GenerateNormal(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC8AB)); // Debugging aid.
}
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xC9AB)); // Debugging aid.
}
bool LoadIC::PatchInlinedLoad(Address address, Object* map, int index) {
@@ -312,10 +322,12 @@
void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xCAAB)); // Debugging aid.
}
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
+ masm->movq(kScratchRegister, Immediate(0xCBAB)); // Debugging aid.
}
--~--~---------~--~----~------------~-------~--~----~
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
-~----------~----~----~----~------~----~------~--~---