Revision: 4689
Author: [email protected]
Date: Thu May 20 07:33:02 2010
Log: Reapply r4686: Complete version of full codegen for x64.
Already reviewed at: http://codereview.chromium.org/2078022/show
[email protected]
Review URL: http://codereview.chromium.org/2137008
http://code.google.com/p/v8/source/detail?r=4689
Modified:
/branches/bleeding_edge/src/compiler.cc
/branches/bleeding_edge/src/flag-definitions.h
/branches/bleeding_edge/src/ia32/codegen-ia32.cc
/branches/bleeding_edge/src/ia32/full-codegen-ia32.cc
/branches/bleeding_edge/src/x64/codegen-x64.cc
/branches/bleeding_edge/src/x64/full-codegen-x64.cc
/branches/bleeding_edge/src/x64/macro-assembler-x64.cc
/branches/bleeding_edge/src/x64/macro-assembler-x64.h
/branches/bleeding_edge/test/cctest/test-log-stack-tracer.cc
=======================================
--- /branches/bleeding_edge/src/compiler.cc Thu May 20 07:02:51 2010
+++ /branches/bleeding_edge/src/compiler.cc Thu May 20 07:33:02 2010
@@ -121,7 +121,7 @@
: (shared->is_toplevel() || shared->try_full_codegen());
bool force_full_compiler = false;
-#ifdef V8_TARGET_ARCH_IA32
+#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_X64)
// On ia32 the full compiler can compile all code, whereas on the other
// platforms the supported constructs are checked by the associated syntax
// checker. When --always-full-compiler is used on ia32 the syntax checker
// is still in
=======================================
--- /branches/bleeding_edge/src/flag-definitions.h Thu May 20 07:02:51 2010
+++ /branches/bleeding_edge/src/flag-definitions.h Thu May 20 07:33:02 2010
@@ -149,7 +149,7 @@
DEFINE_bool(fast_compiler, false, "enable speculative optimizing backend")
DEFINE_bool(always_full_compiler, false,
"try to use the dedicated run-once backend for all code")
-#ifdef V8_TARGET_ARCH_IA32
+#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_X64)
DEFINE_bool(force_full_compiler, false,
"force use of the dedicated run-once backend for all code")
#endif
=======================================
--- /branches/bleeding_edge/src/ia32/codegen-ia32.cc Thu May 20 07:02:51 2010
+++ /branches/bleeding_edge/src/ia32/codegen-ia32.cc Thu May 20 07:33:02 2010
@@ -6165,11 +6165,11 @@
__ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
__ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
__ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
- destination()->false_target()->Branch(less);
+ destination()->false_target()->Branch(below);
__ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
obj.Unuse();
map.Unuse();
- destination()->Split(less_equal);
+ destination()->Split(below_equal);
}
@@ -6282,7 +6282,7 @@
__ mov(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
__ movzx_b(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset));
__ cmp(tmp.reg(), FIRST_JS_OBJECT_TYPE);
- null.Branch(less);
+ null.Branch(below);
// As long as JS_FUNCTION_TYPE is the last instance type and it is
// right after LAST_JS_OBJECT_TYPE, we can avoid checking for
@@ -6872,7 +6872,7 @@
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
- deferred->Branch(less);
+ deferred->Branch(below);
__ movzx_b(tmp1.reg(), FieldOperand(tmp1.reg(), Map::kBitFieldOffset));
__ test(tmp1.reg(), Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
deferred->Branch(not_zero);
@@ -8192,11 +8192,11 @@
__ mov(map.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
__ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
__ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
- destination()->false_target()->Branch(less);
+ destination()->false_target()->Branch(below);
__ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
answer.Unuse();
map.Unuse();
- destination()->Split(less_equal);
+ destination()->Split(below_equal);
} else {
// Uncommon case: typeof testing against a string literal that is
// never returned from the typeof operator.
@@ -11602,7 +11602,7 @@
ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
Label first_non_object;
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
- __ j(less, &first_non_object);
+ __ j(below, &first_non_object);
// Return non-zero (eax is not zero)
Label return_not_equal;
@@ -11619,7 +11619,7 @@
__ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
- __ j(greater_equal, &return_not_equal);
+ __ j(above_equal, &return_not_equal);
// Check for oddballs: true, false, null, undefined.
__ cmp(ecx, ODDBALL_TYPE);
@@ -12267,9 +12267,9 @@
__ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));  // eax - object map
__ movzx_b(ecx, FieldOperand(eax, Map::kInstanceTypeOffset));  // ecx - type
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
- __ j(less, &slow, not_taken);
+ __ j(below, &slow, not_taken);
__ cmp(ecx, LAST_JS_OBJECT_TYPE);
- __ j(greater, &slow, not_taken);
+ __ j(above, &slow, not_taken);
// Get the prototype of the function.
__ mov(edx, Operand(esp, 1 * kPointerSize)); // 1 ~ return address
@@ -12297,9 +12297,9 @@
__ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
__ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
- __ j(less, &slow, not_taken);
+ __ j(below, &slow, not_taken);
__ cmp(ecx, LAST_JS_OBJECT_TYPE);
- __ j(greater, &slow, not_taken);
+ __ j(above, &slow, not_taken);
// Register mapping:
// eax is object map.
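The condition-code changes in this file all follow one rule: map instance types are unsigned bytes, so range checks on them need the unsigned condition codes (below, above_equal) rather than the signed ones (less, greater_equal). When the compare is performed on the raw byte, a signed branch misreads any instance type at or above 0x80 as negative. A minimal standalone sketch of the failure mode, with hypothetical constants chosen only for illustration:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint8_t kFirstJsObjectType = 0x20;  // hypothetical boundary value
      uint8_t instance_type = 0x85;             // hypothetical type >= 0x80

      // Unsigned compare, as after this patch ("below"): 0x85 is not
      // below 0x20, so the value is correctly treated as a JS object.
      bool below_unsigned = instance_type < kFirstJsObjectType;  // false

      // Signed 8-bit compare, as before the patch ("less"): 0x85 is
      // reinterpreted as -123 and wrongly classified as below the boundary.
      bool below_signed = static_cast<int8_t>(instance_type) <
                          static_cast<int8_t>(kFirstJsObjectType);  // true

      printf("unsigned: %d, signed: %d\n", below_unsigned, below_signed);
      return 0;
    }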
=======================================
--- /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Thu May 20 07:02:51 2010
+++ /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Thu May 20 07:33:02 2010
@@ -806,8 +806,8 @@
__ Check(equal, "Unexpected declaration in current context.");
}
if (mode == Variable::CONST) {
- __ mov(eax, Immediate(Factory::the_hole_value()));
- __ mov(CodeGenerator::ContextOperand(esi, slot->index()), eax);
+ __ mov(CodeGenerator::ContextOperand(esi, slot->index()),
+ Immediate(Factory::the_hole_value()));
// No write barrier since the hole value is in old space.
} else if (function != NULL) {
VisitForValue(function, kAccumulator);
@@ -823,10 +823,8 @@
__ push(esi);
__ push(Immediate(variable->name()));
// Declaration nodes are always introduced in one of two modes.
- ASSERT(mode == Variable::VAR ||
- mode == Variable::CONST);
- PropertyAttributes attr =
- (mode == Variable::VAR) ? NONE : READ_ONLY;
+ ASSERT(mode == Variable::VAR || mode == Variable::CONST);
+ PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
__ push(Immediate(Smi::FromInt(attr)));
// Push initial value, if any.
// Note: For variables we must not push an initial value (such as
@@ -1070,8 +1068,8 @@
__ StackLimitCheck(&stack_limit_hit);
__ bind(&stack_check_done);
- // Generate code for the going to the next element by incrementing
- // the index (smi) stored on top of the stack.
+ // Generate code for going to the next element by incrementing the
+ // index (smi) stored on top of the stack.
__ bind(loop_statement.continue_target());
__ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
__ jmp(&loop);
@@ -2033,9 +2031,9 @@
__ j(not_zero, if_false);
__ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
- __ j(less, if_false);
+ __ j(below, if_false);
__ cmp(ecx, LAST_JS_OBJECT_TYPE);
- __ j(less_equal, if_true);
+ __ j(below_equal, if_true);
__ jmp(if_false);
Apply(context_, if_true, if_false);
@@ -2227,7 +2225,7 @@
__ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
__ movzx_b(ebx, FieldOperand(eax, Map::kInstanceTypeOffset));
__ cmp(ebx, FIRST_JS_OBJECT_TYPE);
- __ j(less, &null);
+ __ j(below, &null);
// As long as JS_FUNCTION_TYPE is the last instance type and it is
// right after LAST_JS_OBJECT_TYPE, we can avoid checking for
=======================================
--- /branches/bleeding_edge/src/x64/codegen-x64.cc Thu May 20 07:02:51 2010
+++ /branches/bleeding_edge/src/x64/codegen-x64.cc Thu May 20 07:33:02 2010
@@ -3842,11 +3842,13 @@
__ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
destination()->false_target()->Branch(not_zero);
- __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
- destination()->false_target()->Branch(less);
- __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
+ __ movzxbq(kScratchRegister,
+ FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
+ __ cmpq(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE));
+ destination()->false_target()->Branch(below);
+ __ cmpq(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE));
obj.Unuse();
- destination()->Split(less_equal);
+ destination()->Split(below_equal);
}
@@ -4338,7 +4340,7 @@
__ PrepareCallCFunction(0);
__ CallCFunction(ExternalReference::random_uint32_function(), 0);
- // Convert 32 random bits in eax to 0.(32 random bits) in a double
+ // Convert 32 random bits in rax to 0.(32 random bits) in a double
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
__ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
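The comment in this hunk compresses a nice bit trick: write the 32 random bits into the low mantissa of a double whose exponent makes the implicit leading one equal 2^20, then subtract 1.0 x 2^20; what remains is exactly 0.(32 random bits), a double in [0, 1). A self-contained sketch of the same arithmetic — an illustration, not the V8 code, which builds its constant from the single-precision pattern 0x49800000 instead:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    double random_bits_to_double(uint32_t random32) {
      // High word 0x41300000: exponent 1043 - 1023 = 20, top 20 mantissa
      // bits zero. Low word: the random bits. Value: 1.(20 0s)(32 bits) * 2^20.
      uint64_t bits = (UINT64_C(0x41300000) << 32) | random32;
      double d;
      std::memcpy(&d, &bits, sizeof d);
      return d - 1048576.0;  // subtract 1.0 * 2^20, leaving 0.(32 random bits)
    }

    int main() {
      printf("%.10f\n", random_bits_to_double(0x80000000u));  // prints 0.5
      return 0;
    }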
=======================================
--- /branches/bleeding_edge/src/x64/full-codegen-x64.cc Thu May 20 07:02:51 2010
+++ /branches/bleeding_edge/src/x64/full-codegen-x64.cc Thu May 20 07:33:02 2010
@@ -81,11 +81,17 @@
bool function_in_register = true;
// Possibly allocate a local context.
- if (scope()->num_heap_slots() > 0) {
+ int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+ if (heap_slots > 0) {
Comment cmnt(masm_, "[ Allocate local context");
// Argument to NewContext is the function, which is still in rdi.
__ push(rdi);
- __ CallRuntime(Runtime::kNewContext, 1);
+ if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub(heap_slots);
+ __ CallStub(&stub);
+ } else {
+ __ CallRuntime(Runtime::kNewContext, 1);
+ }
function_in_register = false;
// Context is returned in both rax and rsi. It replaces the context
// passed to us. It's saved in the stack and kept live in rsi.
@@ -145,7 +151,18 @@
}
{ Comment cmnt(masm_, "[ Declarations");
- VisitDeclarations(scope()->declarations());
+ // For named function expressions, declare the function name as a
+ // constant.
+ if (scope()->is_function_scope() && scope()->function() != NULL) {
+ EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+ }
+ // Visit all the explicit declarations unless there is an illegal
+ // redeclaration.
+ if (scope()->HasIllegalRedeclaration()) {
+ scope()->VisitIllegalRedeclaration(this);
+ } else {
+ VisitDeclarations(scope()->declarations());
+ }
}
{ Comment cmnt(masm_, "[ Stack check");
@@ -427,6 +444,39 @@
break;
}
}
+
+
+void FullCodeGenerator::PrepareTest(Label* materialize_true,
+ Label* materialize_false,
+ Label** if_true,
+ Label** if_false) {
+ switch (context_) {
+ case Expression::kUninitialized:
+ UNREACHABLE();
+ break;
+ case Expression::kEffect:
+ // In an effect context, the true and the false case branch to the
+ // same label.
+ *if_true = *if_false = materialize_true;
+ break;
+ case Expression::kValue:
+ *if_true = materialize_true;
+ *if_false = materialize_false;
+ break;
+ case Expression::kTest:
+ *if_true = true_label_;
+ *if_false = false_label_;
+ break;
+ case Expression::kValueTest:
+ *if_true = materialize_true;
+ *if_false = false_label_;
+ break;
+ case Expression::kTestValue:
+ *if_true = true_label_;
+ *if_false = materialize_false;
+ break;
+ }
+}
void FullCodeGenerator::Apply(Expression::Context context,
@@ -492,6 +542,61 @@
break;
}
}
+
+
+// Convert constant control flow (true or false) to the result expected for
+// a given expression context.
+void FullCodeGenerator::Apply(Expression::Context context, bool flag) {
+ switch (context) {
+ case Expression::kUninitialized:
+ UNREACHABLE();
+ break;
+ case Expression::kEffect:
+ break;
+ case Expression::kValue: {
+ Heap::RootListIndex value_root_index =
+ flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
+ switch (location_) {
+ case kAccumulator:
+ __ LoadRoot(result_register(), value_root_index);
+ break;
+ case kStack:
+ __ PushRoot(value_root_index);
+ break;
+ }
+ break;
+ }
+ case Expression::kTest:
+ __ jmp(flag ? true_label_ : false_label_);
+ break;
+ case Expression::kTestValue:
+ switch (location_) {
+ case kAccumulator:
+ // If value is false it's needed.
+ if (!flag) __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
+ break;
+ case kStack:
+ // If value is false it's needed.
+ if (!flag) __ PushRoot(Heap::kFalseValueRootIndex);
+ break;
+ }
+ __ jmp(flag ? true_label_ : false_label_);
+ break;
+ case Expression::kValueTest:
+ switch (location_) {
+ case kAccumulator:
+ // If value is true it's needed.
+ if (flag) __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
+ break;
+ case kStack:
+ // If value is true it's needed.
+ if (flag) __ PushRoot(Heap::kTrueValueRootIndex);
+ break;
+ }
+ __ jmp(flag ? true_label_ : false_label_);
+ break;
+ }
+}
void FullCodeGenerator::DoTest(Expression::Context context) {
@@ -669,22 +774,23 @@
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
+void FullCodeGenerator::EmitDeclaration(Variable* variable,
+ Variable::Mode mode,
+ FunctionLiteral* function) {
Comment cmnt(masm_, "[ Declaration");
- Variable* var = decl->proxy()->var();
- ASSERT(var != NULL); // Must have been resolved.
- Slot* slot = var->slot();
- Property* prop = var->AsProperty();
+ ASSERT(variable != NULL); // Must have been resolved.
+ Slot* slot = variable->slot();
+ Property* prop = variable->AsProperty();
if (slot != NULL) {
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
- if (decl->mode() == Variable::CONST) {
+ if (mode == Variable::CONST) {
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
- } else if (decl->fun() != NULL) {
- VisitForValue(decl->fun(), kAccumulator);
+ } else if (function != NULL) {
+ VisitForValue(function, kAccumulator);
__ movq(Operand(rbp, SlotOffset(slot)), result_register());
}
break;
@@ -694,7 +800,7 @@
// this specific context.
// The variable in the decl always resides in the current context.
- ASSERT_EQ(0, scope()->ContextChainLength(var->scope()));
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
if (FLAG_debug_code) {
// Check if we have the correct context pointer.
__ movq(rbx,
@@ -702,13 +808,13 @@
__ cmpq(rbx, rsi);
__ Check(equal, "Unexpected declaration in current context.");
}
- if (decl->mode() == Variable::CONST) {
+ if (mode == Variable::CONST) {
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ movq(CodeGenerator::ContextOperand(rsi, slot->index()),
kScratchRegister);
// No write barrier since the hole value is in old space.
- } else if (decl->fun() != NULL) {
- VisitForValue(decl->fun(), kAccumulator);
+ } else if (function != NULL) {
+ VisitForValue(function, kAccumulator);
__ movq(CodeGenerator::ContextOperand(rsi, slot->index()),
result_register());
int offset = Context::SlotOffset(slot->index());
@@ -719,21 +825,19 @@
case Slot::LOOKUP: {
__ push(rsi);
- __ Push(var->name());
+ __ Push(variable->name());
// Declaration nodes are always introduced in one of two modes.
- ASSERT(decl->mode() == Variable::VAR ||
- decl->mode() == Variable::CONST);
- PropertyAttributes attr =
- (decl->mode() == Variable::VAR) ? NONE : READ_ONLY;
+ ASSERT(mode == Variable::VAR || mode == Variable::CONST);
+ PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
__ Push(Smi::FromInt(attr));
// Push initial value, if any.
// Note: For variables we must not push an initial value (such as
// 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value.
- if (decl->mode() == Variable::CONST) {
+ if (mode == Variable::CONST) {
__ PushRoot(Heap::kTheHoleValueRootIndex);
- } else if (decl->fun() != NULL) {
- VisitForValue(decl->fun(), kStack);
+ } else if (function != NULL) {
+ VisitForValue(function, kStack);
} else {
__ Push(Smi::FromInt(0)); // no initial value!
}
@@ -743,14 +847,14 @@
}
} else if (prop != NULL) {
- if (decl->fun() != NULL || decl->mode() == Variable::CONST) {
+ if (function != NULL || mode == Variable::CONST) {
// We are declaring a function or constant that rewrites to a
// property. Use (keyed) IC to set the initial value.
VisitForValue(prop->obj(), kStack);
VisitForValue(prop->key(), kStack);
- if (decl->fun() != NULL) {
- VisitForValue(decl->fun(), kAccumulator);
+ if (function != NULL) {
+ VisitForValue(function, kAccumulator);
} else {
__ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
}
@@ -767,6 +871,11 @@
}
}
}
+
+
+void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
+ EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
+}
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
@@ -780,12 +889,210 @@
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
- UNREACHABLE();
+ Comment cmnt(masm_, "[ SwitchStatement");
+ Breakable nested_statement(this, stmt);
+ SetStatementPosition(stmt);
+ // Keep the switch value on the stack until a case matches.
+ VisitForValue(stmt->tag(), kStack);
+
+ ZoneList<CaseClause*>* clauses = stmt->cases();
+ CaseClause* default_clause = NULL; // Can occur anywhere in the list.
+
+ Label next_test; // Recycled for each test.
+ // Compile all the tests with branches to their bodies.
+ for (int i = 0; i < clauses->length(); i++) {
+ CaseClause* clause = clauses->at(i);
+ // The default is not a test, but remember it as final fall through.
+ if (clause->is_default()) {
+ default_clause = clause;
+ continue;
+ }
+
+ Comment cmnt(masm_, "[ Case comparison");
+ __ bind(&next_test);
+ next_test.Unuse();
+
+ // Compile the label expression.
+ VisitForValue(clause->label(), kAccumulator);
+
+ // Perform the comparison as if via '==='. The comparison stub expects
+ // the smi vs. smi case to be handled before it is called.
+ Label slow_case;
+ __ movq(rdx, Operand(rsp, 0)); // Switch value.
+ __ JumpIfNotBothSmi(rdx, rax, &slow_case);
+ __ SmiCompare(rdx, rax);
+ __ j(not_equal, &next_test);
+ __ Drop(1); // Switch value is no longer needed.
+ __ jmp(clause->body_target()->entry_label());
+
+ __ bind(&slow_case);
+ CompareStub stub(equal, true);
+ __ CallStub(&stub);
+ __ testq(rax, rax);
+ __ j(not_equal, &next_test);
+ __ Drop(1); // Switch value is no longer needed.
+ __ jmp(clause->body_target()->entry_label());
+ }
+
+ // Discard the test value and jump to the default if present, otherwise
+ // to the end of the statement.
+ __ bind(&next_test);
+ __ Drop(1); // Switch value is no longer needed.
+ if (default_clause == NULL) {
+ __ jmp(nested_statement.break_target());
+ } else {
+ __ jmp(default_clause->body_target()->entry_label());
+ }
+
+ // Compile all the case bodies.
+ for (int i = 0; i < clauses->length(); i++) {
+ Comment cmnt(masm_, "[ Case body");
+ CaseClause* clause = clauses->at(i);
+ __ bind(clause->body_target()->entry_label());
+ VisitStatements(clause->statements());
+ }
+
+ __ bind(nested_statement.break_target());
}
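The shape of the emitted switch is: keep the tag on the stack, compile each non-default label into a compare-and-branch as if by '===' (smi fast path first, generic CompareStub as fallback), then compile all the bodies in a second pass so that fall-through between cases still works. A hypothetical C++ model of that control flow, with invented names:

    #include <cstdio>

    // Models: switch (tag) { case 1: ...; case 2: ...; default: ... }
    void lowered_switch(int tag) {
      // First pass: the test chain, one compare per case label.
      if (tag == 1) goto body0;
      if (tag == 2) goto body1;
      goto body_default;  // no match: default clause, or break target if none
      // Second pass: the bodies, bound in source order.
    body0:
      printf("one\n");    // no jump emitted here: falls through, as in JS
    body1:
      printf("one or two\n");
      goto done;          // an explicit 'break' in the source
    body_default:
      printf("other\n");
    done:
      return;
    }

    int main() {
      lowered_switch(1);  // prints "one", then "one or two"
      lowered_switch(3);  // prints "other"
      return 0;
    }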
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
- UNREACHABLE();
+ Comment cmnt(masm_, "[ ForInStatement");
+ SetStatementPosition(stmt);
+
+ Label loop, exit;
+ ForIn loop_statement(this, stmt);
+ increment_loop_depth();
+
+ // Get the object to enumerate over. Both SpiderMonkey and JSC
+ // ignore null and undefined in contrast to the specification; see
+ // ECMA-262 section 12.6.4.
+ VisitForValue(stmt->enumerable(), kAccumulator);
+ __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
+ __ j(equal, &exit);
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
+ __ j(equal, &exit);
+
+ // Convert the object to a JS object.
+ Label convert, done_convert;
+ __ JumpIfSmi(rax, &convert);
+ __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
+ __ j(above_equal, &done_convert);
+ __ bind(&convert);
+ __ push(rax);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ bind(&done_convert);
+ __ push(rax);
+
+ // TODO(kasperl): Check cache validity in generated code. This is a
+ // fast case for the JSObject::IsSimpleEnum cache validity
+ // checks. If we cannot guarantee cache validity, call the runtime
+ // system to check cache validity or get the property names in a
+ // fixed array.
+
+ // Get the set of properties to enumerate.
+ __ push(rax); // Duplicate the enumerable object on the stack.
+ __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
+
+ // If we got a map from the runtime call, we can do a fast
+ // modification check. Otherwise, we got a fixed array, and we have
+ // to do a slow check.
+ Label fixed_array;
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+ Heap::kMetaMapRootIndex);
+ __ j(not_equal, &fixed_array);
+
+ // We got a map in register rax. Get the enumeration cache from it.
+ __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset));
+ __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
+ __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
+
+ // Setup the four remaining stack slots.
+ __ push(rax); // Map.
+ __ push(rdx); // Enumeration cache.
+ __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
+ __ Integer32ToSmi(rax, rax);
+ __ push(rax); // Enumeration cache length (as smi).
+ __ Push(Smi::FromInt(0)); // Initial index.
+ __ jmp(&loop);
+
+ // We got a fixed array in register rax. Iterate through that.
+ __ bind(&fixed_array);
+ __ Push(Smi::FromInt(0)); // Map (0) - force slow check.
+ __ push(rax);
+ __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
+ __ Integer32ToSmi(rax, rax);
+ __ push(rax); // Fixed array length (as smi).
+ __ Push(Smi::FromInt(0)); // Initial index.
+
+ // Generate code for doing the condition check.
+ __ bind(&loop);
+ __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
+ __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
+ __ j(above_equal, loop_statement.break_target());
+
+ // Get the current entry of the array into register rbx.
+ __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+ SmiIndex index = __ SmiToIndex(rax, rax, kPointerSizeLog2);
+ __ movq(rbx, FieldOperand(rbx,
+ index.reg,
+ index.scale,
+ FixedArray::kHeaderSize));
+
+ // Get the expected map from the stack or a zero map in the
+ // permanent slow case into register rdx.
+ __ movq(rdx, Operand(rsp, 3 * kPointerSize));
+
+ // Check if the expected map still matches that of the enumerable.
+ // If not, we have to filter the key.
+ Label update_each;
+ __ movq(rcx, Operand(rsp, 4 * kPointerSize));
+ __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
+ __ j(equal, &update_each);
+
+ // Convert the entry to a string or null if it isn't a property
+ // anymore. If the property has been removed while iterating, we
+ // just skip it.
+ __ push(rcx); // Enumerable.
+ __ push(rbx); // Current entry.
+ __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
+ __ j(equal, loop_statement.continue_target());
+ __ movq(rbx, rax);
+
+ // Update the 'each' property or variable from the possibly filtered
+ // entry in register rbx.
+ __ bind(&update_each);
+ __ movq(result_register(), rbx);
+ // Perform the assignment as if via '='.
+ EmitAssignment(stmt->each());
+
+ // Generate code for the body of the loop.
+ Label stack_limit_hit, stack_check_done;
+ Visit(stmt->body());
+
+ __ StackLimitCheck(&stack_limit_hit);
+ __ bind(&stack_check_done);
+
+ // Generate code for going to the next element by incrementing the
+ // index (smi) stored on top of the stack.
+ __ bind(loop_statement.continue_target());
+ __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
+ __ jmp(&loop);
+
+ // Slow case for the stack limit check.
+ StackCheckStub stack_check_stub;
+ __ bind(&stack_limit_hit);
+ __ CallStub(&stack_check_stub);
+ __ jmp(&stack_check_done);
+
+ // Remove the pointers stored on the stack.
+ __ bind(loop_statement.break_target());
+ __ addq(rsp, Immediate(5 * kPointerSize));
+
+ // Exit and decrement the loop depth.
+ __ bind(&exit);
+ decrement_loop_depth();
}
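Throughout this loop the code maintains five stack slots: the current index, the cache length, the array of keys, the map the cache was built for (or a smi zero to force the slow path), and the enumerable object itself. When the object's map no longer matches, each key is re-filtered so that properties deleted during iteration are skipped. A hypothetical model of that logic, with all names invented and a std::set standing in for the FILTER_KEY builtin:

    #include <cstdio>
    #include <set>
    #include <string>
    #include <vector>

    struct Obj {
      int map_id;                   // stands in for the object's map
      std::set<std::string> props;  // current own properties
    };

    void for_in_model(Obj& obj, int expected_map_id,
                      const std::vector<std::string>& keys) {
      for (size_t index = 0; index < keys.size(); ++index) {
        const std::string& key = keys[index];
        // If the map still matches, the cached key is valid as-is;
        // otherwise filter it and skip keys that are no longer present.
        if (obj.map_id != expected_map_id && obj.props.count(key) == 0)
          continue;
        printf("each = %s\n", key.c_str());  // assign 'each', run the body
        obj.props.erase("b");                // the body mutates the object...
        obj.map_id = 2;                      // ...so its map changes
      }
    }

    int main() {
      Obj o{1, {"a", "b", "c"}};
      for_in_model(o, 1, {"a", "b", "c"});  // prints a and c; skips deleted b
      return 0;
    }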
@@ -844,7 +1151,20 @@
Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
? "Context slot"
: "Stack slot");
- Apply(context, slot);
+ if (var->mode() == Variable::CONST) {
+ // Constants may be the hole value if they have not been initialized.
+ // Unhole them.
+ Label done;
+ MemOperand slot_operand = EmitSlotSearch(slot, rax);
+ __ movq(rax, slot_operand);
+ __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &done);
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+ __ bind(&done);
+ Apply(context, rax);
+ } else {
+ Apply(context, slot);
+ }
} else {
Comment cmnt(masm_, "Rewritten parameter");
@@ -980,22 +1300,28 @@
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Comment cmnt(masm_, "[ ArrayLiteral");
+
+ ZoneList<Expression*>* subexprs = expr->values();
+ int length = subexprs->length();
+
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(expr->constant_elements());
if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
- } else {
+ } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
+ } else {
+ FastCloneShallowArrayStub stub(length);
+ __ CallStub(&stub);
}
bool result_saved = false; // Is the result saved to the stack?
// Emit code to evaluate all the non-constant subexpressions and to store
// them into the newly cloned array.
- ZoneList<Expression*>* subexprs = expr->values();
- for (int i = 0, len = subexprs->length(); i < len; i++) {
+ for (int i = 0; i < length; i++) {
Expression* subexpr = subexprs->at(i);
// If the subexpression is a literal or a simple materialized literal it
// is already set in the cloned array.
@@ -1155,6 +1481,57 @@
__ CallStub(&stub);
Apply(context, rax);
}
+
+
+void FullCodeGenerator::EmitAssignment(Expression* expr) {
+ // Invalid left-hand sides are rewritten to have a 'throw
+ // ReferenceError' on the left-hand side.
+ if (!expr->IsValidLeftHandSide()) {
+ VisitForEffect(expr);
+ return;
+ }
+
+ // Left-hand side can only be a property, a global or a (parameter or local)
+ // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
+ enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
+ LhsKind assign_type = VARIABLE;
+ Property* prop = expr->AsProperty();
+ if (prop != NULL) {
+ assign_type = (prop->key()->IsPropertyName())
+ ? NAMED_PROPERTY
+ : KEYED_PROPERTY;
+ }
+
+ switch (assign_type) {
+ case VARIABLE: {
+ Variable* var = expr->AsVariableProxy()->var();
+ EmitVariableAssignment(var, Token::ASSIGN, Expression::kEffect);
+ break;
+ }
+ case NAMED_PROPERTY: {
+ __ push(rax); // Preserve value.
+ VisitForValue(prop->obj(), kAccumulator);
+ __ movq(rdx, rax);
+ __ pop(rax); // Restore value.
+ __ Move(rcx, prop->key()->AsLiteral()->handle());
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ __ call(ic, RelocInfo::CODE_TARGET);
+ __ nop(); // Signal no inlined code.
+ break;
+ }
+ case KEYED_PROPERTY: {
+ __ push(rax); // Preserve value.
+ VisitForValue(prop->obj(), kStack);
+ VisitForValue(prop->key(), kStack);
+ __ movq(rax, Operand(rsp, 2 * kPointerSize));
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ __ call(ic, RelocInfo::CODE_TARGET);
+ __ nop(); // Signal no inlined code.
+ __ Drop(3); // Receiver, key, and extra copy of value.
+ break;
+ }
+ }
+}
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
@@ -1187,7 +1564,7 @@
if (op == Token::INIT_CONST) {
// Detect const reinitialization by checking for the hole value.
__ movq(rdx, Operand(rbp, SlotOffset(slot)));
- __ Cmp(rdx, Factory::the_hole_value());
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
__ j(not_equal, &done);
}
// Perform the assignment.
@@ -1199,7 +1576,7 @@
if (op == Token::INIT_CONST) {
// Detect const reinitialization by checking for the hole value.
__ movq(rdx, target);
- __ Cmp(rdx, Factory::the_hole_value());
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
__ j(not_equal, &done);
}
// Perform the assignment and issue the write barrier.
@@ -1362,7 +1739,8 @@
}
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
+ CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
__ CallStub(&stub);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -1377,8 +1755,47 @@
Variable* var = fun->AsVariableProxy()->AsVariable();
if (var != NULL && var->is_possibly_eval()) {
- // Call to the identifier 'eval'.
- UNREACHABLE();
+ // In a call to eval, we first call %ResolvePossiblyDirectEval to
+ // resolve the function we need to call and the receiver of the
+ // call. Then we call the resolved function using the given
+ // arguments.
+ VisitForValue(fun, kStack);
+ __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
+
+ // Push the arguments.
+ ZoneList<Expression*>* args = expr->arguments();
+ int arg_count = args->length();
+ for (int i = 0; i < arg_count; i++) {
+ VisitForValue(args->at(i), kStack);
+ }
+
+ // Push copy of the function - found below the arguments.
+ __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
+
+ // Push copy of the first argument or undefined if it doesn't exist.
+ if (arg_count > 0) {
+ __ push(Operand(rsp, arg_count * kPointerSize));
+ } else {
+ __ PushRoot(Heap::kUndefinedValueRootIndex);
+ }
+
+ // Push the receiver of the enclosing function and do runtime call.
+ __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
+ __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
+
+ // The runtime call returns a pair of values in rax (function) and
+ // rdx (receiver). Touch up the stack with the right values.
+ __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
+ __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
+
+ // Record source position for debugger.
+ SetSourcePosition(expr->position());
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
+ CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
+ __ CallStub(&stub);
+ // Restore context register.
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ DropAndApply(1, context_, rax);
} else if (var != NULL && !var->is_this() && var->is_global()) {
// Call to a global variable.
// Push global object as receiver for the call IC lookup.
@@ -1386,8 +1803,15 @@
EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (var != NULL && var->slot() != NULL &&
var->slot()->type() == Slot::LOOKUP) {
- // Call to a lookup slot.
- UNREACHABLE();
+ // Call to a lookup slot (dynamically introduced variable). Call
+ // the runtime to find the function to call (returned in rax) and
+ // the object holding it (returned in rdx).
+ __ push(context_register());
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
+ __ push(rax); // Function.
+ __ push(rdx); // Receiver.
+ EmitCallWithStub(expr);
} else if (fun->AsProperty() != NULL) {
// Call to an object property.
Property* prop = fun->AsProperty();
@@ -1476,9 +1900,713 @@
// Replace function on TOS with result in rax, or pop it.
DropAndApply(1, context_, rax);
}
+
+
+void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
+ Handle<String> name = expr->name();
+ if (strcmp("_IsSmi", *name->ToCString()) == 0) {
+ EmitIsSmi(expr->arguments());
+ } else if (strcmp("_IsNonNegativeSmi", *name->ToCString()) == 0) {
+ EmitIsNonNegativeSmi(expr->arguments());
+ } else if (strcmp("_IsObject", *name->ToCString()) == 0) {
+ EmitIsObject(expr->arguments());
+ } else if (strcmp("_IsUndetectableObject", *name->ToCString()) == 0) {
+ EmitIsUndetectableObject(expr->arguments());
+ } else if (strcmp("_IsFunction", *name->ToCString()) == 0) {
+ EmitIsFunction(expr->arguments());
+ } else if (strcmp("_IsArray", *name->ToCString()) == 0) {
+ EmitIsArray(expr->arguments());
+ } else if (strcmp("_IsRegExp", *name->ToCString()) == 0) {
+ EmitIsRegExp(expr->arguments());
+ } else if (strcmp("_IsConstructCall", *name->ToCString()) == 0) {
+ EmitIsConstructCall(expr->arguments());
+ } else if (strcmp("_ObjectEquals", *name->ToCString()) == 0) {
+ EmitObjectEquals(expr->arguments());
+ } else if (strcmp("_Arguments", *name->ToCString()) == 0) {
+ EmitArguments(expr->arguments());
+ } else if (strcmp("_ArgumentsLength", *name->ToCString()) == 0) {
+ EmitArgumentsLength(expr->arguments());
+ } else if (strcmp("_ClassOf", *name->ToCString()) == 0) {
+ EmitClassOf(expr->arguments());
+ } else if (strcmp("_Log", *name->ToCString()) == 0) {
+ EmitLog(expr->arguments());
+ } else if (strcmp("_RandomHeapNumber", *name->ToCString()) == 0) {
+ EmitRandomHeapNumber(expr->arguments());
+ } else if (strcmp("_SubString", *name->ToCString()) == 0) {
+ EmitSubString(expr->arguments());
+ } else if (strcmp("_RegExpExec", *name->ToCString()) == 0) {
+ EmitRegExpExec(expr->arguments());
+ } else if (strcmp("_ValueOf", *name->ToCString()) == 0) {
+ EmitValueOf(expr->arguments());
+ } else if (strcmp("_SetValueOf", *name->ToCString()) == 0) {
+ EmitSetValueOf(expr->arguments());
+ } else if (strcmp("_NumberToString", *name->ToCString()) == 0) {
+ EmitNumberToString(expr->arguments());
+ } else if (strcmp("_CharFromCode", *name->ToCString()) == 0) {
+ EmitCharFromCode(expr->arguments());
+ } else if (strcmp("_FastCharCodeAt", *name->ToCString()) == 0) {
+ EmitFastCharCodeAt(expr->arguments());
+ } else if (strcmp("_StringAdd", *name->ToCString()) == 0) {
+ EmitStringAdd(expr->arguments());
+ } else if (strcmp("_StringCompare", *name->ToCString()) == 0) {
+ EmitStringCompare(expr->arguments());
+ } else if (strcmp("_MathPow", *name->ToCString()) == 0) {
+ EmitMathPow(expr->arguments());
+ } else if (strcmp("_MathSin", *name->ToCString()) == 0) {
+ EmitMathSin(expr->arguments());
+ } else if (strcmp("_MathCos", *name->ToCString()) == 0) {
+ EmitMathCos(expr->arguments());
+ } else if (strcmp("_MathSqrt", *name->ToCString()) == 0) {
+ EmitMathSqrt(expr->arguments());
+ } else if (strcmp("_CallFunction", *name->ToCString()) == 0) {
+ EmitCallFunction(expr->arguments());
+ } else if (strcmp("_RegExpConstructResult", *name->ToCString()) == 0) {
+ EmitRegExpConstructResult(expr->arguments());
+ } else if (strcmp("_SwapElements", *name->ToCString()) == 0) {
+ EmitSwapElements(expr->arguments());
+ } else if (strcmp("_GetFromCache", *name->ToCString()) == 0) {
+ EmitGetFromCache(expr->arguments());
+ } else {
+ UNREACHABLE();
+ }
+}
+
+
+void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForValue(args->at(0), kAccumulator);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+ __ JumpIfSmi(rax, if_true);
+ __ jmp(if_false);
+
+ Apply(context_, if_true, if_false);
+}
+
+
+void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForValue(args->at(0), kAccumulator);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+ Condition positive_smi = __ CheckPositiveSmi(rax);
+ __ j(positive_smi, if_true);
+ __ jmp(if_false);
+
+ Apply(context_, if_true, if_false);
+}
+
+
+void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForValue(args->at(0), kAccumulator);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+ __ JumpIfSmi(rax, if_false);
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
+ __ j(equal, if_true);
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+ // Undetectable objects behave like undefined when tested with typeof.
+ __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
+ Immediate(1 << Map::kIsUndetectable));
+ __ j(not_zero, if_false);
+ __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
+ __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE));
+ __ j(below, if_false);
+ __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE));
+ __ j(below_equal, if_true);
+ __ jmp(if_false);
+
+ Apply(context_, if_true, if_false);
+}
+
+
+void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForValue(args->at(0), kAccumulator);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+ __ JumpIfSmi(rax, if_false);
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+ __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
+ Immediate(1 << Map::kIsUndetectable));
+ __ j(not_zero, if_true);
+ __ jmp(if_false);
+
+ Apply(context_, if_true, if_false);
+}
+
+
+void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForValue(args->at(0), kAccumulator);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+ __ JumpIfSmi(rax, if_false);
+ __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
+ __ j(equal, if_true);
+ __ jmp(if_false);
+
+ Apply(context_, if_true, if_false);
+}
+
+
+void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForValue(args->at(0), kAccumulator);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+ __ JumpIfSmi(rax, if_false);
+ __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
+ __ j(equal, if_true);
+ __ jmp(if_false);
+
+ Apply(context_, if_true, if_false);
+}
+
+
+void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ VisitForValue(args->at(0), kAccumulator);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+ __ JumpIfSmi(rax, if_false);
+ __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
+ __ j(equal, if_true);
+ __ jmp(if_false);
+
+ Apply(context_, if_true, if_false);
+}
+
+
+void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 0);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+ // Get the frame pointer for the calling frame.
+ __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+
+ // Skip the arguments adaptor frame if it exists.
+ Label check_frame_marker;
+ __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ j(not_equal, &check_frame_marker);
+ __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
+
+ // Check the marker in the calling frame.
+ __ bind(&check_frame_marker);
+ __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset),
+ Smi::FromInt(StackFrame::CONSTRUCT));
+ __ j(equal, if_true);
+ __ jmp(if_false);
+
+ Apply(context_, if_true, if_false);
+}
+
+
+void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 2);
+
+ // Load the two objects into registers and perform the comparison.
+ VisitForValue(args->at(0), kStack);
+ VisitForValue(args->at(1), kAccumulator);
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+ __ pop(rbx);
+ __ cmpq(rax, rbx);
+ __ j(equal, if_true);
+ __ jmp(if_false);
+
+ Apply(context_, if_true, if_false);
+}
+
+
+void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+
+ // ArgumentsAccessStub expects the key in rdx and the formal
+ // parameter count in rax.
+ VisitForValue(args->at(0), kAccumulator);
+ __ movq(rdx, rax);
+ __ Move(rax, Smi::FromInt(scope()->num_parameters()));
+ ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+ __ CallStub(&stub);
+ Apply(context_, rax);
+}
+
+
+void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 0);
+
+ Label exit;
+ // Get the number of formal parameters.
+ __ Move(rax, Smi::FromInt(scope()->num_parameters()));
+
+ // Check if the calling frame is an arguments adaptor frame.
+ __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+ __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ j(not_equal, &exit);
+
+ // Arguments adaptor case: Read the arguments length from the
+ // adaptor frame.
+ __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
+
+ __ bind(&exit);
+ if (FLAG_debug_code) __ AbortIfNotSmi(rax);
+ Apply(context_, rax);
+}
+
+
+void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+ Label done, null, function, non_function_constructor;
+
+ VisitForValue(args->at(0), kAccumulator);
+
+ // If the object is a smi, we return null.
+ __ JumpIfSmi(rax, &null);
+
+ // Check that the object is a JS object but take special care of JS
+ // functions to make sure they have 'Function' as their class.
+ __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
+ __ j(below, &null);
+
***The diff for this file has been truncated for email.***
=======================================
--- /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Thu May 20 07:02:51 2010
+++ /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Thu May 20 07:33:02 2010
@@ -802,7 +802,7 @@
void MacroAssembler::SmiSub(Register dst,
Register src1,
- Operand const& src2,
+ const Operand& src2,
Label* on_not_smi_result) {
if (on_not_smi_result == NULL) {
// No overflow checking. Use only when it's known that
@@ -918,6 +918,14 @@
addq(dst, src);
}
}
+
+
+void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
+ if (constant->value() != 0) {
+ Move(kScratchRegister, constant);
+ addq(dst, kScratchRegister);
+ }
+}
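The scratch-register detour in the new overload is forced by the smi representation: on x64, V8 of this era keeps the 32-bit smi payload in the upper half of the 64-bit word, so even Smi::FromInt(1) has the raw value 1 << 32, which does not fit the sign-extended 32-bit immediate that addq accepts; the constant must be materialized in a register first. A standalone sketch of the representation (illustration only, not V8 code):

    #include <cstdint>
    #include <cstdio>

    // x64 smi tagging of this era: the payload lives in the upper 32 bits.
    int64_t smi_from_int(int32_t v) { return static_cast<int64_t>(v) << 32; }
    int32_t smi_to_int(int64_t smi) { return static_cast<int32_t>(smi >> 32); }

    int main() {
      int64_t slot = smi_from_int(41);  // a tagged smi in a memory slot
      // SmiAddConstant(slot, Smi::FromInt(1)): the tagged constant is
      // 0x0000000100000000, too wide for an imm32, hence the scratch move.
      slot += smi_from_int(1);
      printf("%d\n", smi_to_int(slot));  // prints 42
      return 0;
    }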
void MacroAssembler::SmiAddConstant(Register dst,
=======================================
--- /branches/bleeding_edge/src/x64/macro-assembler-x64.h Thu May 20 07:02:51 2010
+++ /branches/bleeding_edge/src/x64/macro-assembler-x64.h Thu May 20 07:33:02 2010
@@ -306,6 +306,10 @@
// No overflow testing on the result is done.
void SmiAddConstant(Register dst, Register src, Smi* constant);
+ // Add an integer constant to a tagged smi, giving a tagged smi as result.
+ // No overflow testing on the result is done.
+ void SmiAddConstant(const Operand& dst, Smi* constant);
+
// Add an integer constant to a tagged smi, giving a tagged smi as result,
// or jumping to a label if the result cannot be represented by a smi.
void SmiAddConstant(Register dst,
@@ -349,7 +353,7 @@
void SmiSub(Register dst,
Register src1,
- Operand const& src2,
+ const Operand& src2,
Label* on_not_smi_result);
// Multiplies smi values and return the result as a smi,
=======================================
--- /branches/bleeding_edge/test/cctest/test-log-stack-tracer.cc Thu May 20 07:02:51 2010
+++ /branches/bleeding_edge/test/cctest/test-log-stack-tracer.cc Thu May 20 07:33:02 2010
@@ -273,7 +273,7 @@
// StackTracer uses Top::c_entry_fp as a starting point for stack
// walking.
TEST(CFromJSStackTrace) {
-#ifdef V8_HOST_ARCH_IA32
+#if defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64)
// TODO(711) The hack of replacing the inline runtime function
// RandomHeapNumber with GetFrameNumber does not work with the way the
// full compiler generates inline runtime calls.
@@ -315,7 +315,7 @@
// Top::c_entry_fp value. In this case, StackTracer uses passed frame
// pointer value as a starting point for stack walking.
TEST(PureJSStackTrace) {
-#ifdef V8_HOST_ARCH_IA32
+#if defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64)
// TODO(711) The hack of replacing the inline runtime function
// RandomHeapNumber with GetFrameNumber does not work with the way the
// full compiler generates inline runtime calls.