Revision: 7251
Author: [email protected]
Date: Thu Mar 17 13:28:30 2011
Log: Implement fast path for strict closure creation.
Review URL: http://codereview.chromium.org/6677036/
http://code.google.com/p/v8/source/detail?r=7251
Modified:
/branches/bleeding_edge/src/arm/code-stubs-arm.cc
/branches/bleeding_edge/src/arm/codegen-arm.cc
/branches/bleeding_edge/src/arm/full-codegen-arm.cc
/branches/bleeding_edge/src/arm/lithium-codegen-arm.cc
/branches/bleeding_edge/src/code-stubs.h
/branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
/branches/bleeding_edge/src/ia32/codegen-ia32.cc
/branches/bleeding_edge/src/ia32/full-codegen-ia32.cc
/branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc
/branches/bleeding_edge/src/x64/code-stubs-x64.cc
/branches/bleeding_edge/src/x64/codegen-x64.cc
/branches/bleeding_edge/src/x64/full-codegen-x64.cc
/branches/bleeding_edge/src/x64/lithium-codegen-x64.cc
=======================================
--- /branches/bleeding_edge/src/arm/code-stubs-arm.cc Tue Mar 15 04:19:13
2011
+++ /branches/bleeding_edge/src/arm/code-stubs-arm.cc Thu Mar 17 13:28:30
2011
@@ -91,11 +91,15 @@
&gc,
TAG_OBJECT);
+ int map_index = strict_mode_ == kStrictMode
+ ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+ : Context::FUNCTION_MAP_INDEX;
+
// Compute the function map in the current global context and set that
// as the map of the allocated object.
__ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
- __ ldr(r2, MemOperand(r2, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+ __ ldr(r2, MemOperand(r2, Context::SlotOffset(map_index)));
__ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
// Initialize the rest of the function. We don't have to update the
=======================================
--- /branches/bleeding_edge/src/arm/codegen-arm.cc Thu Mar 17 13:28:17 2011
+++ /branches/bleeding_edge/src/arm/codegen-arm.cc Thu Mar 17 13:28:30 2011
@@ -3116,9 +3116,9 @@
// space for nested functions that don't need literals cloning.
if (!pretenure &&
scope()->is_function_scope() &&
- function_info->num_literals() == 0 &&
- !function_info->strict_mode()) { // Strict mode functions use slow path.
- FastNewClosureStub stub;
+ function_info->num_literals() == 0) {
+ FastNewClosureStub stub(
+ function_info->strict_mode() ? kStrictMode : kNonStrictMode);
frame_->EmitPush(Operand(function_info));
frame_->SpillAll();
frame_->CallStub(&stub, 1);
=======================================
--- /branches/bleeding_edge/src/arm/full-codegen-arm.cc Thu Mar 17 13:28:17
2011
+++ /branches/bleeding_edge/src/arm/full-codegen-arm.cc Thu Mar 17 13:28:30
2011
@@ -1086,9 +1086,8 @@
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
- info->num_literals() == 0 &&
- !info->strict_mode()) { // Strict mode functions use slow path.
- FastNewClosureStub stub;
+ info->num_literals() == 0) {
+ FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
__ mov(r0, Operand(info));
__ push(r0);
__ CallStub(&stub);
=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Thu Mar 17
13:28:17 2011
+++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Thu Mar 17
13:28:30 2011
@@ -3726,9 +3726,9 @@
// space for nested functions that don't need literals cloning.
Handle<SharedFunctionInfo> shared_info = instr->shared_info();
bool pretenure = instr->hydrogen()->pretenure();
- if (!pretenure && shared_info->num_literals() == 0 &&
- !shared_info->strict_mode()) { // Strict mode functions use slow path.
- FastNewClosureStub stub;
+ if (!pretenure && shared_info->num_literals() == 0) {
+ FastNewClosureStub stub(
+ shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
__ mov(r1, Operand(shared_info));
__ push(r1);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
=======================================
--- /branches/bleeding_edge/src/code-stubs.h Mon Mar 14 09:54:51 2011
+++ /branches/bleeding_edge/src/code-stubs.h Thu Mar 17 13:28:30 2011
@@ -277,12 +277,17 @@
class FastNewClosureStub : public CodeStub {
public:
+ explicit FastNewClosureStub(StrictModeFlag strict_mode)
+ : strict_mode_(strict_mode) { }
+
void Generate(MacroAssembler* masm);
private:
const char* GetName() { return "FastNewClosureStub"; }
Major MajorKey() { return FastNewClosure; }
- int MinorKey() { return 0; }
+ int MinorKey() { return strict_mode_; }
+
+ StrictModeFlag strict_mode_;
};
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Tue Mar 15 03:03:57
2011
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Thu Mar 17 13:28:30
2011
@@ -69,11 +69,15 @@
// Get the function info from the stack.
__ mov(edx, Operand(esp, 1 * kPointerSize));
+ int map_index = strict_mode_ == kStrictMode
+ ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+ : Context::FUNCTION_MAP_INDEX;
+
// Compute the function map in the current global context and set that
// as the map of the allocated object.
__ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
- __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+ __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index)));
__ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
// Initialize the rest of the function. We don't have to update the
=======================================
--- /branches/bleeding_edge/src/ia32/codegen-ia32.cc Thu Mar 17 13:28:17
2011
+++ /branches/bleeding_edge/src/ia32/codegen-ia32.cc Thu Mar 17 13:28:30
2011
@@ -4918,9 +4918,9 @@
// space for nested functions that don't need literals cloning.
if (!pretenure &&
scope()->is_function_scope() &&
- function_info->num_literals() == 0 &&
- !function_info->strict_mode()) { // Strict mode functions use slow path.
- FastNewClosureStub stub;
+ function_info->num_literals() == 0) {
+ FastNewClosureStub stub(
+ function_info->strict_mode() ? kStrictMode : kNonStrictMode);
frame()->EmitPush(Immediate(function_info));
return frame()->CallStub(&stub, 1);
} else {
=======================================
--- /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Thu Mar 17
13:28:17 2011
+++ /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Thu Mar 17
13:28:30 2011
@@ -1019,9 +1019,8 @@
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
- info->num_literals() == 0 &&
- !info->strict_mode()) { // Strict mode functions go through slow path.
- FastNewClosureStub stub;
+ info->num_literals() == 0) {
+ FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
__ push(Immediate(info));
__ CallStub(&stub);
} else {
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Thu Mar 17
13:28:17 2011
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Thu Mar 17
13:28:30 2011
@@ -3739,9 +3739,9 @@
// space for nested functions that don't need literals cloning.
Handle<SharedFunctionInfo> shared_info = instr->shared_info();
bool pretenure = instr->hydrogen()->pretenure();
- if (!pretenure && shared_info->num_literals() == 0 &&
- !shared_info->strict_mode()) {
- FastNewClosureStub stub;
+ if (!pretenure && shared_info->num_literals() == 0) {
+ FastNewClosureStub stub(
+ shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
__ push(Immediate(shared_info));
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
} else {
=======================================
--- /branches/bleeding_edge/src/x64/code-stubs-x64.cc Tue Mar 15 03:03:57
2011
+++ /branches/bleeding_edge/src/x64/code-stubs-x64.cc Thu Mar 17 13:28:30
2011
@@ -68,11 +68,15 @@
// Get the function info from the stack.
__ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ int map_index = strict_mode_ == kStrictMode
+ ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+ : Context::FUNCTION_MAP_INDEX;
+
// Compute the function map in the current global context and set that
// as the map of the allocated object.
__ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
- __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+ __ movq(rcx, Operand(rcx, Context::SlotOffset(map_index)));
__ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
// Initialize the rest of the function. We don't have to update the
=======================================
--- /branches/bleeding_edge/src/x64/codegen-x64.cc Thu Mar 17 13:28:17 2011
+++ /branches/bleeding_edge/src/x64/codegen-x64.cc Thu Mar 17 13:28:30 2011
@@ -4262,9 +4262,9 @@
// space for nested functions that don't need literals cloning.
if (!pretenure &&
scope()->is_function_scope() &&
- function_info->num_literals() == 0 &&
- !function_info->strict_mode()) { // Strict mode functions use slow path.
- FastNewClosureStub stub;
+ function_info->num_literals() == 0) {
+ FastNewClosureStub stub(
+ function_info->strict_mode() ? kStrictMode : kNonStrictMode);
frame_->Push(function_info);
Result answer = frame_->CallStub(&stub, 1);
frame_->Push(&answer);
=======================================
--- /branches/bleeding_edge/src/x64/full-codegen-x64.cc Thu Mar 17 13:28:17
2011
+++ /branches/bleeding_edge/src/x64/full-codegen-x64.cc Thu Mar 17 13:28:30
2011
@@ -1041,9 +1041,8 @@
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
- info->num_literals() == 0 &&
- !info->strict_mode()) { // Strict mode functions use slow path.
- FastNewClosureStub stub;
+ info->num_literals() == 0) {
+ FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
__ Push(info);
__ CallStub(&stub);
} else {
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Thu Mar 17
13:28:17 2011
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Thu Mar 17
13:28:30 2011
@@ -3539,9 +3539,9 @@
// space for nested functions that don't need literals cloning.
Handle<SharedFunctionInfo> shared_info = instr->shared_info();
bool pretenure = instr->hydrogen()->pretenure();
- if (!pretenure && shared_info->num_literals() == 0 &&
- !shared_info->strict_mode()) {
- FastNewClosureStub stub;
+ if (!pretenure && shared_info->num_literals() == 0) {
+ FastNewClosureStub stub(
+ shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
__ Push(shared_info);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
} else {
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev