Reviewers: danno, Jakob,
Message:
I'd like to know your feedback before porting it to IA32 and ARM.
https://codereview.chromium.org/22935005/diff/1/src/x64/full-codegen-x64.cc
File src/x64/full-codegen-x64.cc (left):
https://codereview.chromium.org/22935005/diff/1/src/x64/full-codegen-x64.cc#oldcode4433
src/x64/full-codegen-x64.cc:4433: __ SmiAddConstant(rax, rax,
Smi::FromInt(1));
It took me some time to understand that the original code uses
SmiAddConstant for both the SMI and HeapNumber cases.
https://codereview.chromium.org/22935005/diff/1/src/x64/full-codegen-x64.cc#oldcode4439
src/x64/full-codegen-x64.cc:4439: // the first smi check before calling
ToNumber.
This CL splits the code at the first smi check.
https://codereview.chromium.org/22935005/diff/1/src/x64/full-codegen-x64.cc
File src/x64/full-codegen-x64.cc (right):
https://codereview.chromium.org/22935005/diff/1/src/x64/full-codegen-x64.cc#newcode4425
src/x64/full-codegen-x64.cc:4425: if (expr->op() == Token::INC) {
An alternative is:
if (expr->op() == Token::INC) {
__ SmiAddConstant(rax, rax, Smi::FromInt(1), &stub_call);
} else {
__ SmiSubConstant(rax, rax, Smi::FromInt(1), &stub_call);
}
__ jmp(&done, Label::kNear);
__ bind(&slow);
This version uses 5 instructions in SmiAddConstant (movq(kScratchRegister,
kSmiConstantRegister); addq(kScratchRegister, rax);
j(overflow, &stub_call); movq(rax, kScratchRegister); jmp(&done)) rather
than 2 (addq(rax, kSmiConstantRegister); j(no_overflow, &done)) for normal
cases.
Description:
Refine CountOperation of FullCodeGen
Please review this at https://codereview.chromium.org/22935005/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files:
M src/x64/full-codegen-x64.cc
Index: src/x64/full-codegen-x64.cc
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index
6333e87bea1b6c2ed82cd2b4ea153dd0cc917890..d4f6ee88a2d67ef1e31e94163dd858c333aa6676
100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -4395,14 +4395,51 @@ void
FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
PrepareForBailoutForId(prop->LoadId(), TOS_REG);
}
- // Call ToNumber only if operand is not a smi.
- Label no_conversion;
+ // Inline smi case if we are in a loop.
+ Label done, stub_call;
+ JumpPatchSite patch_site(masm_);
if (ShouldInlineSmiCase(expr->op())) {
- __ JumpIfSmi(rax, &no_conversion, Label::kNear);
+ Label slow;
+ patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
+
+ // Save result for postfix expressions.
+ if (expr->is_postfix()) {
+ if (!context()->IsEffect()) {
+ // Save the result on the stack. If we have a named or keyed
property
+ // we store the result under the receiver that is currently on top
+ // of the stack.
+ switch (assign_type) {
+ case VARIABLE:
+ __ push(rax);
+ break;
+ case NAMED_PROPERTY:
+ __ movq(Operand(rsp, kPointerSize), rax);
+ break;
+ case KEYED_PROPERTY:
+ __ movq(Operand(rsp, 2 * kPointerSize), rax);
+ break;
+ }
+ }
+ }
+
+ if (expr->op() == Token::INC) {
+ __ SmiAddConstant(rax, rax, Smi::FromInt(1));
+ } else {
+ __ SmiSubConstant(rax, rax, Smi::FromInt(1));
+ }
+ __ j(no_overflow, &done, Label::kNear);
+ // Call stub. Undo operation first.
+ if (expr->op() == Token::INC) {
+ __ SmiSubConstant(rax, rax, Smi::FromInt(1));
+ } else {
+ __ SmiAddConstant(rax, rax, Smi::FromInt(1));
+ }
+ __ jmp(&stub_call, Label::kNear);
+ __ bind(&slow);
}
+
ToNumberStub convert_stub;
__ CallStub(&convert_stub);
- __ bind(&no_conversion);
// Save result for postfix expressions.
if (expr->is_postfix()) {
@@ -4424,34 +4461,11 @@ void
FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
}
}
- // Inline smi case if we are in a loop.
- Label done, stub_call;
- JumpPatchSite patch_site(masm_);
-
- if (ShouldInlineSmiCase(expr->op())) {
- if (expr->op() == Token::INC) {
- __ SmiAddConstant(rax, rax, Smi::FromInt(1));
- } else {
- __ SmiSubConstant(rax, rax, Smi::FromInt(1));
- }
- __ j(overflow, &stub_call, Label::kNear);
- // We could eliminate this smi check if we split the code at
- // the first smi check before calling ToNumber.
- patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);
-
- __ bind(&stub_call);
- // Call stub. Undo operation first.
- if (expr->op() == Token::INC) {
- __ SmiSubConstant(rax, rax, Smi::FromInt(1));
- } else {
- __ SmiAddConstant(rax, rax, Smi::FromInt(1));
- }
- }
-
// Record position before stub call.
SetSourcePosition(expr->position());
// Call stub for +1/-1.
+ __ bind(&stub_call);
__ movq(rdx, rax);
__ Move(rax, Smi::FromInt(1));
BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.