Revision: 3513
Author: [email protected]
Date: Tue Dec 22 02:16:27 2009
Log: Add fast case stub for BIT_NOT.
Review URL: http://codereview.chromium.org/503079
http://code.google.com/p/v8/source/detail?r=3513
Added:
/branches/bleeding_edge/test/mjsunit/bit-not.js
Modified:
/branches/bleeding_edge/src/arm/codegen-arm.cc
/branches/bleeding_edge/src/code-stubs.h
/branches/bleeding_edge/src/codegen.cc
/branches/bleeding_edge/src/codegen.h
/branches/bleeding_edge/src/ia32/codegen-ia32.cc
/branches/bleeding_edge/src/x64/codegen-x64.cc
=======================================
--- /dev/null
+++ /branches/bleeding_edge/test/mjsunit/bit-not.js Tue Dec 22 02:16:27 2009
@@ -0,0 +1,75 @@
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function testBitNot(x) {
+ // The VM constant folds so we use that to check the result.
+ var expected = eval("~(" + x + ")");
+ var actual = ~x;
+ assertEquals(expected, actual, "x: " + x);
+
+ // Test the path where we can overwrite the result. Use -
+ // to avoid concatenating strings.
+ expected = eval("~(" + x + " - 0.01)");
+ actual = ~(x - 0.01);
+ assertEquals(expected, actual, "x - 0.01: " + x);
+}
+
+
+testBitNot(0);
+testBitNot(1);
+testBitNot(-1);
+testBitNot(100);
+testBitNot(0x40000000);
+testBitNot(0x7fffffff);
+testBitNot(0x80000000);
+
+testBitNot(2.2);
+testBitNot(-2.3);
+testBitNot(Infinity);
+testBitNot(NaN);
+testBitNot(-Infinity);
+testBitNot(0x40000000 + 0.12345);
+testBitNot(0x40000000 - 0.12345);
+testBitNot(0x7fffffff + 0.12345);
+testBitNot(0x7fffffff - 0.12345);
+testBitNot(0x80000000 + 0.12345);
+testBitNot(0x80000000 - 0.12345);
+
+testBitNot("0");
+testBitNot("2.3");
+testBitNot("-9.4");
+
+
+// Try to test that we can deal with allocation failures in
+// the fast path and just use the slow path instead.
+function TryToGC() {
+ var x = 0x40000000;
+ for (var i = 0; i < 1000000; i++) {
+ assertEquals(~0x40000000, ~x);
+ }
+}
+TryToGC();
=======================================
--- /branches/bleeding_edge/src/arm/codegen-arm.cc Mon Dec 21 05:30:10 2009
+++ /branches/bleeding_edge/src/arm/codegen-arm.cc Tue Dec 22 02:16:27 2009
@@ -3698,7 +3698,7 @@
bool overwrite =
(node->expression()->AsBinaryOperation() != NULL &&
node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
- UnarySubStub stub(overwrite);
+ GenericUnaryOpStub stub(Token::SUB, overwrite);
frame_->CallStub(&stub, 0);
break;
}
@@ -5940,7 +5940,9 @@
}
-void UnarySubStub::Generate(MacroAssembler* masm) {
+void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
+ ASSERT(op_ == Token::SUB);
+
Label undo;
Label slow;
Label not_smi;
=======================================
--- /branches/bleeding_edge/src/code-stubs.h Thu Dec 17 07:35:15 2009
+++ /branches/bleeding_edge/src/code-stubs.h Tue Dec 22 02:16:27 2009
@@ -46,7 +46,7 @@
V(FastNewClosure) \
V(FastNewContext) \
V(FastCloneShallowArray) \
- V(UnarySub) \
+ V(GenericUnaryOp) \
V(RevertToNumber) \
V(ToBoolean) \
V(Instanceof) \
=======================================
--- /branches/bleeding_edge/src/codegen.cc Mon Dec 21 05:30:10 2009
+++ /branches/bleeding_edge/src/codegen.cc Tue Dec 22 02:16:27 2009
@@ -446,6 +446,23 @@
const char* RuntimeStub::GetName() {
return Runtime::FunctionForId(id_)->stub_name;
}
+
+
+const char* GenericUnaryOpStub::GetName() {
+ switch (op_) {
+ case Token::SUB:
+ return overwrite_
+ ? "GenericUnaryOpStub_SUB_Overwrite"
+ : "GenericUnaryOpStub_SUB_Alloc";
+ case Token::BIT_NOT:
+ return overwrite_
+ ? "GenericUnaryOpStub_BIT_NOT_Overwrite"
+ : "GenericUnaryOpStub_BIT_NOT_Alloc";
+ default:
+ UNREACHABLE();
+ return "<unknown>";
+ }
+}
void RuntimeStub::Generate(MacroAssembler* masm) {
=======================================
--- /branches/bleeding_edge/src/codegen.h Fri Dec 18 01:33:24 2009
+++ /branches/bleeding_edge/src/codegen.h Tue Dec 22 02:16:27 2009
@@ -294,20 +294,26 @@
};
-class UnarySubStub : public CodeStub {
+class GenericUnaryOpStub : public CodeStub {
public:
- explicit UnarySubStub(bool overwrite)
- : overwrite_(overwrite) { }
+ GenericUnaryOpStub(Token::Value op, bool overwrite)
+ : op_(op), overwrite_(overwrite) { }
private:
+ Token::Value op_;
bool overwrite_;
- Major MajorKey() { return UnarySub; }
- int MinorKey() { return overwrite_ ? 1 : 0; }
- void Generate(MacroAssembler* masm);
-
- const char* GetName() {
- return overwrite_ ? "UnarySubStub_Overwrite" : "UnarySubStub_Alloc";
- }
+
+ class OverwriteField: public BitField<int, 0, 1> {};
+ class OpField: public BitField<Token::Value, 1, kMinorBits - 1> {};
+
+ Major MajorKey() { return GenericUnaryOp; }
+ int MinorKey() {
+ return OpField::encode(op_) | OverwriteField::encode(overwrite_);
+ }
+
+ void Generate(MacroAssembler* masm);
+
+ const char* GetName();
};
=======================================
--- /branches/bleeding_edge/src/ia32/codegen-ia32.cc Tue Dec 22 01:48:55 2009
+++ /branches/bleeding_edge/src/ia32/codegen-ia32.cc Tue Dec 22 02:16:27 2009
@@ -5515,12 +5515,12 @@
} else {
Load(node->expression());
+      bool overwrite =
+        (node->expression()->AsBinaryOperation() != NULL &&
+         node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
switch (op) {
case Token::SUB: {
-        bool overwrite =
-          (node->expression()->AsBinaryOperation() != NULL &&
-           node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
- UnarySubStub stub(overwrite);
+ GenericUnaryOpStub stub(Token::SUB, overwrite);
// TODO(1222589): remove dependency of TOS being cached inside stub
Result operand = frame_->Pop();
Result answer = frame_->CallStub(&stub, &operand);
@@ -5537,16 +5537,16 @@
__ test(operand.reg(), Immediate(kSmiTagMask));
smi_label.Branch(zero, &operand, taken);
- frame_->Push(&operand); // undo popping of TOS
- Result answer = frame_->InvokeBuiltin(Builtins::BIT_NOT,
- CALL_FUNCTION, 1);
-
+ GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
+ Result answer = frame_->CallStub(&stub, &operand);
continue_label.Jump(&answer);
+
smi_label.Bind(&answer);
answer.ToRegister();
frame_->Spill(answer.reg());
__ not_(answer.reg());
__ and_(answer.reg(), ~kSmiTagMask); // Remove inverted smi-tag.
+
continue_label.Bind(&answer);
frame_->Push(&answer);
break;
@@ -7282,9 +7282,15 @@
default: UNREACHABLE();
}
// Store the result in the HeapNumber and return.
- __ mov(Operand(esp, 1 * kPointerSize), ebx);
- __ fild_s(Operand(esp, 1 * kPointerSize));
- __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
+ if (CpuFeatures::IsSupported(SSE2)) {
+ CpuFeatures::Scope use_sse2(SSE2);
+ __ cvtsi2sd(xmm0, Operand(ebx));
+ __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
+ } else {
+ __ mov(Operand(esp, 1 * kPointerSize), ebx);
+ __ fild_s(Operand(esp, 1 * kPointerSize));
+ __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
+ }
GenerateReturn(masm);
}
@@ -7711,67 +7717,119 @@
}
-void UnarySubStub::Generate(MacroAssembler* masm) {
- Label undo;
- Label slow;
- Label done;
- Label try_float;
-
- // Check whether the value is a smi.
- __ test(eax, Immediate(kSmiTagMask));
- __ j(not_zero, &try_float, not_taken);
-
- // Enter runtime system if the value of the expression is zero
- // to make sure that we switch between 0 and -0.
- __ test(eax, Operand(eax));
- __ j(zero, &slow, not_taken);
-
- // The value of the expression is a smi that is not zero. Try
- // optimistic subtraction '0 - value'.
- __ mov(edx, Operand(eax));
- __ Set(eax, Immediate(0));
- __ sub(eax, Operand(edx));
- __ j(overflow, &undo, not_taken);
-
- // If result is a smi we are done.
- __ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &done, taken);
-
- // Restore eax and enter runtime system.
- __ bind(&undo);
- __ mov(eax, Operand(edx));
-
- // Enter runtime system.
- __ bind(&slow);
- __ pop(ecx); // pop return address
- __ push(eax);
- __ push(ecx); // push return address
- __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
-
- // Try floating point case.
- __ bind(&try_float);
- __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
- __ cmp(edx, Factory::heap_number_map());
- __ j(not_equal, &slow);
- if (overwrite_) {
- __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
- __ xor_(edx, HeapNumber::kSignMask); // Flip sign.
- __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
+void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
+ Label slow, done;
+
+ if (op_ == Token::SUB) {
+ // Check whether the value is a smi.
+ Label try_float;
+ __ test(eax, Immediate(kSmiTagMask));
+ __ j(not_zero, &try_float, not_taken);
+
+ // Go slow case if the value of the expression is zero
+ // to make sure that we switch between 0 and -0.
+ __ test(eax, Operand(eax));
+ __ j(zero, &slow, not_taken);
+
+ // The value of the expression is a smi that is not zero. Try
+ // optimistic subtraction '0 - value'.
+ Label undo;
+ __ mov(edx, Operand(eax));
+ __ Set(eax, Immediate(0));
+ __ sub(eax, Operand(edx));
+ __ j(overflow, &undo, not_taken);
+
+ // If result is a smi we are done.
+ __ test(eax, Immediate(kSmiTagMask));
+ __ j(zero, &done, taken);
+
+ // Restore eax and go slow case.
+ __ bind(&undo);
+ __ mov(eax, Operand(edx));
+ __ jmp(&slow);
+
+ // Try floating point case.
+ __ bind(&try_float);
+ __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
+ __ cmp(edx, Factory::heap_number_map());
+ __ j(not_equal, &slow);
+ if (overwrite_) {
+ __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
+ __ xor_(edx, HeapNumber::kSignMask); // Flip sign.
+ __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
+ } else {
+ __ mov(edx, Operand(eax));
+ // edx: operand
+ __ AllocateHeapNumber(eax, ebx, ecx, &undo);
+ // eax: allocated 'empty' number
+ __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
+ __ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
+ __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
+ __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
+ __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
+ }
+ } else if (op_ == Token::BIT_NOT) {
+ // Check if the operand is a heap number.
+ __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
+ __ cmp(edx, Factory::heap_number_map());
+ __ j(not_equal, &slow, not_taken);
+
+ // Convert the heap number in eax to an untagged integer in ecx.
+ IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), &slow);
+
+ // Do the bitwise operation and check if the result fits in a smi.
+ Label try_float;
+ __ not_(ecx);
+ __ cmp(ecx, 0xc0000000);
+ __ j(sign, &try_float, not_taken);
+
+ // Tag the result as a smi and we're done.
+ ASSERT(kSmiTagSize == 1);
+ __ lea(eax, Operand(ecx, times_2, kSmiTag));
+ __ jmp(&done);
+
+ // Try to store the result in a heap number.
+ __ bind(&try_float);
+ if (!overwrite_) {
+ // Allocate a fresh heap number, but don't overwrite eax until
+ // we're sure we can do it without going through the slow case
+ // that needs the value in eax.
+ __ AllocateHeapNumber(ebx, edx, edi, &slow);
+ __ mov(eax, Operand(ebx));
+ }
+ if (CpuFeatures::IsSupported(SSE2)) {
+ CpuFeatures::Scope use_sse2(SSE2);
+ __ cvtsi2sd(xmm0, Operand(ecx));
+ __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
+ } else {
+ __ push(ecx);
+ __ fild_s(Operand(esp, 0));
+ __ pop(ecx);
+ __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
+ }
} else {
- __ mov(edx, Operand(eax));
- // edx: operand
- __ AllocateHeapNumber(eax, ebx, ecx, &undo);
- // eax: allocated 'empty' number
- __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
- __ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
- __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
- __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
- __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
+ UNIMPLEMENTED();
}
+ // Return from the stub.
__ bind(&done);
-
__ StubReturn(1);
+
+ // Handle the slow case by jumping to the JavaScript builtin.
+ __ bind(&slow);
+ __ pop(ecx); // pop return address.
+ __ push(eax);
+ __ push(ecx); // push return address
+ switch (op_) {
+ case Token::SUB:
+ __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
+ break;
+ case Token::BIT_NOT:
+ __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
+ break;
+ default:
+ UNREACHABLE();
+ }
}
=======================================
--- /branches/bleeding_edge/src/x64/codegen-x64.cc Mon Dec 21 05:30:10 2009
+++ /branches/bleeding_edge/src/x64/codegen-x64.cc Tue Dec 22 02:16:27 2009
@@ -3109,7 +3109,7 @@
bool overwrite =
(node->expression()->AsBinaryOperation() != NULL &&
node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
- UnarySubStub stub(overwrite);
+ GenericUnaryOpStub stub(Token::SUB, overwrite);
// TODO(1222589): remove dependency of TOS being cached inside stub
Result operand = frame_->Pop();
Result answer = frame_->CallStub(&stub, &operand);
@@ -6272,7 +6272,9 @@
// End of CodeGenerator implementation.
-void UnarySubStub::Generate(MacroAssembler* masm) {
+void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
+ ASSERT(op_ == Token::SUB);
+
Label slow;
Label done;
Label try_float;
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev