Reviewers: Jakob,

Description:
Merged r10719 into 3.8 branch.

Ensure that byte registers are used for byte instructions on ia32 and x64.

BUG=v8:1945

[email protected]
TEST=


Please review this at https://chromiumcodereview.appspot.com/9417009/

SVN Base: https://v8.googlecode.com/svn/branches/3.8

Affected files:
  M src/ia32/assembler-ia32.cc
  M src/ia32/lithium-codegen-ia32.cc
  M src/version.cc
  M src/x64/assembler-x64.h
  M src/x64/assembler-x64.cc
  M src/x64/lithium-codegen-x64.cc
  A test/mjsunit/regress/regress-1945.js
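
For context on the diffs that follow: on ia32 only the four low general-purpose
registers (eax, ecx, edx, ebx; codes 0-3) have 8-bit forms (al, cl, dl, bl).
Emitting a byte instruction with any other register does not fault at assembly
time; it silently encodes one of ah/ch/dh/bh instead. The patch replaces the old
reg.code() < 4 asserts with CHECK(reg.is_byte_register()) so that such a
miscompile fails hard even in release builds. A minimal standalone sketch of the
invariant being enforced (simplified stand-ins, not V8 code):

  #include <cassert>
  #include <cstdint>

  // Simplified stand-in for V8's ia32 Register: codes 0-3 are eax, ecx, edx, ebx.
  struct Register {
    int code_;
    // Only codes 0-3 have 8-bit variants (al, cl, dl, bl) in 32-bit mode.
    bool is_byte_register() const { return code_ <= 3; }
  };

  // Hypothetical ModR/M encoder for a register byte operation such as mov_b.
  // Without the check, esi (code 6) would be encoded as dh.
  uint8_t modrm_for_byte_op(Register reg, int rm_low_bits) {
    assert(reg.is_byte_register());  // the patch uses CHECK(...) here
    return static_cast<uint8_t>(0xC0 | (reg.code_ << 3) | rm_low_bits);
  }

  int main() {
    Register eax{0};
    modrm_for_byte_op(eax, 1);      // fine: encodes al
    // modrm_for_byte_op({6}, 1);   // would trip the assert instead of encoding dh
    return 0;
  }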


Index: src/ia32/assembler-ia32.cc
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index bb050b63f91b2820bb9702a3255e54f295d43437..a42f6324e34abe4058d7ab9cc24a3faafb4f0f77 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -32,7 +32,7 @@

// The original source code covered by the above license above has been modified
 // significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.

 #include "v8.h"

@@ -575,7 +575,7 @@ void Assembler::leave() {


 void Assembler::mov_b(Register dst, const Operand& src) {
-  ASSERT(dst.code() < 4);
+  CHECK(dst.is_byte_register());
   EnsureSpace ensure_space(this);
   EMIT(0x8A);
   emit_operand(dst, src);
@@ -591,7 +591,7 @@ void Assembler::mov_b(const Operand& dst, int8_t imm8) {


 void Assembler::mov_b(const Operand& dst, Register src) {
-  ASSERT(src.code() < 4);
+  CHECK(src.is_byte_register());
   EnsureSpace ensure_space(this);
   EMIT(0x88);
   emit_operand(src, dst);
@@ -829,7 +829,7 @@ void Assembler::cmpb(const Operand& op, int8_t imm8) {


 void Assembler::cmpb(const Operand& op, Register reg) {
-  ASSERT(reg.is_byte_register());
+  CHECK(reg.is_byte_register());
   EnsureSpace ensure_space(this);
   EMIT(0x38);
   emit_operand(reg, op);
@@ -837,7 +837,7 @@ void Assembler::cmpb(const Operand& op, Register reg) {


 void Assembler::cmpb(Register reg, const Operand& op) {
-  ASSERT(reg.is_byte_register());
+  CHECK(reg.is_byte_register());
   EnsureSpace ensure_space(this);
   EMIT(0x3A);
   emit_operand(reg, op);
@@ -901,6 +901,7 @@ void Assembler::cmpw_ax(const Operand& op) {


 void Assembler::dec_b(Register dst) {
+  CHECK(dst.is_byte_register());
   EnsureSpace ensure_space(this);
   EMIT(0xFE);
   EMIT(0xC8 | dst.code());
@@ -1174,7 +1175,9 @@ void Assembler::test(Register reg, const Immediate& imm) {
   EnsureSpace ensure_space(this);
   // Only use test against byte for registers that have a byte
   // variant: eax, ebx, ecx, and edx.
-  if (imm.rmode_ == RelocInfo::NONE && is_uint8(imm.x_) && reg.code() < 4) {
+  if (imm.rmode_ == RelocInfo::NONE &&
+      is_uint8(imm.x_) &&
+      reg.is_byte_register()) {
     uint8_t imm8 = imm.x_;
     if (reg.is(eax)) {
       EMIT(0xA8);
@@ -1204,6 +1207,7 @@ void Assembler::test(Register reg, const Operand& op) {


 void Assembler::test_b(Register reg, const Operand& op) {
+  CHECK(reg.is_byte_register());
   EnsureSpace ensure_space(this);
   EMIT(0x84);
   emit_operand(reg, op);
@@ -1219,7 +1223,7 @@ void Assembler::test(const Operand& op, const Immediate& imm) {


 void Assembler::test_b(const Operand& op, uint8_t imm8) {
-  if (op.is_reg_only() && op.reg().code() >= 4) {
+  if (op.is_reg_only() && !op.reg().is_byte_register()) {
     test(op, Immediate(imm8));
     return;
   }
Index: src/ia32/lithium-codegen-ia32.cc
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index a594c6628eb7b075e30e979f8ea903e8fc40d323..46a35b6e650429df4bcb8b04fd256d7684855190 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -1869,11 +1869,10 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
     // Faster code path to avoid two compares: subtract lower bound from the
     // actual type and do a signed compare with the width of the type range.
     __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
-    __ mov(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
+    __ movzx_b(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
     __ sub(Operand(temp2), Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
-    __ cmpb(Operand(temp2),
-            static_cast<int8_t>(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
-                                FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
+    __ cmp(Operand(temp2), Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
+                                     FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
     __ j(above, is_false);
   }

@@ -4071,7 +4070,7 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
     } else {
       __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
       __ and_(temp, mask);
-      __ cmpb(Operand(temp), tag);
+      __ cmp(temp, tag);
       DeoptimizeIf(not_equal, instr->environment());
     }
   }
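
The lithium-codegen change above is the code-generation side of the same issue:
temp2 is whatever register the allocator picked, so a byte-wide cmpb against it
may not be encodable. Loading the instance type with movzx_b and then using a
full-width compare needs no byte register and, because instance types fit in a
byte, gives the same result for the unsigned range check. A rough C++ model of
that equivalence (hypothetical values, not V8 code):

  #include <cassert>
  #include <cstdint>

  // The class-of test is an unsigned 'type - first <= last - first' range check.
  bool in_range_byte(uint8_t type, uint8_t first, uint8_t last) {
    uint8_t d = static_cast<uint8_t>(type - first);    // old byte-width compare
    return d <= static_cast<uint8_t>(last - first);
  }

  bool in_range_word(uint8_t type, uint8_t first, uint8_t last) {
    uint32_t d = static_cast<uint32_t>(type) - first;  // new movzx_b + 32-bit cmp
    return d <= static_cast<uint32_t>(last - first);
  }

  int main() {
    // Exhaustively check one representative range; the two forms agree for
    // every byte-sized input.
    for (int t = 0; t < 256; ++t) {
      assert(in_range_byte(static_cast<uint8_t>(t), 0x80, 0x87) ==
             in_range_word(static_cast<uint8_t>(t), 0x80, 0x87));
    }
    return 0;
  }
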
Index: src/version.cc
diff --git a/src/version.cc b/src/version.cc
index fd8972c123046f9774b3a9a69e3490a0ba3c5bc6..ec009045c536916a34c7e6e85c0b2c57ceb61299 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -35,7 +35,7 @@
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     8
 #define BUILD_NUMBER      9
-#define PATCH_LEVEL       6
+#define PATCH_LEVEL       7
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0
Index: src/x64/assembler-x64.cc
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index eb8d7d4d998048f79bd20c77eb05760fe93094ec..9ce1af89b98bab7ea843c08e49e8bd621c90a635 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -775,7 +775,7 @@ void Assembler::immediate_arithmetic_op_8(byte subcode,
                                           Register dst,
                                           Immediate src) {
   EnsureSpace ensure_space(this);
-  if (dst.code() > 3) {
+  if (!dst.is_byte_register()) {
     // Use 64-bit mode byte registers.
     emit_rex_64(dst);
   }
@@ -1059,7 +1059,7 @@ void Assembler::decl(const Operand& dst) {

 void Assembler::decb(Register dst) {
   EnsureSpace ensure_space(this);
-  if (dst.code() > 3) {
+  if (!dst.is_byte_register()) {
     // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
     emit_rex_32(dst);
   }
@@ -1387,7 +1387,7 @@ void Assembler::leave() {

 void Assembler::movb(Register dst, const Operand& src) {
   EnsureSpace ensure_space(this);
-  if (dst.code() > 3) {
+  if (!dst.is_byte_register()) {
     // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
     emit_rex_32(dst, src);
   } else {
@@ -1400,7 +1400,7 @@ void Assembler::movb(Register dst, const Operand& src) {

 void Assembler::movb(Register dst, Immediate imm) {
   EnsureSpace ensure_space(this);
-  if (dst.code() > 3) {
+  if (!dst.is_byte_register()) {
     emit_rex_32(dst);
   }
   emit(0xB0 + dst.low_bits());
@@ -1410,7 +1410,7 @@ void Assembler::movb(Register dst, Immediate imm) {

 void Assembler::movb(const Operand& dst, Register src) {
   EnsureSpace ensure_space(this);
-  if (src.code() > 3) {
+  if (!src.is_byte_register()) {
     emit_rex_32(src, dst);
   } else {
     emit_optional_rex_32(src, dst);
@@ -1931,7 +1931,7 @@ void Assembler::setcc(Condition cc, Register reg) {
   }
   EnsureSpace ensure_space(this);
   ASSERT(is_uint4(cc));
-  if (reg.code() > 3) {  // Use x64 byte registers, where different.
+  if (!reg.is_byte_register()) {  // Use x64 byte registers, where different.
     emit_rex_32(reg);
   }
   emit(0x0F);
@@ -1996,7 +1996,7 @@ void Assembler::testb(Register dst, Register src) {
     emit(0x84);
     emit_modrm(src, dst);
   } else {
-    if (dst.code() > 3 || src.code() > 3) {
+    if (!dst.is_byte_register() || !src.is_byte_register()) {
       // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
       emit_rex_32(dst, src);
     }
@@ -2013,7 +2013,7 @@ void Assembler::testb(Register reg, Immediate mask) {
     emit(0xA8);
     emit(mask.value_);  // Low byte emitted.
   } else {
-    if (reg.code() > 3) {
+    if (!reg.is_byte_register()) {
       // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
       emit_rex_32(reg);
     }
@@ -2036,7 +2036,7 @@ void Assembler::testb(const Operand& op, Immediate mask) {

 void Assembler::testb(const Operand& op, Register reg) {
   EnsureSpace ensure_space(this);
-  if (reg.code() > 3) {
+  if (!reg.is_byte_register()) {
     // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
     emit_rex_32(reg, op);
   } else {
Index: src/x64/assembler-x64.h
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 745850d822e68a003a338f689b5755c5545fb252..7af33e126f8401641ea128ecc7d1054267dbb5a5 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -30,7 +30,7 @@

 // The original source code covered by the above license above has been
 // modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.

 // A lightweight X64 Assembler.

@@ -131,6 +131,8 @@ struct Register {
   }
   bool is_valid() const { return 0 <= code_ && code_ < kNumRegisters; }
   bool is(Register reg) const { return code_ == reg.code_; }
+  // rax, rbx, rcx and rdx are byte registers, the rest are not.
+  bool is_byte_register() const { return code_ <= 3; }
   int code() const {
     ASSERT(is_valid());
     return code_;
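
x64 differs from ia32 here: every general-purpose register has a byte form, but
for codes above 3 it is only reachable with a REX prefix; without REX, register
encodings 4-7 still name ah/ch/dh/bh. That is why the assembler-x64.cc hunks
above emit a REX prefix on !is_byte_register() rather than CHECK-failing as the
ia32 assembler does. A minimal sketch of that encoding decision, using setcc as
in the patch (hypothetical helper, not V8 code):

  #include <cstdint>
  #include <vector>

  // Simplified stand-in for V8's x64 Register (codes 0..15).
  struct Register {
    int code_;
    bool is_byte_register() const { return code_ <= 3; }  // al, cl, dl, bl
    int low_bits() const { return code_ & 0x7; }
  };

  // Hypothetical encoder for 'setcc cc, reg8' (0F 90+cc, ModRM /0).
  void emit_setcc(std::vector<uint8_t>* buf, uint8_t cc, Register reg) {
    if (!reg.is_byte_register()) {
      // A REX prefix (REX.B for r8b-r15b) switches encodings 4-7 from
      // ah/ch/dh/bh to spl/bpl/sil/dil.
      buf->push_back(static_cast<uint8_t>(0x40 | (reg.code_ >> 3)));
    }
    buf->push_back(0x0F);
    buf->push_back(static_cast<uint8_t>(0x90 | cc));
    buf->push_back(static_cast<uint8_t>(0xC0 | reg.low_bits()));
  }
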
Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index b14fa8b34a851065decdce931d56591ef8a64e57..c0723ffb43c5bb61c6f377c6adb76d64f47a4cc8 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -1794,11 +1794,10 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
     // Faster code path to avoid two compares: subtract lower bound from the
     // actual type and do a signed compare with the width of the type range.
     __ movq(temp, FieldOperand(input, HeapObject::kMapOffset));
-    __ movq(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
-    __ subb(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
-    __ cmpb(temp2,
-            Immediate(static_cast<int8_t>(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
-                                          FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)));
+    __ movzxbl(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
+    __ subq(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
+    __ cmpq(temp2, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
+                             FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
     __ j(above, is_false);
   }

Index: test/mjsunit/regress/regress-1945.js
diff --git a/test/mjsunit/regress/regress-1945.js b/test/mjsunit/regress/regress-1945.js
new file mode 100644
index 0000000000000000000000000000000000000000..bffc775fc42c113c9c6c66bd21ca5b098941a5c5
--- /dev/null
+++ b/test/mjsunit/regress/regress-1945.js
@@ -0,0 +1,34 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var _d = new Date();
+_d.setHours(0,0,0,0);
+_d.setHours(0,0,0,0);
+%OptimizeFunctionOnNextCall(_d.setHours);
+_d.setHours(0,0,0,0);

