Revision: 16263
Author:   [email protected]
Date:     Wed Aug 21 16:32:23 2013 UTC
Log:      Merged r16224 into 3.20 branch.

Fix invalid out-of-bounds store in MacroAssembler::Allocate.

[email protected]
BUG=chromium:263515

Review URL: https://codereview.chromium.org/22980024
http://code.google.com/p/v8/source/detail?r=16263
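
A note on the bug for readers of the diff below: when DOUBLE_ALIGNMENT is
requested and the allocation top is pointer-aligned but not double-aligned,
Allocate stores a one-word filler map at top and bumps top by kDoubleSize / 2
before carving out the object. In new-space the allocation limit is itself
double-aligned, so an unaligned top is strictly below the limit and the
filler store stays in bounds. In old data space the limit is only
pointer-aligned, so an unaligned top can sit exactly at the limit, and the
unconditional filler store writes one word past the end of the space. The
patch adds a limit check that branches to gc_required before the store. The
following is a minimal C++ sketch of that invariant, not V8 code: the
function, its parameters, and the concrete constant values are invented for
illustration; only the constant names mirror the source.

#include <cstdint>

// Constant names mirror V8; the values are illustrative for a 32-bit
// target (on x64, kPointerSize == kDoubleSize, so no filler is needed).
constexpr uintptr_t kPointerSize         = 4;
constexpr uintptr_t kDoubleSize          = 8;
constexpr uintptr_t kDoubleAlignmentMask = kDoubleSize - 1;

// Hypothetical model of the alignment step in MacroAssembler::Allocate.
// Returns the double-aligned allocation start, or 0 to model the branch
// to gc_required.
uintptr_t AlignAllocationTop(uintptr_t top, uintptr_t limit,
                             bool limit_is_double_aligned,
                             uintptr_t one_pointer_filler_map) {
  if ((top & kDoubleAlignmentMask) != 0) {
    // The fix: when the limit is only pointer-aligned (old data space),
    // an unaligned top may already equal the limit, and the filler store
    // below would land out of bounds. Bail out to the GC path first.
    if (!limit_is_double_aligned && top >= limit) {
      return 0;  // gc_required
    }
    // Safe to store one word: either the limit is double-aligned
    // (new-space), so an unaligned top is strictly below it, or the
    // check above established top < limit; both values are
    // pointer-aligned, hence top + kPointerSize <= limit.
    *reinterpret_cast<uintptr_t*>(top) = one_pointer_filler_map;
    top += kDoubleSize / 2;  // skip the one-word filler
  }
  return top;
}

In the patch itself this guard is the cmp/branch-to-gc_required pair added
under PRETENURE_OLD_DATA_SPACE in each port: ia32 compares result against
the allocation_limit ExternalReference (which the patch hoists above the
alignment code so it is in scope), while arm and mips compare against a
scratch register the surrounding code has loaded with the limit (ip and t9
respectively). The x64 hunks only update the comment, since its
DOUBLE_ALIGNMENT block is a debug-mode assertion rather than a filler store.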

Modified:
 /branches/3.20/src/arm/macro-assembler-arm.cc
 /branches/3.20/src/ia32/macro-assembler-ia32.cc
 /branches/3.20/src/mips/macro-assembler-mips.cc
 /branches/3.20/src/version.cc
 /branches/3.20/src/x64/macro-assembler-x64.cc

=======================================
--- /branches/3.20/src/arm/macro-assembler-arm.cc Tue Aug 13 17:09:37 2013 UTC
+++ /branches/3.20/src/arm/macro-assembler-arm.cc Wed Aug 21 16:32:23 2013 UTC
@@ -1732,12 +1732,16 @@

   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
-    // always safe because the limit of the heap is always aligned.
+    // safe in new-space because the limit of the heap is aligned there.
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
     Label aligned;
     b(eq, &aligned);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand(ip));
+      b(hs, gc_required);
+    }
     mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
     str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex));
     bind(&aligned);
@@ -1830,12 +1834,16 @@

   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
-    // always safe because the limit of the heap is always aligned.
+    // safe in new-space because the limit of the heap is aligned there.
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
     Label aligned;
     b(eq, &aligned);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand(ip));
+      b(hs, gc_required);
+    }
     mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
     str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex));
     bind(&aligned);
=======================================
--- /branches/3.20/src/ia32/macro-assembler-ia32.cc Tue Aug 13 17:09:37 2013 UTC
+++ /branches/3.20/src/ia32/macro-assembler-ia32.cc Wed Aug 21 16:32:23 2013 UTC
@@ -1307,26 +1307,29 @@
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);

+  ExternalReference allocation_limit =
+      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
+
   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand::StaticVariable(allocation_limit));
+      j(above_equal, gc_required);
+    }
     mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
     bind(&aligned);
   }

+  // Calculate new top and bail out if space is exhausted.
   Register top_reg = result_end.is_valid() ? result_end : result;
-
-  // Calculate new top and bail out if space is exhausted.
-  ExternalReference allocation_limit =
-      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
-
   if (!top_reg.is(result)) {
     mov(top_reg, result);
   }
@@ -1381,14 +1384,21 @@
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);

+  ExternalReference allocation_limit =
+      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
+
   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand::StaticVariable(allocation_limit));
+      j(above_equal, gc_required);
+    }
     mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
@@ -1396,9 +1406,6 @@
   }

   // Calculate new top and bail out if space is exhausted.
-  ExternalReference allocation_limit =
-      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
-
   // We assume that element_count*element_size + header_size does not
   // overflow.
   if (element_count_type == REGISTER_VALUE_IS_SMI) {
@@ -1452,14 +1459,21 @@
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);

+  ExternalReference allocation_limit =
+      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
+
   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand::StaticVariable(allocation_limit));
+      j(above_equal, gc_required);
+    }
     mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
@@ -1467,9 +1481,6 @@
   }

   // Calculate new top and bail out if space is exhausted.
-  ExternalReference allocation_limit =
-      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
-
   if (!object_size.is(result_end)) {
     mov(result_end, object_size);
   }
=======================================
--- /branches/3.20/src/mips/macro-assembler-mips.cc Tue Aug 13 17:09:37 2013 UTC
+++ /branches/3.20/src/mips/macro-assembler-mips.cc Wed Aug 21 16:32:23 2013 UTC
@@ -2944,12 +2944,15 @@

   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
-    // always safe because the limit of the heap is always aligned.
+    // safe in new-space because the limit of the heap is aligned there.
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     And(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     Branch(&aligned, eq, scratch2, Operand(zero_reg));
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      Branch(gc_required, Ugreater_equal, result, Operand(t9));
+    }
     li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
     sw(scratch2, MemOperand(result));
     Addu(result, result, Operand(kDoubleSize / 2));
@@ -3028,12 +3031,15 @@

   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
-    // always safe because the limit of the heap is always aligned.
+    // safe in new-space because the limit of the heap is aligned there.
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     And(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     Branch(&aligned, eq, scratch2, Operand(zero_reg));
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      Branch(gc_required, Ugreater_equal, result, Operand(t9));
+    }
     li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
     sw(scratch2, MemOperand(result));
     Addu(result, result, Operand(kDoubleSize / 2));
=======================================
--- /branches/3.20/src/version.cc       Thu Aug 15 08:05:35 2013 UTC
+++ /branches/3.20/src/version.cc       Wed Aug 21 16:32:23 2013 UTC
@@ -35,7 +35,7 @@
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     20
 #define BUILD_NUMBER      17
-#define PATCH_LEVEL       1
+#define PATCH_LEVEL       2
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0
=======================================
--- /branches/3.20/src/x64/macro-assembler-x64.cc Tue Aug 13 17:09:37 2013 UTC
+++ /branches/3.20/src/x64/macro-assembler-x64.cc Wed Aug 21 16:32:23 2013 UTC
@@ -3859,7 +3859,7 @@
   LoadAllocationTopHelper(result, scratch, flags);

   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
     testq(result, Immediate(kDoubleAlignmentMask));
     Check(zero, kAllocationIsNotDoubleAligned);
@@ -3938,7 +3938,7 @@
   LoadAllocationTopHelper(result, scratch, flags);

   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
     testq(result, Immediate(kDoubleAlignmentMask));
     Check(zero, kAllocationIsNotDoubleAligned);
