Reviewers: ulan, jochen

Description:
A64: Fixes for the veneers emission.

This patch includes 3 fixes for veneers emission.

1) Block veneer pools emission in the PatchingAssembler.
2) Fix the check for veneer pool emission just before a constant pool.
3) Forbid copy of labels. The list of JumpTableEntry used to track the
   deoptimization table entries would make copies of the labels when growing.
   Doing so, it would confuse the Assembler that was tracking the labels via
   pointers.

Please review this at https://codereview.chromium.org/200133002/

SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge

Affected files (+24, -18 lines):
  M src/a64/assembler-a64.h
  M src/a64/assembler-a64.cc
  M src/a64/lithium-codegen-a64.h
  M src/a64/lithium-codegen-a64.cc
  M src/assembler.h
  M src/deoptimizer.h


Index: src/a64/assembler-a64.cc
diff --git a/src/a64/assembler-a64.cc b/src/a64/assembler-a64.cc
index 5843c2ef965677c40019e60726923d86a18a2e28..51542b27da0d1cdc5230cc5a6d3f8c149644cdfe 100644
--- a/src/a64/assembler-a64.cc
+++ b/src/a64/assembler-a64.cc
@@ -2548,7 +2548,7 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {

   // Emit veneers for branches that would go out of range during emission of
   // the constant pool.
-  CheckVeneerPool(require_jump, kVeneerDistanceMargin - pool_size);
+  CheckVeneerPool(require_jump, kVeneerDistanceMargin + pool_size);

   Label size_check;
   bind(&size_check);
Index: src/a64/assembler-a64.h
diff --git a/src/a64/assembler-a64.h b/src/a64/assembler-a64.h
index c07c6380a3acfcb858c168f8f2b9320c4acb7f12..31d7f17e071612e503c8fbfba88fdf27f1a5b018 100644
--- a/src/a64/assembler-a64.h
+++ b/src/a64/assembler-a64.h
@@ -2175,20 +2175,19 @@ class PatchingAssembler : public Assembler {
     : Assembler(NULL,
                 reinterpret_cast<byte*>(start),
                 count * kInstructionSize + kGap) {
-    // Block constant pool emission.
-    StartBlockConstPool();
+    StartBlockPools();
   }

   PatchingAssembler(byte* start, unsigned count)
     : Assembler(NULL, start, count * kInstructionSize + kGap) {
     // Block constant pool emission.
-    StartBlockConstPool();
+    StartBlockPools();
   }

   ~PatchingAssembler() {
     // Const pool should still be blocked.
     ASSERT(is_const_pool_blocked());
-    EndBlockConstPool();
+    EndBlockPools();
     // Verify we have generated the number of instruction we expected.
     ASSERT((pc_offset() + kGap) == buffer_size_);
     // Verify no relocation information has been emitted.
Index: src/a64/lithium-codegen-a64.cc
diff --git a/src/a64/lithium-codegen-a64.cc b/src/a64/lithium-codegen-a64.cc
index 982cd8f1d0067152aa6c4c470db0be9c94b82fa0..c07e6ba03bf16184ef88d9bd8ab7d8f8aabd94bd 100644
--- a/src/a64/lithium-codegen-a64.cc
+++ b/src/a64/lithium-codegen-a64.cc
@@ -841,16 +841,16 @@ bool LCodeGen::GenerateDeoptJumpTable() {
   __ bind(&table_start);
   Label needs_frame;
   for (int i = 0; i < deopt_jump_table_.length(); i++) {
-    __ Bind(&deopt_jump_table_[i].label);
-    Address entry = deopt_jump_table_[i].address;
-    Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
+    __ Bind(&deopt_jump_table_[i]->label);
+    Address entry = deopt_jump_table_[i]->address;
+    Deoptimizer::BailoutType type = deopt_jump_table_[i]->bailout_type;
     int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
     if (id == Deoptimizer::kNotDeoptimizationEntry) {
       Comment(";;; jump table entry %d.", i);
     } else {
      Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
     }
-    if (deopt_jump_table_[i].needs_frame) {
+    if (deopt_jump_table_[i]->needs_frame) {
       ASSERT(!info()->saves_caller_doubles());

       UseScratchRegisterScope temps(masm());
@@ -1039,15 +1039,16 @@ void LCodeGen::DeoptimizeBranch(
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
     if (deopt_jump_table_.is_empty() ||
-        (deopt_jump_table_.last().address != entry) ||
-        (deopt_jump_table_.last().bailout_type != bailout_type) ||
-        (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
-      Deoptimizer::JumpTableEntry table_entry(entry,
-                                              bailout_type,
-                                              !frame_is_built_);
+        (deopt_jump_table_.last()->address != entry) ||
+        (deopt_jump_table_.last()->bailout_type != bailout_type) ||
+        (deopt_jump_table_.last()->needs_frame != !frame_is_built_)) {
+      Deoptimizer::JumpTableEntry* table_entry =
+        new(zone()) Deoptimizer::JumpTableEntry(entry,
+                                                bailout_type,
+                                                !frame_is_built_);
       deopt_jump_table_.Add(table_entry, zone());
     }
-    __ B(&deopt_jump_table_.last().label,
+    __ B(&deopt_jump_table_.last()->label,
          branch_type, reg, bit);
   }
 }
Index: src/a64/lithium-codegen-a64.h
diff --git a/src/a64/lithium-codegen-a64.h b/src/a64/lithium-codegen-a64.h
index fb1ca94f847efaa96f5fa9863a0de0071763054a..cca87726d9529534a03a5d6aa5b81ff1aa129f61 100644
--- a/src/a64/lithium-codegen-a64.h
+++ b/src/a64/lithium-codegen-a64.h
@@ -347,7 +347,7 @@ class LCodeGen: public LCodeGenBase {
   void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;

   ZoneList<LEnvironment*> deoptimizations_;
-  ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
+  ZoneList<Deoptimizer::JumpTableEntry*> deopt_jump_table_;
   ZoneList<Handle<Object> > deoptimization_literals_;
   int inlined_function_count_;
   Scope* const scope_;
Index: src/assembler.h
diff --git a/src/assembler.h b/src/assembler.h
index cbbe03c7d122b29625c2dfcc1874373c011bab0d..cfd991495df9256eaab1aa7d638f8a7688ccadb5 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -210,6 +210,12 @@ class Label BASE_EMBEDDED {
   friend class Assembler;
   friend class Displacement;
   friend class RegExpMacroAssemblerIrregexp;
+
+#if V8_TARGET_ARCH_A64
+  // On A64, the Assembler keeps track of pointers to Labels to resolve
+  // branches
+  // to distant targets. Copying labels would confuse the Assembler.
+  DISALLOW_COPY_AND_ASSIGN(Label);  // NOLINT
+#endif
 };


Index: src/deoptimizer.h
diff --git a/src/deoptimizer.h b/src/deoptimizer.h
index 67690ded0de2e9dbccab31eb9342a40e223c15a6..4e9b4821be98423cc2c1970d78e5ae19c11f8ce6 100644
--- a/src/deoptimizer.h
+++ b/src/deoptimizer.h
@@ -134,7 +134,7 @@ class Deoptimizer : public Malloced {

   static const int kBailoutTypesWithCodeEntry = SOFT + 1;

-  struct JumpTableEntry {
+  struct JumpTableEntry : public ZoneObject {
     inline JumpTableEntry(Address entry,
                           Deoptimizer::BailoutType type,
                           bool frame)


--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
--- You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/d/optout.

Reply via email to