Revision: 19941
Author: [email protected]
Date: Fri Mar 14 15:14:42 2014 UTC
Log: A64: Fixes for the veneers emission.
This patch includes 3 fixes for veneers emission.
1) Block veneer pools emission in the PatchingAssembler.
2) Fix the check for veneer pool emission just before a constant pool.
3) Forbid copying labels. The list of JumpTableEntry objects used to
track the deoptimization table entries would make copies of the labels
when growing.
This confused the Assembler, which tracks labels via pointers.
[email protected]
Review URL: https://codereview.chromium.org/200133002
http://code.google.com/p/v8/source/detail?r=19941
Modified:
/branches/bleeding_edge/src/a64/assembler-a64.cc
/branches/bleeding_edge/src/a64/assembler-a64.h
/branches/bleeding_edge/src/a64/lithium-codegen-a64.cc
/branches/bleeding_edge/src/a64/lithium-codegen-a64.h
/branches/bleeding_edge/src/assembler.h
/branches/bleeding_edge/src/deoptimizer.h
=======================================
--- /branches/bleeding_edge/src/a64/assembler-a64.cc Thu Mar 13 10:28:53
2014 UTC
+++ /branches/bleeding_edge/src/a64/assembler-a64.cc Fri Mar 14 15:14:42
2014 UTC
@@ -2548,7 +2548,7 @@
// Emit veneers for branches that would go out of range during emission
of the
// constant pool.
- CheckVeneerPool(require_jump, kVeneerDistanceMargin - pool_size);
+ CheckVeneerPool(require_jump, kVeneerDistanceMargin + pool_size);
Label size_check;
bind(&size_check);
=======================================
--- /branches/bleeding_edge/src/a64/assembler-a64.h Thu Mar 13 09:45:02
2014 UTC
+++ /branches/bleeding_edge/src/a64/assembler-a64.h Fri Mar 14 15:14:42
2014 UTC
@@ -2175,20 +2175,19 @@
: Assembler(NULL,
reinterpret_cast<byte*>(start),
count * kInstructionSize + kGap) {
- // Block constant pool emission.
- StartBlockConstPool();
+ StartBlockPools();
}
PatchingAssembler(byte* start, unsigned count)
: Assembler(NULL, start, count * kInstructionSize + kGap) {
// Block constant pool emission.
- StartBlockConstPool();
+ StartBlockPools();
}
~PatchingAssembler() {
// Const pool should still be blocked.
ASSERT(is_const_pool_blocked());
- EndBlockConstPool();
+ EndBlockPools();
// Verify we have generated the number of instruction we expected.
ASSERT((pc_offset() + kGap) == buffer_size_);
// Verify no relocation information has been emitted.
=======================================
--- /branches/bleeding_edge/src/a64/lithium-codegen-a64.cc Fri Mar 14
10:22:55 2014 UTC
+++ /branches/bleeding_edge/src/a64/lithium-codegen-a64.cc Fri Mar 14
15:14:42 2014 UTC
@@ -841,16 +841,16 @@
__ bind(&table_start);
Label needs_frame;
for (int i = 0; i < deopt_jump_table_.length(); i++) {
- __ Bind(&deopt_jump_table_[i].label);
- Address entry = deopt_jump_table_[i].address;
- Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
+ __ Bind(&deopt_jump_table_[i]->label);
+ Address entry = deopt_jump_table_[i]->address;
+ Deoptimizer::BailoutType type = deopt_jump_table_[i]->bailout_type;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
} else {
Comment(";;; jump table entry %d: deoptimization bailout %d.", i,
id);
}
- if (deopt_jump_table_[i].needs_frame) {
+ if (deopt_jump_table_[i]->needs_frame) {
ASSERT(!info()->saves_caller_doubles());
UseScratchRegisterScope temps(masm());
@@ -1039,15 +1039,16 @@
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
if (deopt_jump_table_.is_empty() ||
- (deopt_jump_table_.last().address != entry) ||
- (deopt_jump_table_.last().bailout_type != bailout_type) ||
- (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry table_entry(entry,
- bailout_type,
- !frame_is_built_);
+ (deopt_jump_table_.last()->address != entry) ||
+ (deopt_jump_table_.last()->bailout_type != bailout_type) ||
+ (deopt_jump_table_.last()->needs_frame != !frame_is_built_)) {
+ Deoptimizer::JumpTableEntry* table_entry =
+ new(zone()) Deoptimizer::JumpTableEntry(entry,
+ bailout_type,
+ !frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
- __ B(&deopt_jump_table_.last().label,
+ __ B(&deopt_jump_table_.last()->label,
branch_type, reg, bit);
}
}
=======================================
--- /branches/bleeding_edge/src/a64/lithium-codegen-a64.h Wed Mar 12
09:59:36 2014 UTC
+++ /branches/bleeding_edge/src/a64/lithium-codegen-a64.h Fri Mar 14
15:14:42 2014 UTC
@@ -347,7 +347,7 @@
void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
ZoneList<LEnvironment*> deoptimizations_;
- ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
+ ZoneList<Deoptimizer::JumpTableEntry*> deopt_jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
int inlined_function_count_;
Scope* const scope_;
=======================================
--- /branches/bleeding_edge/src/assembler.h Wed Mar 12 17:18:49 2014 UTC
+++ /branches/bleeding_edge/src/assembler.h Fri Mar 14 15:14:42 2014 UTC
@@ -210,6 +210,12 @@
friend class Assembler;
friend class Displacement;
friend class RegExpMacroAssemblerIrregexp;
+
+#if V8_TARGET_ARCH_A64
+ // On A64, the Assembler keeps track of pointers to Labels to resolve
branches
+ // to distant targets. Copying labels would confuse the Assembler.
+ DISALLOW_COPY_AND_ASSIGN(Label); // NOLINT
+#endif
};
=======================================
--- /branches/bleeding_edge/src/deoptimizer.h Fri Mar 14 15:11:58 2014 UTC
+++ /branches/bleeding_edge/src/deoptimizer.h Fri Mar 14 15:14:42 2014 UTC
@@ -134,7 +134,7 @@
static const int kBailoutTypesWithCodeEntry = SOFT + 1;
- struct JumpTableEntry {
+ struct JumpTableEntry : public ZoneObject {
inline JumpTableEntry(Address entry,
Deoptimizer::BailoutType type,
bool frame)
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.