Revision: 25149
Author: [email protected]
Date: Wed Nov 5 12:46:43 2014 UTC
Log: [turbofan] optimize hot loop in ResolveControlFlow
[email protected]
BUG=
Review URL: https://codereview.chromium.org/694473002
https://code.google.com/p/v8/source/detail?r=25149
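
Note on the change: the hot loop is the lookup in ResolveControlFlow. For every value
live into a block, the old code re-walked the value's chain of split live ranges once
per predecessor edge to find the ranges covering the predecessor end and the block
start. The patch flattens each chain once (lazily, in the allocator's local zone) into
a sorted array of [start, end) bounds and answers each per-edge query with a binary
search. The sketch below is a minimal, self-contained illustration of that lookup
strategy, not the V8 code itself; the names Bound, BoundArray and the positions used
in main() are invented for the illustration.

// Illustration only: per value, the (already sorted, non-overlapping) live
// ranges are flattened into an array of [start, end) bounds, and the range
// covering a given position is found by binary search instead of a linear
// walk over the chain.
#include <cassert>
#include <cstddef>
#include <utility>
#include <vector>

struct Bound {
  int start;  // first covered position (inclusive)
  int end;    // first uncovered position (exclusive)
};

class BoundArray {
 public:
  // |bounds| must be sorted by start and pairwise disjoint, which is the
  // invariant a live range chain already provides.
  explicit BoundArray(std::vector<Bound> bounds) : bounds_(std::move(bounds)) {}

  // O(log n). The caller guarantees that some bound covers |position|, just
  // as ResolveControlFlow only asks about positions in the live-in set.
  const Bound& Find(int position) const {
    size_t left = 0;
    size_t right = bounds_.size();
    while (true) {
      size_t mid = left + (right - left) / 2;
      const Bound& b = bounds_[mid];
      if (b.start <= position) {
        if (position < b.end) return b;  // covered by this bound
        left = mid + 1;                  // covering bound lies to the right
      } else {
        right = mid;                     // covering bound lies to the left
      }
    }
  }

 private:
  std::vector<Bound> bounds_;
};

int main() {
  // One value split into three live ranges.
  const BoundArray array({{0, 4}, {4, 10}, {14, 20}});
  assert(array.Find(2).start == 0);
  assert(array.Find(9).start == 4);
  assert(array.Find(17).start == 14);
  return 0;
}

With B predecessor edges and a chain of n ranges, this turns the per-value cost from
O(B*n) repeated chain walks into one O(n) flattening pass plus O(B*log n) lookups.
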
Modified:
/branches/bleeding_edge/src/compiler/register-allocator.cc
/branches/bleeding_edge/src/compiler/register-allocator.h
=======================================
--- /branches/bleeding_edge/src/compiler/register-allocator.cc Tue Nov 4 09:21:12 2014 UTC
+++ /branches/bleeding_edge/src/compiler/register-allocator.cc Wed Nov 5 12:46:43 2014 UTC
@@ -199,7 +199,7 @@
}
-InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
+InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) const {
InstructionOperand* op = NULL;
if (HasRegisterAssigned()) {
DCHECK(!IsSpilled());
@@ -507,23 +507,24 @@
RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config,
- Zone* local_zone, Frame* frame,
+ Zone* zone, Frame* frame,
InstructionSequence* code,
const char* debug_name)
- : zone_(local_zone),
+ : local_zone_(zone),
frame_(frame),
code_(code),
debug_name_(debug_name),
config_(config),
- live_in_sets_(code->InstructionBlockCount(), zone()),
- live_ranges_(code->VirtualRegisterCount() * 2, zone()),
-      fixed_live_ranges_(this->config()->num_general_registers(), NULL, zone()),
+ live_in_sets_(code->InstructionBlockCount(), local_zone()),
+ live_ranges_(code->VirtualRegisterCount() * 2, local_zone()),
+ fixed_live_ranges_(this->config()->num_general_registers(), NULL,
+ local_zone()),
      fixed_double_live_ranges_(this->config()->num_double_registers(), NULL,
- zone()),
- unhandled_live_ranges_(code->VirtualRegisterCount() * 2, zone()),
- active_live_ranges_(8, zone()),
- inactive_live_ranges_(8, zone()),
- reusable_slots_(8, zone()),
+ local_zone()),
+      unhandled_live_ranges_(code->VirtualRegisterCount() * 2, local_zone()),
+ active_live_ranges_(8, local_zone()),
+ inactive_live_ranges_(8, local_zone()),
+ reusable_slots_(8, local_zone()),
mode_(UNALLOCATED_REGISTERS),
num_registers_(-1),
allocation_ok_(true) {
@@ -541,16 +542,16 @@
void RegisterAllocator::InitializeLivenessAnalysis() {
// Initialize the live_in sets for each block to NULL.
int block_count = code()->InstructionBlockCount();
- live_in_sets_.Initialize(block_count, zone());
- live_in_sets_.AddBlock(NULL, block_count, zone());
+ live_in_sets_.Initialize(block_count, local_zone());
+ live_in_sets_.AddBlock(NULL, block_count, local_zone());
}
BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) {
// Compute live out for the given block, except not including backward
// successor edges.
- BitVector* live_out =
- new (zone()) BitVector(code()->VirtualRegisterCount(), zone());
+ BitVector* live_out = new (local_zone())
+ BitVector(code()->VirtualRegisterCount(), local_zone());
// Process all successor blocks.
for (auto succ : block->successors()) {
@@ -584,7 +585,7 @@
while (!iterator.Done()) {
int operand_index = iterator.Current();
LiveRange* range = LiveRangeFor(operand_index);
- range->AddUseInterval(start, end, zone());
+ range->AddUseInterval(start, end, local_zone());
iterator.Advance();
}
}
@@ -630,7 +631,7 @@
// The LiveRange object itself can go in this zone, but the
// InstructionOperand needs
// to go in the code zone, since it may survive register allocation.
- result = new (zone()) LiveRange(FixedLiveRangeID(index), code_zone());
+  result = new (local_zone()) LiveRange(FixedLiveRangeID(index), code_zone());
DCHECK(result->IsFixed());
result->kind_ = GENERAL_REGISTERS;
SetLiveRangeAssignedRegister(result, index);
@@ -644,7 +645,8 @@
DCHECK(index < config()->num_aliased_double_registers());
LiveRange* result = fixed_double_live_ranges_[index];
if (result == NULL) {
-    result = new (zone()) LiveRange(FixedDoubleLiveRangeID(index), code_zone());
+ result = new (local_zone())
+ LiveRange(FixedDoubleLiveRangeID(index), code_zone());
DCHECK(result->IsFixed());
result->kind_ = DOUBLE_REGISTERS;
SetLiveRangeAssignedRegister(result, index);
@@ -656,11 +658,12 @@
LiveRange* RegisterAllocator::LiveRangeFor(int index) {
if (index >= live_ranges_.length()) {
- live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
+ live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1,
+ local_zone());
}
LiveRange* result = live_ranges_[index];
if (result == NULL) {
- result = new (zone()) LiveRange(index, code_zone());
+ result = new (local_zone()) LiveRange(index, code_zone());
live_ranges_[index] = result;
}
return result;
@@ -694,15 +697,15 @@
if (range->IsEmpty() || range->Start().Value() > position.Value()) {
// Can happen if there is a definition without use.
- range->AddUseInterval(position, position.NextInstruction(), zone());
- range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
+    range->AddUseInterval(position, position.NextInstruction(), local_zone());
+    range->AddUsePosition(position.NextInstruction(), NULL, NULL, local_zone());
} else {
range->ShortenTo(position);
}
if (operand->IsUnallocated()) {
    UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
- range->AddUsePosition(position, unalloc_operand, hint, zone());
+ range->AddUsePosition(position, unalloc_operand, hint, local_zone());
}
}
@@ -715,9 +718,9 @@
if (range == NULL) return;
if (operand->IsUnallocated()) {
    UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
- range->AddUsePosition(position, unalloc_operand, hint, zone());
+ range->AddUsePosition(position, unalloc_operand, hint, local_zone());
}
- range->AddUseInterval(block_start, position, zone());
+ range->AddUseInterval(block_start, position, local_zone());
}
@@ -1023,7 +1026,7 @@
if (!IsOutputRegisterOf(instr, i)) {
LiveRange* range = FixedLiveRangeFor(i);
        range->AddUseInterval(curr_position, curr_position.InstructionEnd(),
- zone());
+ local_zone());
}
}
}
@@ -1033,7 +1036,7 @@
if (!IsOutputDoubleRegisterOf(instr, i)) {
LiveRange* range = FixedDoubleLiveRangeFor(i);
        range->AddUseInterval(curr_position, curr_position.InstructionEnd(),
- zone());
+ local_zone());
}
}
}
@@ -1160,9 +1163,8 @@
void RegisterAllocator::MeetRegisterConstraints() {
- for (int i = 0; i < code()->InstructionBlockCount(); ++i) {
- MeetRegisterConstraints(
- code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i)));
+ for (auto block : code()->instruction_blocks()) {
+ MeetRegisterConstraints(block);
if (!AllocationOk()) return;
}
}
@@ -1170,55 +1172,9 @@
void RegisterAllocator::ResolvePhis() {
// Process the blocks in reverse order.
- for (int i = code()->InstructionBlockCount() - 1; i >= 0; --i) {
-    ResolvePhis(code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i)));
- }
-}
-
-
-void RegisterAllocator::ResolveControlFlow(LiveRange* range,
- const InstructionBlock* block,
- const InstructionBlock* pred) {
- LifetimePosition pred_end =
-      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
- LifetimePosition cur_start =
-      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
- LiveRange* pred_cover = NULL;
- LiveRange* cur_cover = NULL;
- LiveRange* cur_range = range;
- while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
- if (cur_range->CanCover(cur_start)) {
- DCHECK(cur_cover == NULL);
- cur_cover = cur_range;
- }
- if (cur_range->CanCover(pred_end)) {
- DCHECK(pred_cover == NULL);
- pred_cover = cur_range;
- }
- cur_range = cur_range->next();
- }
-
- if (cur_cover->IsSpilled()) return;
- DCHECK(pred_cover != NULL && cur_cover != NULL);
- if (pred_cover != cur_cover) {
- InstructionOperand* pred_op =
- pred_cover->CreateAssignedOperand(code_zone());
-    InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone());
- if (!pred_op->Equals(cur_op)) {
- GapInstruction* gap = NULL;
- if (block->PredecessorCount() == 1) {
- gap = code()->GapAt(block->first_instruction_index());
- } else {
- DCHECK(pred->SuccessorCount() == 1);
- gap = GetLastGap(pred);
-
-        Instruction* branch = InstructionAt(pred->last_instruction_index());
- DCHECK(!branch->HasPointerMap());
- USE(branch);
- }
- gap->GetOrCreateParallelMove(GapInstruction::START, code_zone())
- ->AddMove(pred_op, cur_op, code_zone());
- }
+ for (auto i = code()->instruction_blocks().rbegin();
+ i != code()->instruction_blocks().rend(); ++i) {
+ ResolvePhis(*i);
}
}
@@ -1288,25 +1244,175 @@
}
+namespace {
+
+class LiveRangeBound {
+ public:
+ explicit LiveRangeBound(const LiveRange* range)
+ : range_(range), start_(range->Start()), end_(range->End()) {
+ DCHECK(!range->IsEmpty());
+ }
+
+ bool CanCover(LifetimePosition position) {
+ return start_.Value() <= position.Value() &&
+ position.Value() < end_.Value();
+ }
+
+ const LiveRange* const range_;
+ const LifetimePosition start_;
+ const LifetimePosition end_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
+};
+
+
+struct FindResult {
+ const LiveRange* cur_cover_;
+ const LiveRange* pred_cover_;
+};
+
+
+class LiveRangeBoundArray {
+ public:
+ LiveRangeBoundArray() : length_(0), start_(nullptr) {}
+
+ bool ShouldInitialize() { return start_ == NULL; }
+
+ void Initialize(Zone* zone, const LiveRange* const range) {
+ size_t length = 0;
+ for (const LiveRange* i = range; i != NULL; i = i->next()) length++;
+ start_ = zone->NewArray<LiveRangeBound>(static_cast<int>(length));
+ length_ = length;
+ LiveRangeBound* curr = start_;
+ for (const LiveRange* i = range; i != NULL; i = i->next(), ++curr) {
+ new (curr) LiveRangeBound(i);
+ }
+ }
+
+ LiveRangeBound* Find(const LifetimePosition position) const {
+ size_t left_index = 0;
+ size_t right_index = length_;
+ while (true) {
+ size_t current_index = left_index + (right_index - left_index) / 2;
+ DCHECK(right_index > current_index);
+ LiveRangeBound* bound = &start_[current_index];
+ if (bound->start_.Value() <= position.Value()) {
+ if (position.Value() < bound->end_.Value()) return bound;
+ DCHECK(left_index < current_index);
+ left_index = current_index;
+ } else {
+ right_index = current_index;
+ }
+ }
+ }
+
+ void Find(const InstructionBlock* block, const InstructionBlock* pred,
+ FindResult* result) const {
+ const LifetimePosition pred_end =
+        LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
+ LiveRangeBound* bound = Find(pred_end);
+ result->pred_cover_ = bound->range_;
+    const LifetimePosition cur_start = LifetimePosition::FromInstructionIndex(
+ block->first_instruction_index());
+ // Common case.
+ if (bound->CanCover(cur_start)) {
+ result->cur_cover_ = bound->range_;
+ return;
+ }
+ result->cur_cover_ = Find(cur_start)->range_;
+ DCHECK(result->pred_cover_ != NULL && result->cur_cover_ != NULL);
+ }
+
+ private:
+ size_t length_;
+ LiveRangeBound* start_;
+
+ DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
+};
+
+
+class LiveRangeFinder {
+ public:
+ explicit LiveRangeFinder(const RegisterAllocator& allocator)
+ : allocator_(allocator),
+ bounds_length_(allocator.live_ranges().length()),
+ bounds_(allocator.local_zone()->NewArray<LiveRangeBoundArray>(
+ bounds_length_)) {
+ for (int i = 0; i < bounds_length_; ++i) {
+ new (&bounds_[i]) LiveRangeBoundArray();
+ }
+ }
+
+ LiveRangeBoundArray* ArrayFor(int operand_index) {
+ DCHECK(operand_index < bounds_length_);
+ const LiveRange* range = allocator_.live_ranges()[operand_index];
+ DCHECK(range != nullptr && !range->IsEmpty());
+ LiveRangeBoundArray* array = &bounds_[operand_index];
+ if (array->ShouldInitialize()) {
+ array->Initialize(allocator_.local_zone(), range);
+ }
+ return array;
+ }
+
+ private:
+ const RegisterAllocator& allocator_;
+ const int bounds_length_;
+ LiveRangeBoundArray* const bounds_;
+
+ DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
+};
+
+} // namespace
+
+
void RegisterAllocator::ResolveControlFlow() {
- for (int block_id = 1; block_id < code()->InstructionBlockCount();
- ++block_id) {
- const InstructionBlock* block =
-        code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id));
+ // Lazily linearize live ranges in memory for fast lookup.
+ LiveRangeFinder finder(*this);
+ for (auto block : code()->instruction_blocks()) {
if (CanEagerlyResolveControlFlow(block)) continue;
BitVector* live = live_in_sets_[block->rpo_number().ToInt()];
BitVector::Iterator iterator(live);
while (!iterator.Done()) {
- int operand_index = iterator.Current();
+ LiveRangeBoundArray* array = finder.ArrayFor(iterator.Current());
for (auto pred : block->predecessors()) {
- const InstructionBlock* cur = code()->InstructionBlockAt(pred);
- LiveRange* cur_range = LiveRangeFor(operand_index);
- ResolveControlFlow(cur_range, block, cur);
+ FindResult result;
+        const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
+ array->Find(block, pred_block, &result);
+ if (result.cur_cover_ == result.pred_cover_ ||
+ result.cur_cover_->IsSpilled())
+ continue;
+ ResolveControlFlow(block, result.cur_cover_, pred_block,
+ result.pred_cover_);
}
iterator.Advance();
}
}
}
+
+
+void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block,
+ const LiveRange* cur_cover,
+ const InstructionBlock* pred,
+ const LiveRange* pred_cover) {
+  InstructionOperand* pred_op = pred_cover->CreateAssignedOperand(code_zone());
+  InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone());
+ if (!pred_op->Equals(cur_op)) {
+ GapInstruction* gap = NULL;
+ if (block->PredecessorCount() == 1) {
+ gap = code()->GapAt(block->first_instruction_index());
+ } else {
+ DCHECK(pred->SuccessorCount() == 1);
+ gap = GetLastGap(pred);
+
+ Instruction* branch = InstructionAt(pred->last_instruction_index());
+ DCHECK(!branch->HasPointerMap());
+ USE(branch);
+ }
+ gap->GetOrCreateParallelMove(GapInstruction::START, code_zone())
+ ->AddMove(pred_op, cur_op, code_zone());
+ }
+}
void RegisterAllocator::BuildLiveRanges() {
@@ -1371,7 +1477,7 @@
while (!iterator.Done()) {
int operand_index = iterator.Current();
LiveRange* range = LiveRangeFor(operand_index);
- range->EnsureInterval(start, end, zone());
+ range->EnsureInterval(start, end, local_zone());
iterator.Advance();
}
@@ -1667,13 +1773,13 @@
void RegisterAllocator::AddToActive(LiveRange* range) {
TraceAlloc("Add live range %d to active\n", range->id());
- active_live_ranges_.Add(range, zone());
+ active_live_ranges_.Add(range, local_zone());
}
void RegisterAllocator::AddToInactive(LiveRange* range) {
TraceAlloc("Add live range %d to inactive\n", range->id());
- inactive_live_ranges_.Add(range, zone());
+ inactive_live_ranges_.Add(range, local_zone());
}
@@ -1685,13 +1791,13 @@
LiveRange* cur_range = unhandled_live_ranges_.at(i);
if (range->ShouldBeAllocatedBefore(cur_range)) {
TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i
+ 1);
- unhandled_live_ranges_.InsertAt(i + 1, range, zone());
+ unhandled_live_ranges_.InsertAt(i + 1, range, local_zone());
DCHECK(UnhandledIsSorted());
return;
}
}
TraceAlloc("Add live range %d to unhandled at start\n", range->id());
- unhandled_live_ranges_.InsertAt(0, range, zone());
+ unhandled_live_ranges_.InsertAt(0, range, local_zone());
DCHECK(UnhandledIsSorted());
}
@@ -1700,7 +1806,7 @@
if (range == NULL || range->IsEmpty()) return;
DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
TraceAlloc("Add live range %d to unhandled unsorted at end\n",
range->id());
- unhandled_live_ranges_.Add(range, zone());
+ unhandled_live_ranges_.Add(range, local_zone());
}
@@ -1742,7 +1848,7 @@
InstructionOperand* spill_operand = range->TopLevel()->GetSpillOperand();
if (spill_operand->IsConstant()) return;
if (spill_operand->index() >= 0) {
- reusable_slots_.Add(range, zone());
+ reusable_slots_.Add(range, local_zone());
}
}
@@ -1771,7 +1877,7 @@
void RegisterAllocator::ActiveToInactive(LiveRange* range) {
DCHECK(active_live_ranges_.Contains(range));
active_live_ranges_.RemoveElement(range);
- inactive_live_ranges_.Add(range, zone());
+ inactive_live_ranges_.Add(range, local_zone());
TraceAlloc("Moving live range %d from active to inactive\n",
range->id());
}
@@ -1787,7 +1893,7 @@
void RegisterAllocator::InactiveToActive(LiveRange* range) {
DCHECK(inactive_live_ranges_.Contains(range));
inactive_live_ranges_.RemoveElement(range);
- active_live_ranges_.Add(range, zone());
+ active_live_ranges_.Add(range, local_zone());
TraceAlloc("Moving live range %d from inactive to active\n",
range->id());
}
@@ -2063,7 +2169,7 @@
int vreg = GetVirtualRegister();
if (!AllocationOk()) return NULL;
LiveRange* result = LiveRangeFor(vreg);
- range->SplitAt(pos, result, zone());
+ range->SplitAt(pos, result, local_zone());
return result;
}
@@ -2171,10 +2277,10 @@
RegisterKind kind = range->Kind();
int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS);
if (kind == DOUBLE_REGISTERS) {
- op = DoubleStackSlotOperand::Create(index, zone());
+ op = DoubleStackSlotOperand::Create(index, local_zone());
} else {
DCHECK(kind == GENERAL_REGISTERS);
- op = StackSlotOperand::Create(index, zone());
+ op = StackSlotOperand::Create(index, local_zone());
}
}
first->SetSpillOperand(op);
=======================================
--- /branches/bleeding_edge/src/compiler/register-allocator.h Tue Nov 4 09:21:12 2014 UTC
+++ /branches/bleeding_edge/src/compiler/register-allocator.h Wed Nov 5 12:46:43 2014 UTC
@@ -186,12 +186,15 @@
UsePosition* first_pos() const { return first_pos_; }
LiveRange* parent() const { return parent_; }
LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
+ const LiveRange* TopLevel() const {
+ return (parent_ == NULL) ? this : parent_;
+ }
LiveRange* next() const { return next_; }
bool IsChild() const { return parent() != NULL; }
int id() const { return id_; }
bool IsFixed() const { return id_ < 0; }
bool IsEmpty() const { return first_interval() == NULL; }
- InstructionOperand* CreateAssignedOperand(Zone* zone);
+ InstructionOperand* CreateAssignedOperand(Zone* zone) const;
int assigned_register() const { return assigned_register_; }
int spill_start_index() const { return spill_start_index_; }
void set_assigned_register(int reg, Zone* zone);
@@ -337,6 +340,8 @@
return fixed_double_live_ranges_;
}
InstructionSequence* code() const { return code_; }
+  // This zone is for datastructures only needed during register allocation.
+ Zone* local_zone() const { return local_zone_; }
private:
int GetVirtualRegister() {
@@ -355,9 +360,6 @@
// Returns the register kind required by the given virtual register.
RegisterKind RequiredRegisterKind(int virtual_register) const;
-
-  // This zone is for datastructures only needed during register allocation.
- Zone* zone() const { return zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
// allocation.
@@ -465,8 +467,10 @@
bool IsBlockBoundary(LifetimePosition pos);
// Helper methods for resolving control flow.
- void ResolveControlFlow(LiveRange* range, const InstructionBlock* block,
- const InstructionBlock* pred);
+ void ResolveControlFlow(const InstructionBlock* block,
+ const LiveRange* cur_cover,
+ const InstructionBlock* pred,
+ const LiveRange* pred_cover);
void SetLiveRangeAssignedRegister(LiveRange* range, int reg);
@@ -494,7 +498,7 @@
const char* debug_name() const { return debug_name_; }
const RegisterConfiguration* config() const { return config_; }
- Zone* const zone_;
+ Zone* const local_zone_;
Frame* const frame_;
InstructionSequence* const code_;
const char* const debug_name_;
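
A second ingredient of the patch is visible in LiveRangeFinder::ArrayFor above: the
per-value bound arrays are built lazily, so the flattening cost is only paid for
values that actually appear in the live-in set of a block whose control flow cannot
be resolved eagerly. Below is a standalone sketch of that build-once/query-many
caching pattern; IntervalCache, Chain and the counts in main() are invented for the
illustration and are not V8 names.

// Illustration only: flattenings are counted so the example can show that a
// value queried from many predecessor edges is flattened exactly once.
#include <cstdio>
#include <map>
#include <utility>
#include <vector>

using Interval = std::pair<int, int>;  // [start, end), sorted and disjoint
using Chain = std::vector<Interval>;   // a value's chain of live ranges

class IntervalCache {
 public:
  explicit IntervalCache(const std::map<int, Chain>& chains) : chains_(chains) {}

  // Returns the flattened intervals for |vreg|, building them at most once.
  const Chain& For(int vreg) {
    auto it = cache_.find(vreg);
    if (it == cache_.end()) {
      ++builds_;  // one flattening pass per distinct value
      it = cache_.emplace(vreg, chains_.at(vreg)).first;
    }
    return it->second;
  }

  int builds() const { return builds_; }

 private:
  const std::map<int, Chain>& chains_;
  std::map<int, Chain> cache_;
  int builds_ = 0;
};

int main() {
  const std::map<int, Chain> chains = {{7, {{0, 4}, {4, 10}}}, {9, {{2, 6}}}};
  IntervalCache cache(chains);
  // Three predecessor edges mention value 7, one mentions value 9.
  cache.For(7);
  cache.For(7);
  cache.For(7);
  cache.For(9);
  std::printf("flattening passes: %d\n", cache.builds());  // prints 2, not 4
  return 0;
}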