Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(155)

Unified Diff: src/compiler/greedy-allocator.cc

Issue 1242123006: [turbofan] Deferred block spilling heuristic - first step. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: TEMP Created 5 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « no previous file | src/compiler/register-allocator.h » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: src/compiler/greedy-allocator.cc
diff --git a/src/compiler/greedy-allocator.cc b/src/compiler/greedy-allocator.cc
index 8d658c39ff4203bb420e7f184dd46f8634b1900f..81614092d2af3fc9ac65a61c91f754ad29073c1d 100644
--- a/src/compiler/greedy-allocator.cc
+++ b/src/compiler/greedy-allocator.cc
@@ -14,7 +14,6 @@ namespace compiler {
if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
} while (false)
-
namespace {
@@ -40,6 +39,17 @@ LiveRange* Split(LiveRange* range, RegisterAllocationData* data,
}
+void SpillToStackSlot(LiveRange* range, RegisterAllocationData* data) {
+ DCHECK(!range->spilled());
+ TRACE("Spilling live range %d\n", range->id());
+ auto first = range->TopLevel();
+ if (first->HasNoSpillType()) {
+ data->AssignSpillRangeToLiveRange(first);
+ }
+ range->Spill();
+}
+
+
// TODO(mtrofin): explain why splitting in gap START is always OK.
LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
const InstructionSequence* code,
@@ -67,6 +77,73 @@ int GetLastGapIndex(const UseInterval* interval) {
}
+int GetFirstInstructionIndex(const UseInterval* interval) {
+ int ret = interval->start().ToInstructionIndex();
+ if (!interval->start().IsGapPosition() && !interval->start().IsStart()) {
+ ++ret;
+ }
+ return ret;
+}
+
+
+int GetLastInstructionIndex(const UseInterval* interval) {
+ int ret = interval->end().ToInstructionIndex();
+ if (interval->end().IsGapPosition() || interval->end().IsStart()) {
+ // Meaning, this is either a gap or instruction start
+ --ret;
+ }
+ return ret;
+}
+
+
// Split before a call that requires parameters on stack, and spill until
// the first position requiring the parameter back in a register.
//
// Scans |range|'s intervals for the first call instruction at which the
// range has no kRequiresRegister use. If one is found, the range is split
// just before the call, split again (when possible) just after it, and the
// middle piece — the part live across the call — is spilled to a stack
// slot. Returns true iff such a split was performed. The pieces not spilled
// are handed back to |scheduler| for another allocation attempt.
bool TrySplittingAroundCalls(LiveRange* range, RegisterAllocationData* data,
                             AllocationScheduler* scheduler) {
  // Cursor over the range's use positions; advanced monotonically as we walk
  // the instructions, so each use is visited at most once across all
  // intervals (use positions are assumed sorted by position — consistent
  // with the forward-only advance below).
  UsePosition* use = range->first_pos();

  for (auto interval = range->first_interval(); interval != nullptr;
       interval = interval->next()) {
    int first_index = GetFirstInstructionIndex(interval);
    int last_index = GetLastInstructionIndex(interval);
    for (int index = first_index; index <= last_index; ++index) {
      auto instr = data->code()->InstructionAt(index);
      if (!instr->IsCall()) continue;
      // Catch the use cursor up to this instruction.
      while (use != nullptr && use->pos().ToInstructionIndex() < index) {
        use = use->next();
      }
      // If the call itself requires this range in a register, we cannot
      // spill it around this call; look for the next call instead.
      if (use != nullptr && use->pos().ToInstructionIndex() == index &&
          use->type() == UsePositionType::kRequiresRegister)
        continue;
      LifetimePosition before_call =
          GetSplitPositionForInstruction(range, data->code(), index);
      if (!before_call.IsValid()) continue;
      // |range| keeps the part before the call; |call_part| is the tail
      // starting at the call. Reschedule the head for allocation.
      LiveRange* call_part = Split(range, data, before_call);
      scheduler->Schedule(range);

      // Find the first use after the call that benefits from a register and
      // split the tail there, so only the across-the-call piece is spilled.
      for (auto next_use = use; next_use != nullptr;
           next_use = next_use->next()) {
        if (next_use->pos() <= before_call && !next_use->RegisterIsBeneficial())
          continue;
        // NOTE: splits immediately after the call rather than at the next
        // beneficial use.
        // TODO(review): consider instr_to_split =
        // next_use->pos().ToInstructionIndex() instead (see reviewer note),
        // which would widen the spilled region up to that use.
        int instr_to_split =
            index + 1;  // next_use->pos().ToInstructionIndex();
        LifetimePosition after_call = GetSplitPositionForInstruction(
            call_part, data->code(), instr_to_split);
        if (after_call.IsValid()) {
          LiveRange* keep = Split(call_part, data, after_call);
          scheduler->Schedule(keep);
          break;
        }
      }

      // Whatever remains in |call_part| (the piece spanning the call, or the
      // whole tail if no post-call split was possible) lives on the stack.
      SpillToStackSlot(call_part, data);
      return true;
    }
  }
  return false;
}
+
+
// Basic heuristic for advancing the algorithm, if any other splitting heuristic
// failed.
LifetimePosition GetLastResortSplitPosition(const LiveRange* range,
@@ -244,18 +321,16 @@ void GreedyAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
// If the range already has a spill operand and it doesn't need a
// register immediately, split it and spill the first part of the range.
if (pos == nullptr) {
- Spill(range);
+ SpillToStackSlot(range, data());
} else if (pos->pos() > range->Start().NextStart()) {
// Do not spill live range eagerly if use position that can benefit from
// the register is too close to the start of live range.
- auto split_pos = pos->pos();
- if (data()->IsBlockBoundary(split_pos.Start())) {
- split_pos = split_pos.Start();
- } else {
- split_pos = split_pos.PrevStart().End();
+ auto split_pos = GetSplitPositionForInstruction(
+ range, code(), pos->pos().ToInstructionIndex());
+ if (split_pos.IsValid()) {
+ Split(range, data(), split_pos);
+ SpillToStackSlot(range, data());
}
- Split(range, data(), split_pos);
- Spill(range);
}
}
}
@@ -326,11 +401,13 @@ void GreedyAllocator::SpillRangeAsLastResort(LiveRange* range) {
CHECK(range->CanBeSpilled(start));
DCHECK(range->NextRegisterPosition(start) == nullptr);
- Spill(range);
+ SpillToStackSlot(range, data());
}
void GreedyAllocator::SplitOrSpillBlockedRange(LiveRange* range) {
+ if (TrySplittingAroundCalls(range, data(), &scheduler())) return;
+
// TODO(mtrofin): replace the call below with the entry point selecting
// heuristics, once they exist, out of which GLRSP is the last one.
auto pos = GetLastResortSplitPosition(range, code());
« no previous file with comments | « no previous file | src/compiler/register-allocator.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698