| Index: src/lithium-allocator.cc
|
| ===================================================================
|
| --- src/lithium-allocator.cc (revision 6800)
|
| +++ src/lithium-allocator.cc (working copy)
|
| @@ -25,7 +25,7 @@
|
| // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
| -#include "lithium-allocator.h"
|
| +#include "lithium-allocator-inl.h"
|
|
|
| #include "hydrogen.h"
|
| #include "string-stream.h"
|
| @@ -71,73 +71,24 @@
|
| }
|
|
|
|
|
| -void LOperand::PrintTo(StringStream* stream) {
|
| - LUnallocated* unalloc = NULL;
|
| - switch (kind()) {
|
| - case INVALID:
|
| - break;
|
| - case UNALLOCATED:
|
| - unalloc = LUnallocated::cast(this);
|
| - stream->Add("v%d", unalloc->virtual_register());
|
| - switch (unalloc->policy()) {
|
| - case LUnallocated::NONE:
|
| - break;
|
| - case LUnallocated::FIXED_REGISTER: {
|
| - const char* register_name =
|
| - Register::AllocationIndexToString(unalloc->fixed_index());
|
| - stream->Add("(=%s)", register_name);
|
| - break;
|
| - }
|
| - case LUnallocated::FIXED_DOUBLE_REGISTER: {
|
| - const char* double_register_name =
|
| - DoubleRegister::AllocationIndexToString(unalloc->fixed_index());
|
| - stream->Add("(=%s)", double_register_name);
|
| - break;
|
| - }
|
| - case LUnallocated::FIXED_SLOT:
|
| - stream->Add("(=%dS)", unalloc->fixed_index());
|
| - break;
|
| - case LUnallocated::MUST_HAVE_REGISTER:
|
| - stream->Add("(R)");
|
| - break;
|
| - case LUnallocated::WRITABLE_REGISTER:
|
| - stream->Add("(WR)");
|
| - break;
|
| - case LUnallocated::SAME_AS_FIRST_INPUT:
|
| - stream->Add("(1)");
|
| - break;
|
| - case LUnallocated::ANY:
|
| - stream->Add("(-)");
|
| - break;
|
| - case LUnallocated::IGNORE:
|
| - stream->Add("(0)");
|
| - break;
|
| - }
|
| - break;
|
| - case CONSTANT_OPERAND:
|
| - stream->Add("[constant:%d]", index());
|
| - break;
|
| - case STACK_SLOT:
|
| - stream->Add("[stack:%d]", index());
|
| - break;
|
| - case DOUBLE_STACK_SLOT:
|
| - stream->Add("[double_stack:%d]", index());
|
| - break;
|
| - case REGISTER:
|
| - stream->Add("[%s|R]", Register::AllocationIndexToString(index()));
|
| - break;
|
| - case DOUBLE_REGISTER:
|
| - stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index()));
|
| - break;
|
| - case ARGUMENT:
|
| - stream->Add("[arg:%d]", index());
|
| - break;
|
| +UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
|
| + : operand_(operand),
|
| + hint_(NULL),
|
| + pos_(pos),
|
| + next_(NULL),
|
| + requires_reg_(false),
|
| + register_beneficial_(true) {
|
| + if (operand_ != NULL && operand_->IsUnallocated()) {
|
| + LUnallocated* unalloc = LUnallocated::cast(operand_);
|
| + requires_reg_ = unalloc->HasRegisterPolicy();
|
| + register_beneficial_ = !unalloc->HasAnyPolicy();
|
| }
|
| + ASSERT(pos_.IsValid());
|
| }
|
|
|
| -int LOperand::VirtualRegister() {
|
| - LUnallocated* unalloc = LUnallocated::cast(this);
|
| - return unalloc->virtual_register();
|
| +
|
| +bool UsePosition::HasHint() const {
|
| + return hint_ != NULL && !hint_->IsUnallocated();
|
| }
|
|
|
|
|
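In the hunk above, the new UsePosition constructor derives its register requirements from the operand's allocation policy instead of having callers record them separately: a register-demanding policy makes a register required, and anything but the fully unconstrained ANY policy makes a register beneficial. A minimal standalone sketch of that derivation, using a reduced, illustrative Policy enum rather than the real LUnallocated interface:

    #include <cassert>
    #include <cstdio>

    // Reduced, illustrative stand-in for LUnallocated's allocation policies.
    enum class Policy { kNone, kAny, kMustHaveRegister, kWritableRegister };

    struct Use {
      Policy policy;
      bool requires_reg;
      bool register_beneficial;

      explicit Use(Policy p)
          : policy(p),
            // A register is required when the policy explicitly asks for one.
            requires_reg(p == Policy::kMustHaveRegister ||
                         p == Policy::kWritableRegister),
            // Only the fully unconstrained ANY policy gains nothing from a register.
            register_beneficial(p != Policy::kAny) {}
    };

    int main() {
      assert(Use(Policy::kMustHaveRegister).requires_reg);
      assert(!Use(Policy::kAny).register_beneficial);
      assert(!Use(Policy::kNone).requires_reg && Use(Policy::kNone).register_beneficial);
      std::printf("policy-derived use flags look consistent\n");
      return 0;
    }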
| @@ -190,6 +141,53 @@
|
| #endif
|
|
|
|
|
| +LiveRange::LiveRange(int id)
|
| + : id_(id),
|
| + spilled_(false),
|
| + assigned_register_(kInvalidAssignment),
|
| + assigned_register_kind_(NONE),
|
| + last_interval_(NULL),
|
| + first_interval_(NULL),
|
| + first_pos_(NULL),
|
| + parent_(NULL),
|
| + next_(NULL),
|
| + current_interval_(NULL),
|
| + last_processed_use_(NULL),
|
| + spill_start_index_(kMaxInt) {
|
| + spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
|
| +}
|
| +
|
| +
|
| +void LiveRange::set_assigned_register(int reg, RegisterKind register_kind) {
|
| + ASSERT(!HasRegisterAssigned() && !IsSpilled());
|
| + assigned_register_ = reg;
|
| + assigned_register_kind_ = register_kind;
|
| + ConvertOperands();
|
| +}
|
| +
|
| +
|
| +void LiveRange::MakeSpilled() {
|
| + ASSERT(!IsSpilled());
|
| + ASSERT(TopLevel()->HasAllocatedSpillOperand());
|
| + spilled_ = true;
|
| + assigned_register_ = kInvalidAssignment;
|
| + ConvertOperands();
|
| +}
|
| +
|
| +
|
| +bool LiveRange::HasAllocatedSpillOperand() const {
|
| + return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
|
| +}
|
| +
|
| +
|
| +void LiveRange::SetSpillOperand(LOperand* operand) {
|
| + ASSERT(!operand->IsUnallocated());
|
| + ASSERT(spill_operand_ != NULL);
|
| + ASSERT(spill_operand_->IsUnallocated());
|
| + spill_operand_->ConvertTo(operand->kind(), operand->index());
|
| +}
|
| +
|
| +
|
| UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
|
| UsePosition* use_pos = last_processed_use_;
|
| if (use_pos == NULL) use_pos = first_pos();
|
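In the hunk above, a LiveRange starts out with an IGNORE-policy placeholder as its spill operand and later rewrites that same object in place via ConvertTo, so any operand pointer handed out earlier automatically sees the final stack slot. A small sketch of this placeholder-converted-in-place idea; Operand and Range here are hypothetical stand-ins, not the LOperand/LiveRange API:

    #include <cassert>

    // Hypothetical stand-in for LOperand: just a kind and an index.
    struct Operand {
      enum Kind { kUnallocated, kStackSlot };
      Kind kind = kUnallocated;
      int index = 0;
      void ConvertTo(Kind k, int i) { kind = k; index = i; }  // rewrite in place
    };

    struct Range {
      Operand spill;  // starts out unallocated, like the IGNORE placeholder
      bool HasAllocatedSpillOperand() const { return spill.kind != Operand::kUnallocated; }
      void SetSpillOperand(const Operand& final_slot) {
        assert(!HasAllocatedSpillOperand());                 // assigned at most once
        spill.ConvertTo(final_slot.kind, final_slot.index);  // aliases stay valid
      }
    };

    int main() {
      Range r;
      Operand* alias = &r.spill;  // pointer handed out before spilling is decided
      Operand slot;
      slot.ConvertTo(Operand::kStackSlot, 3);
      r.SetSpillOperand(slot);
      assert(alias->kind == Operand::kStackSlot && alias->index == 3);
      return 0;
    }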
| @@ -534,7 +532,7 @@
|
|
|
| void LAllocator::InitializeLivenessAnalysis() {
|
| // Initialize the live_in sets for each block to NULL.
|
| - int block_count = graph()->blocks()->length();
|
| + int block_count = graph_->blocks()->length();
|
| live_in_sets_.Initialize(block_count);
|
| live_in_sets_.AddBlock(NULL, block_count);
|
| }
|
| @@ -615,7 +613,7 @@
|
| }
|
| if (is_tagged) {
|
| TraceAlloc("Fixed reg is tagged at %d\n", pos);
|
| - LInstruction* instr = chunk_->instructions()->at(pos);
|
| + LInstruction* instr = InstructionAt(pos);
|
| if (instr->HasPointerMap()) {
|
| instr->pointer_map()->RecordPointer(operand);
|
| }
|
| @@ -670,17 +668,17 @@
|
| }
|
|
|
|
|
| -LGap* LAllocator::GetLastGap(HBasicBlock* block) const {
|
| +LGap* LAllocator::GetLastGap(HBasicBlock* block) {
|
| int last_instruction = block->last_instruction_index();
|
| int index = chunk_->NearestGapPos(last_instruction);
|
| - return chunk_->GetGapAt(index);
|
| + return GapAt(index);
|
| }
|
|
|
|
|
| HPhi* LAllocator::LookupPhi(LOperand* operand) const {
|
| if (!operand->IsUnallocated()) return NULL;
|
| int index = operand->VirtualRegister();
|
| - HValue* instr = graph()->LookupValue(index);
|
| + HValue* instr = graph_->LookupValue(index);
|
| if (instr != NULL && instr->IsPhi()) {
|
| return HPhi::cast(instr);
|
| }
|
| @@ -739,16 +737,16 @@
|
| void LAllocator::AddConstraintsGapMove(int index,
|
| LOperand* from,
|
| LOperand* to) {
|
| - LGap* gap = chunk_->GetGapAt(index);
|
| + LGap* gap = GapAt(index);
|
| LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
|
| if (from->IsUnallocated()) {
|
| const ZoneList<LMoveOperands>* move_operands = move->move_operands();
|
| for (int i = 0; i < move_operands->length(); ++i) {
|
| LMoveOperands cur = move_operands->at(i);
|
| - LOperand* cur_to = cur.to();
|
| + LOperand* cur_to = cur.destination();
|
| if (cur_to->IsUnallocated()) {
|
| if (cur_to->VirtualRegister() == from->VirtualRegister()) {
|
| - move->AddMove(cur.from(), to);
|
| + move->AddMove(cur.source(), to);
|
| return;
|
| }
|
| }
|
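In the hunk above, AddConstraintsGapMove now reads the parallel move through source()/destination(), and when the gap already contains a move whose destination is the virtual register being copied from, the new move forwards from that move's source instead. A toy version of the forwarding scan, using plain structs rather than LParallelMove/LMoveOperands:

    #include <cassert>
    #include <vector>

    struct Move { int source; int destination; };  // virtual register numbers

    // Add a move `from -> to` to a gap, but if a queued move already produces
    // `from`, copy straight from that move's original source instead.
    void AddGapMoveWithForwarding(std::vector<Move>* gap, int from, int to) {
      for (const Move& m : *gap) {
        if (m.destination == from) {
          gap->push_back({m.source, to});  // forward through the existing move
          return;
        }
      }
      gap->push_back({from, to});
    }

    int main() {
      std::vector<Move> gap = {{7, 12}};       // existing move v7 -> v12
      AddGapMoveWithForwarding(&gap, 12, 20);  // request v12 -> v20
      assert(gap.back().source == 7 && gap.back().destination == 20);
      return 0;
    }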
| @@ -762,24 +760,24 @@
|
| int start = block->first_instruction_index();
|
| int end = block->last_instruction_index();
|
| for (int i = start; i <= end; ++i) {
|
| - if (chunk_->IsGapAt(i)) {
|
| - InstructionSummary* summary = NULL;
|
| - InstructionSummary* prev_summary = NULL;
|
| - if (i < end) summary = GetSummary(i + 1);
|
| - if (i > start) prev_summary = GetSummary(i - 1);
|
| - MeetConstraintsBetween(prev_summary, summary, i);
|
| + if (IsGapAt(i)) {
|
| + LInstruction* instr = NULL;
|
| + LInstruction* prev_instr = NULL;
|
| + if (i < end) instr = InstructionAt(i + 1);
|
| + if (i > start) prev_instr = InstructionAt(i - 1);
|
| + MeetConstraintsBetween(prev_instr, instr, i);
|
| }
|
| }
|
| }
|
|
|
|
|
| -void LAllocator::MeetConstraintsBetween(InstructionSummary* first,
|
| - InstructionSummary* second,
|
| +void LAllocator::MeetConstraintsBetween(LInstruction* first,
|
| + LInstruction* second,
|
| int gap_index) {
|
| // Handle fixed temporaries.
|
| if (first != NULL) {
|
| - for (int i = 0; i < first->TempCount(); ++i) {
|
| - LUnallocated* temp = LUnallocated::cast(first->TempAt(i));
|
| + for (TempIterator it(first); it.HasNext(); it.Advance()) {
|
| + LUnallocated* temp = LUnallocated::cast(it.Next());
|
| if (temp->HasFixedPolicy()) {
|
| AllocateFixed(temp, gap_index - 1, false);
|
| }
|
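In the hunk above, constraints are now read straight off the LInstruction through TempIterator (and UseIterator below) rather than from a side InstructionSummary. A self-contained sketch of the HasNext()/Next()/Advance() iteration shape used in the patch; the class here walks a plain vector and is illustrative only:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Minimal iterator with the HasNext()/Next()/Advance() surface seen in the
    // patch; it walks a plain vector here rather than an instruction's temps.
    class TempIterator {
     public:
      explicit TempIterator(const std::vector<int>* temps) : temps_(temps), i_(0) {}
      bool HasNext() const { return i_ < temps_->size(); }
      int Next() const { return (*temps_)[i_]; }  // current element, does not advance
      void Advance() { ++i_; }
     private:
      const std::vector<int>* temps_;
      std::size_t i_;
    };

    int main() {
      std::vector<int> temps = {1, 2, 3};
      int sum = 0;
      for (TempIterator it(&temps); it.HasNext(); it.Advance()) sum += it.Next();
      assert(sum == 6);
      return 0;
    }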
| @@ -812,7 +810,7 @@
|
| // and splitting of live ranges do not account for it.
|
| // Thus it should be inserted to a lifetime position corresponding to
|
| // the instruction end.
|
| - LGap* gap = chunk_->GetGapAt(gap_index);
|
| + LGap* gap = GapAt(gap_index);
|
| LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
|
| move->AddMove(first_output, range->GetSpillOperand());
|
| }
|
| @@ -820,14 +818,18 @@
|
|
|
| // Handle fixed input operands of second instruction.
|
| if (second != NULL) {
|
| - for (int i = 0; i < second->InputCount(); ++i) {
|
| - LUnallocated* cur_input = LUnallocated::cast(second->InputAt(i));
|
| + for (UseIterator it(second); it.HasNext(); it.Advance()) {
|
| + LUnallocated* cur_input = LUnallocated::cast(it.Next());
|
| if (cur_input->HasFixedPolicy()) {
|
| LUnallocated* input_copy = cur_input->CopyUnconstrained();
|
| bool is_tagged = HasTaggedValue(cur_input->VirtualRegister());
|
| AllocateFixed(cur_input, gap_index + 1, is_tagged);
|
| AddConstraintsGapMove(gap_index, input_copy, cur_input);
|
| } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
|
| + // The live range of writable input registers always goes until the end
|
| + // of the instruction.
|
| + ASSERT(!cur_input->IsUsedAtStart());
|
| +
|
| LUnallocated* input_copy = cur_input->CopyUnconstrained();
|
| cur_input->set_virtual_register(next_virtual_register_++);
|
|
|
| @@ -837,7 +839,6 @@
|
| cur_input->virtual_register() - first_artificial_register_);
|
| }
|
|
|
| - second->AddTemp(cur_input);
|
| AddConstraintsGapMove(gap_index, input_copy, cur_input);
|
| }
|
| }
|
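In the hunk above, a WRITABLE_REGISTER input is given a fresh virtual register, with a gap move copying the original value into it, because the instruction is allowed to clobber that copy; it is no longer added to the instruction as an extra temp. A simplified sketch of the renaming step, with hypothetical Unallocated/GapMove types standing in for the lithium classes:

    #include <cassert>

    struct Unallocated { int vreg; };               // stand-in for LUnallocated
    struct GapMove { int from_vreg; int to_vreg; };

    // A writable-register input may be clobbered by the instruction, so it must
    // not alias the original value: give the input a fresh virtual register and
    // plan a copy (gap move) from the original into it before the instruction.
    GapMove RenameWritableInput(Unallocated* input, int* next_virtual_register) {
      int original = input->vreg;
      input->vreg = (*next_virtual_register)++;     // fresh register for the copy
      return GapMove{original, input->vreg};
    }

    int main() {
      int next_vreg = 100;
      Unallocated input{42};
      GapMove move = RenameWritableInput(&input, &next_vreg);
      assert(move.from_vreg == 42 && move.to_vreg == 100 && input.vreg == 100);
      return 0;
    }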
| @@ -847,7 +848,7 @@
|
| if (second != NULL && second->Output() != NULL) {
|
| LUnallocated* second_output = LUnallocated::cast(second->Output());
|
| if (second_output->HasSameAsInputPolicy()) {
|
| - LUnallocated* cur_input = LUnallocated::cast(second->InputAt(0));
|
| + LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
|
| int output_vreg = second_output->VirtualRegister();
|
| int input_vreg = cur_input->VirtualRegister();
|
|
|
| @@ -857,7 +858,7 @@
|
|
|
| if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
|
| int index = gap_index + 1;
|
| - LInstruction* instr = chunk_->instructions()->at(index);
|
| + LInstruction* instr = InstructionAt(index);
|
| if (instr->HasPointerMap()) {
|
| instr->pointer_map()->RecordPointer(input_copy);
|
| }
|
| @@ -885,16 +886,16 @@
|
| LifetimePosition curr_position =
|
| LifetimePosition::FromInstructionIndex(index);
|
|
|
| - if (chunk_->IsGapAt(index)) {
|
| + if (IsGapAt(index)) {
|
| // We have a gap at this position.
|
| - LGap* gap = chunk_->GetGapAt(index);
|
| + LGap* gap = GapAt(index);
|
| LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
|
| const ZoneList<LMoveOperands>* move_operands = move->move_operands();
|
| for (int i = 0; i < move_operands->length(); ++i) {
|
| LMoveOperands* cur = &move_operands->at(i);
|
| if (cur->IsIgnored()) continue;
|
| - LOperand* from = cur->from();
|
| - LOperand* to = cur->to();
|
| + LOperand* from = cur->source();
|
| + LOperand* to = cur->destination();
|
| HPhi* phi = LookupPhi(to);
|
| LOperand* hint = to;
|
| if (phi != NULL) {
|
| @@ -921,17 +922,17 @@
|
| }
|
| }
|
| } else {
|
| - ASSERT(!chunk_->IsGapAt(index));
|
| - InstructionSummary* summary = GetSummary(index);
|
| + ASSERT(!IsGapAt(index));
|
| + LInstruction* instr = InstructionAt(index);
|
|
|
| - if (summary != NULL) {
|
| - LOperand* output = summary->Output();
|
| + if (instr != NULL) {
|
| + LOperand* output = instr->Output();
|
| if (output != NULL) {
|
| if (output->IsUnallocated()) live->Remove(output->VirtualRegister());
|
| Define(curr_position, output, NULL);
|
| }
|
|
|
| - if (summary->IsCall()) {
|
| + if (instr->IsMarkedAsCall()) {
|
| for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
|
| if (output == NULL || !output->IsRegister() ||
|
| output->index() != i) {
|
| @@ -940,6 +941,9 @@
|
| curr_position.InstructionEnd());
|
| }
|
| }
|
| + }
|
| +
|
| + if (instr->IsMarkedAsCall() || instr->IsMarkedAsSaveDoubles()) {
|
| for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
|
| if (output == NULL || !output->IsDoubleRegister() ||
|
| output->index() != i) {
|
| @@ -950,8 +954,8 @@
|
| }
|
| }
|
|
|
| - for (int i = 0; i < summary->InputCount(); ++i) {
|
| - LOperand* input = summary->InputAt(i);
|
| + for (UseIterator it(instr); it.HasNext(); it.Advance()) {
|
| + LOperand* input = it.Next();
|
|
|
| LifetimePosition use_pos;
|
| if (input->IsUnallocated() &&
|
| @@ -965,9 +969,9 @@
|
| if (input->IsUnallocated()) live->Add(input->VirtualRegister());
|
| }
|
|
|
| - for (int i = 0; i < summary->TempCount(); ++i) {
|
| - LOperand* temp = summary->TempAt(i);
|
| - if (summary->IsCall()) {
|
| + for (TempIterator it(instr); it.HasNext(); it.Advance()) {
|
| + LOperand* temp = it.Next();
|
| + if (instr->IsMarkedAsCall()) {
|
| if (temp->IsRegister()) continue;
|
| if (temp->IsUnallocated()) {
|
| LUnallocated* temp_unalloc = LUnallocated::cast(temp);
|
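In the hunk above, an instruction marked as a call (and, for double registers, also one marked as saving doubles) blocks every allocatable register other than its output for the duration of the instruction via short fixed live ranges. A sketch of the "everything but the output" selection, with plain register indices standing in for fixed ranges:

    #include <cassert>
    #include <vector>

    // At an instruction marked as a call, every allocatable register that is
    // not the output is unavailable for the duration of the call.
    std::vector<int> BlockedRegistersAtCall(int num_allocatable, int output_reg) {
      std::vector<int> blocked;
      for (int i = 0; i < num_allocatable; ++i) {
        if (i != output_reg) blocked.push_back(i);  // output register stays usable
      }
      return blocked;
    }

    int main() {
      std::vector<int> blocked = BlockedRegistersAtCall(8, 3);  // pretend 8 registers, output in #3
      assert(blocked.size() == 7);
      for (int r : blocked) assert(r != 3);
      return 0;
    }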
| @@ -1038,9 +1042,9 @@
|
|
|
|
|
| void LAllocator::MeetRegisterConstraints() {
|
| - HPhase phase("Register constraints", chunk());
|
| + HPhase phase("Register constraints", chunk_);
|
| first_artificial_register_ = next_virtual_register_;
|
| - const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
|
| + const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
|
| for (int i = 0; i < blocks->length(); ++i) {
|
| HBasicBlock* block = blocks->at(i);
|
| MeetRegisterConstraints(block);
|
| @@ -1049,10 +1053,10 @@
|
|
|
|
|
| void LAllocator::ResolvePhis() {
|
| - HPhase phase("Resolve phis", chunk());
|
| + HPhase phase("Resolve phis", chunk_);
|
|
|
| // Process the blocks in reverse order.
|
| - const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
|
| + const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
|
| for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
|
| HBasicBlock* block = blocks->at(block_id);
|
| ResolvePhis(block);
|
| @@ -1064,9 +1068,7 @@
|
| HBasicBlock* block,
|
| HBasicBlock* pred) {
|
| LifetimePosition pred_end =
|
| - LifetimePosition::FromInstructionIndex(pred->last_instruction_index()).
|
| - PrevInstruction();
|
| -
|
| + LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
|
| LifetimePosition cur_start =
|
| LifetimePosition::FromInstructionIndex(block->first_instruction_index());
|
| LiveRange* pred_cover = NULL;
|
| @@ -1092,7 +1094,7 @@
|
| if (!pred_op->Equals(cur_op)) {
|
| LGap* gap = NULL;
|
| if (block->predecessors()->length() == 1) {
|
| - gap = chunk_->GetGapAt(block->first_instruction_index());
|
| + gap = GapAt(block->first_instruction_index());
|
| } else {
|
| ASSERT(pred->end()->SecondSuccessor() == NULL);
|
| gap = GetLastGap(pred);
|
| @@ -1105,19 +1107,19 @@
|
|
|
| LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
|
| int index = pos.InstructionIndex();
|
| - if (chunk_->IsGapAt(index)) {
|
| - LGap* gap = chunk_->GetGapAt(index);
|
| + if (IsGapAt(index)) {
|
| + LGap* gap = GapAt(index);
|
| return gap->GetOrCreateParallelMove(
|
| pos.IsInstructionStart() ? LGap::START : LGap::END);
|
| }
|
| int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
|
| - return chunk_->GetGapAt(gap_pos)->GetOrCreateParallelMove(
|
| + return GapAt(gap_pos)->GetOrCreateParallelMove(
|
| (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
|
| }
|
|
|
|
|
| HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
|
| - LGap* gap = chunk_->GetGapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
|
| + LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
|
| return gap->block();
|
| }
|
|
|
| @@ -1164,7 +1166,7 @@
|
|
|
| void LAllocator::ResolveControlFlow() {
|
| HPhase phase("Resolve control flow", this);
|
| - const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
|
| + const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
|
| for (int block_id = 1; block_id < blocks->length(); ++block_id) {
|
| HBasicBlock* block = blocks->at(block_id);
|
| if (CanEagerlyResolveControlFlow(block)) continue;
|
| @@ -1187,7 +1189,7 @@
|
| HPhase phase("Build live ranges", this);
|
| InitializeLivenessAnalysis();
|
| // Process the blocks in reverse order.
|
| - const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
|
| + const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
|
| for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
|
| HBasicBlock* block = blocks->at(block_id);
|
| BitVector* live = ComputeLiveOut(block);
|
| @@ -1211,9 +1213,9 @@
|
| LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
|
| LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
|
| for (int j = 0; j < move->move_operands()->length(); ++j) {
|
| - LOperand* to = move->move_operands()->at(j).to();
|
| + LOperand* to = move->move_operands()->at(j).destination();
|
| if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) {
|
| - hint = move->move_operands()->at(j).from();
|
| + hint = move->move_operands()->at(j).source();
|
| phi_operand = to;
|
| break;
|
| }
|
| @@ -1241,7 +1243,7 @@
|
| LifetimePosition start = LifetimePosition::FromInstructionIndex(
|
| block->first_instruction_index());
|
| LifetimePosition end = LifetimePosition::FromInstructionIndex(
|
| - back_edge->last_instruction_index());
|
| + back_edge->last_instruction_index()).NextInstruction();
|
| while (!iterator.Done()) {
|
| int operand_index = iterator.Current();
|
| LiveRange* range = LiveRangeFor(operand_index);
|
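In the hunk above, values live at a loop header are kept live across the whole loop, and the end of that cover now extends one instruction past the back edge's last instruction (NextInstruction), so the final instruction of the back edge is included. A tiny sketch using half-open integer intervals as a simplified model of LifetimePosition:

    #include <cassert>

    // Values live at a loop header are kept live for the whole loop; with the
    // change above the cover includes the back edge's final instruction, so in
    // half-open [start, end) terms the end is one past that index.
    struct Interval { int start; int end; };

    Interval LoopCover(int header_first_index, int back_edge_last_index) {
      return Interval{header_first_index, back_edge_last_index + 1};
    }

    int main() {
      Interval cover = LoopCover(10, 42);
      assert(cover.start == 10 && cover.end == 43);  // instruction 42 is inside the cover
      return 0;
    }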
| @@ -1262,7 +1264,7 @@
|
| found = true;
|
| int operand_index = iterator.Current();
|
| PrintF("Function: %s\n",
|
| - *graph()->info()->function()->debug_name()->ToCString());
|
| + *graph_->info()->function()->debug_name()->ToCString());
|
| PrintF("Value %d used before first definition!\n", operand_index);
|
| LiveRange* range = LiveRangeFor(operand_index);
|
| PrintF("First use is at %d\n", range->first_pos()->pos().Value());
|
| @@ -1467,7 +1469,7 @@
|
| if (current->HasAllocatedSpillOperand()) {
|
| TraceAlloc("Live range %d already has a spill operand\n", current->id());
|
| LifetimePosition next_pos = position;
|
| - if (chunk_->IsGapAt(next_pos.InstructionIndex())) {
|
| + if (IsGapAt(next_pos.InstructionIndex())) {
|
| next_pos = next_pos.NextInstruction();
|
| }
|
| UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
|
| @@ -1554,14 +1556,8 @@
|
| }
|
|
|
|
|
| -void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
|
| - operand->set_virtual_register(value->id());
|
| - current_summary()->AddInput(operand);
|
| -}
|
| -
|
| -
|
| bool LAllocator::HasTaggedValue(int virtual_register) const {
|
| - HValue* value = graph()->LookupValue(virtual_register);
|
| + HValue* value = graph_->LookupValue(virtual_register);
|
| if (value == NULL) return false;
|
| return value->representation().IsTagged();
|
| }
|
| @@ -1569,7 +1565,7 @@
|
|
|
| RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
|
| if (virtual_register < first_artificial_register_) {
|
| - HValue* value = graph()->LookupValue(virtual_register);
|
| + HValue* value = graph_->LookupValue(virtual_register);
|
| if (value != NULL && value->representation().IsDouble()) {
|
| return DOUBLE_REGISTERS;
|
| }
|
| @@ -1582,34 +1578,8 @@
|
| }
|
|
|
|
|
| -void LAllocator::MarkAsCall() {
|
| - // Call instructions can use only fixed registers as
|
| - // temporaries and outputs because all registers
|
| - // are blocked by the calling convention.
|
| - // Inputs can use either fixed register or have a short lifetime (be
|
| - // used at start of the instruction).
|
| - InstructionSummary* summary = current_summary();
|
| -#ifdef DEBUG
|
| - ASSERT(summary->Output() == NULL ||
|
| - LUnallocated::cast(summary->Output())->HasFixedPolicy() ||
|
| - !LUnallocated::cast(summary->Output())->HasRegisterPolicy());
|
| - for (int i = 0; i < summary->InputCount(); i++) {
|
| - ASSERT(LUnallocated::cast(summary->InputAt(i))->HasFixedPolicy() ||
|
| - LUnallocated::cast(summary->InputAt(i))->IsUsedAtStart() ||
|
| - !LUnallocated::cast(summary->InputAt(i))->HasRegisterPolicy());
|
| - }
|
| - for (int i = 0; i < summary->TempCount(); i++) {
|
| - ASSERT(LUnallocated::cast(summary->TempAt(i))->HasFixedPolicy() ||
|
| - !LUnallocated::cast(summary->TempAt(i))->HasRegisterPolicy());
|
| - }
|
| -#endif
|
| - summary->MarkAsCall();
|
| -}
|
| -
|
| -
|
| void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
|
| operand->set_virtual_register(instr->id());
|
| - current_summary()->SetOutput(operand);
|
| }
|
|
|
|
|
| @@ -1618,43 +1588,19 @@
|
| if (!operand->HasFixedPolicy()) {
|
| operand->set_virtual_register(next_virtual_register_++);
|
| }
|
| - current_summary()->AddTemp(operand);
|
| }
|
|
|
|
|
| -int LAllocator::max_initial_value_ids() {
|
| - return LUnallocated::kMaxVirtualRegisters / 32;
|
| +void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
|
| + operand->set_virtual_register(value->id());
|
| }
|
|
|
|
|
| -void LAllocator::BeginInstruction() {
|
| - if (next_summary_ == NULL) {
|
| - next_summary_ = new InstructionSummary();
|
| - }
|
| - summary_stack_.Add(next_summary_);
|
| - next_summary_ = NULL;
|
| +int LAllocator::max_initial_value_ids() {
|
| + return LUnallocated::kMaxVirtualRegisters / 32;
|
| }
|
|
|
|
|
| -void LAllocator::SummarizeInstruction(int index) {
|
| - InstructionSummary* sum = summary_stack_.RemoveLast();
|
| - if (summaries_.length() <= index) {
|
| - summaries_.AddBlock(NULL, index + 1 - summaries_.length());
|
| - }
|
| - ASSERT(summaries_[index] == NULL);
|
| - if (sum->Output() != NULL || sum->InputCount() > 0 || sum->TempCount() > 0) {
|
| - summaries_[index] = sum;
|
| - } else {
|
| - next_summary_ = sum;
|
| - }
|
| -}
|
| -
|
| -
|
| -void LAllocator::OmitInstruction() {
|
| - summary_stack_.RemoveLast();
|
| -}
|
| -
|
| -
|
| void LAllocator::AddToActive(LiveRange* range) {
|
| TraceAlloc("Add live range %d to active\n", range->id());
|
| active_live_ranges_.Add(range);
|
| @@ -2000,30 +1946,21 @@
|
|
|
| bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
|
| return pos.IsInstructionStart() &&
|
| - chunk_->instructions()->at(pos.InstructionIndex())->IsLabel();
|
| + InstructionAt(pos.InstructionIndex())->IsLabel();
|
| }
|
|
|
|
|
| -void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) {
|
| - UsePosition* prev_pos = prev->AddUsePosition(
|
| - LifetimePosition::FromInstructionIndex(pos));
|
| - UsePosition* next_pos = next->AddUsePosition(
|
| - LifetimePosition::FromInstructionIndex(pos));
|
| - LOperand* prev_operand = prev_pos->operand();
|
| - LOperand* next_operand = next_pos->operand();
|
| - LGap* gap = chunk_->GetGapAt(pos);
|
| - gap->GetOrCreateParallelMove(LGap::START)->
|
| - AddMove(prev_operand, next_operand);
|
| - next_pos->set_hint(prev_operand);
|
| -}
|
| -
|
| -
|
| LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
|
| ASSERT(!range->IsFixed());
|
| TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
|
|
|
| if (pos.Value() <= range->Start().Value()) return range;
|
|
|
| + // We can't properly connect live ranges if the split occurred at the end
|
| + // of a control instruction.
|
| + ASSERT(pos.IsInstructionStart() ||
|
| + !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
|
| +
|
| LiveRange* result = LiveRangeFor(next_virtual_register_++);
|
| range->SplitAt(pos, result);
|
| return result;
|
|
|
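In the final hunk, SplitAt now asserts that a split never lands at the end of a control instruction, since, per the added comment, live ranges split there cannot be connected properly afterwards. A sketch of the condition the assertion encodes, with the operands reduced to two booleans:

    #include <cassert>

    // A live range may be split at an instruction start, or inside an ordinary
    // instruction, but not at the end of a control instruction: live ranges
    // split there cannot be reconnected properly (paraphrasing the patch comment).
    bool IsValidSplitPosition(bool is_instruction_start, bool is_control_instruction) {
      return is_instruction_start || !is_control_instruction;
    }

    int main() {
      assert(IsValidSplitPosition(true, true));    // start of a branch: allowed
      assert(IsValidSplitPosition(false, false));  // end of an ordinary instruction: allowed
      assert(!IsValidSplitPosition(false, true));  // end of a branch: the new ASSERT rejects this
      return 0;
    }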