Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(3)

Side by Side Diff: src/lithium-allocator.cc

Issue 6597029: [Isolates] Merge r 6300:6500 from bleeding_edge to isolates. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/isolates/
Patch Set: Created 9 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/lithium-allocator.h ('k') | src/liveobjectlist.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
64 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { 64 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
65 return a.Value() < b.Value() ? a : b; 65 return a.Value() < b.Value() ? a : b;
66 } 66 }
67 67
68 68
69 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { 69 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
70 return a.Value() > b.Value() ? a : b; 70 return a.Value() > b.Value() ? a : b;
71 } 71 }
72 72
73 73
74 void LOperand::PrintTo(StringStream* stream) { 74 UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
75 LUnallocated* unalloc = NULL; 75 : operand_(operand),
76 switch (kind()) { 76 hint_(NULL),
77 case INVALID: 77 pos_(pos),
78 break; 78 next_(NULL),
79 case UNALLOCATED: 79 requires_reg_(false),
80 unalloc = LUnallocated::cast(this); 80 register_beneficial_(true) {
81 stream->Add("v%d", unalloc->virtual_register()); 81 if (operand_ != NULL && operand_->IsUnallocated()) {
82 switch (unalloc->policy()) { 82 LUnallocated* unalloc = LUnallocated::cast(operand_);
83 case LUnallocated::NONE: 83 requires_reg_ = unalloc->HasRegisterPolicy();
84 break; 84 register_beneficial_ = !unalloc->HasAnyPolicy();
85 case LUnallocated::FIXED_REGISTER: {
86 const char* register_name =
87 Register::AllocationIndexToString(unalloc->fixed_index());
88 stream->Add("(=%s)", register_name);
89 break;
90 }
91 case LUnallocated::FIXED_DOUBLE_REGISTER: {
92 const char* double_register_name =
93 DoubleRegister::AllocationIndexToString(unalloc->fixed_index());
94 stream->Add("(=%s)", double_register_name);
95 break;
96 }
97 case LUnallocated::FIXED_SLOT:
98 stream->Add("(=%dS)", unalloc->fixed_index());
99 break;
100 case LUnallocated::MUST_HAVE_REGISTER:
101 stream->Add("(R)");
102 break;
103 case LUnallocated::WRITABLE_REGISTER:
104 stream->Add("(WR)");
105 break;
106 case LUnallocated::SAME_AS_FIRST_INPUT:
107 stream->Add("(1)");
108 break;
109 case LUnallocated::ANY:
110 stream->Add("(-)");
111 break;
112 case LUnallocated::IGNORE:
113 stream->Add("(0)");
114 break;
115 }
116 break;
117 case CONSTANT_OPERAND:
118 stream->Add("[constant:%d]", index());
119 break;
120 case STACK_SLOT:
121 stream->Add("[stack:%d]", index());
122 break;
123 case DOUBLE_STACK_SLOT:
124 stream->Add("[double_stack:%d]", index());
125 break;
126 case REGISTER:
127 stream->Add("[%s|R]", Register::AllocationIndexToString(index()));
128 break;
129 case DOUBLE_REGISTER:
130 stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index()));
131 break;
132 case ARGUMENT:
133 stream->Add("[arg:%d]", index());
134 break;
135 } 85 }
136 } 86 ASSERT(pos_.IsValid());
137
138 int LOperand::VirtualRegister() {
139 LUnallocated* unalloc = LUnallocated::cast(this);
140 return unalloc->virtual_register();
141 } 87 }
142 88
143 89
90 bool UsePosition::HasHint() const {
91 return hint_ != NULL && !hint_->IsUnallocated();
92 }
93
94
144 bool UsePosition::RequiresRegister() const { 95 bool UsePosition::RequiresRegister() const {
145 return requires_reg_; 96 return requires_reg_;
146 } 97 }
147 98
148 99
149 bool UsePosition::RegisterIsBeneficial() const { 100 bool UsePosition::RegisterIsBeneficial() const {
150 return register_beneficial_; 101 return register_beneficial_;
151 } 102 }
152 103
153 104
(...skipping 29 matching lines...) Expand all
183 } 134 }
184 current_interval = current_interval->next(); 135 current_interval = current_interval->next();
185 } 136 }
186 return false; 137 return false;
187 } 138 }
188 139
189 140
190 #endif 141 #endif
191 142
192 143
144 LiveRange::LiveRange(int id)
145 : id_(id),
146 spilled_(false),
147 assigned_register_(kInvalidAssignment),
148 assigned_register_kind_(NONE),
149 last_interval_(NULL),
150 first_interval_(NULL),
151 first_pos_(NULL),
152 parent_(NULL),
153 next_(NULL),
154 current_interval_(NULL),
155 last_processed_use_(NULL),
156 spill_start_index_(kMaxInt) {
157 spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
158 }
159
160
161 void LiveRange::set_assigned_register(int reg, RegisterKind register_kind) {
162 ASSERT(!HasRegisterAssigned() && !IsSpilled());
163 assigned_register_ = reg;
164 assigned_register_kind_ = register_kind;
165 ConvertOperands();
166 }
167
168
169 void LiveRange::MakeSpilled() {
170 ASSERT(!IsSpilled());
171 ASSERT(TopLevel()->HasAllocatedSpillOperand());
172 spilled_ = true;
173 assigned_register_ = kInvalidAssignment;
174 ConvertOperands();
175 }
176
177
178 bool LiveRange::HasAllocatedSpillOperand() const {
179 return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
180 }
181
182
183 void LiveRange::SetSpillOperand(LOperand* operand) {
184 ASSERT(!operand->IsUnallocated());
185 ASSERT(spill_operand_ != NULL);
186 ASSERT(spill_operand_->IsUnallocated());
187 spill_operand_->ConvertTo(operand->kind(), operand->index());
188 }
189
190
193 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { 191 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
194 UsePosition* use_pos = last_processed_use_; 192 UsePosition* use_pos = last_processed_use_;
195 if (use_pos == NULL) use_pos = first_pos(); 193 if (use_pos == NULL) use_pos = first_pos();
196 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) { 194 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
197 use_pos = use_pos->next(); 195 use_pos = use_pos->next();
198 } 196 }
199 last_processed_use_ = use_pos; 197 last_processed_use_ = use_pos;
200 return use_pos; 198 return use_pos;
201 } 199 }
202 200
(...skipping 535 matching lines...) Expand 10 before | Expand all | Expand 10 after
738 736
739 void LAllocator::AddConstraintsGapMove(int index, 737 void LAllocator::AddConstraintsGapMove(int index,
740 LOperand* from, 738 LOperand* from,
741 LOperand* to) { 739 LOperand* to) {
742 LGap* gap = chunk_->GetGapAt(index); 740 LGap* gap = chunk_->GetGapAt(index);
743 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); 741 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
744 if (from->IsUnallocated()) { 742 if (from->IsUnallocated()) {
745 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); 743 const ZoneList<LMoveOperands>* move_operands = move->move_operands();
746 for (int i = 0; i < move_operands->length(); ++i) { 744 for (int i = 0; i < move_operands->length(); ++i) {
747 LMoveOperands cur = move_operands->at(i); 745 LMoveOperands cur = move_operands->at(i);
748 LOperand* cur_to = cur.to(); 746 LOperand* cur_to = cur.destination();
749 if (cur_to->IsUnallocated()) { 747 if (cur_to->IsUnallocated()) {
750 if (cur_to->VirtualRegister() == from->VirtualRegister()) { 748 if (cur_to->VirtualRegister() == from->VirtualRegister()) {
751 move->AddMove(cur.from(), to); 749 move->AddMove(cur.source(), to);
752 return; 750 return;
753 } 751 }
754 } 752 }
755 } 753 }
756 } 754 }
757 move->AddMove(from, to); 755 move->AddMove(from, to);
758 } 756 }
759 757
760 758
761 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) { 759 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after
821 // Handle fixed input operands of second instruction. 819 // Handle fixed input operands of second instruction.
822 if (second != NULL) { 820 if (second != NULL) {
823 for (int i = 0; i < second->InputCount(); ++i) { 821 for (int i = 0; i < second->InputCount(); ++i) {
824 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(i)); 822 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(i));
825 if (cur_input->HasFixedPolicy()) { 823 if (cur_input->HasFixedPolicy()) {
826 LUnallocated* input_copy = cur_input->CopyUnconstrained(); 824 LUnallocated* input_copy = cur_input->CopyUnconstrained();
827 bool is_tagged = HasTaggedValue(cur_input->VirtualRegister()); 825 bool is_tagged = HasTaggedValue(cur_input->VirtualRegister());
828 AllocateFixed(cur_input, gap_index + 1, is_tagged); 826 AllocateFixed(cur_input, gap_index + 1, is_tagged);
829 AddConstraintsGapMove(gap_index, input_copy, cur_input); 827 AddConstraintsGapMove(gap_index, input_copy, cur_input);
830 } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) { 828 } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
829 // The live range of writable input registers always goes until the end
830 // of the instruction.
831 ASSERT(!cur_input->IsUsedAtStart());
832
831 LUnallocated* input_copy = cur_input->CopyUnconstrained(); 833 LUnallocated* input_copy = cur_input->CopyUnconstrained();
832 cur_input->set_virtual_register(next_virtual_register_++); 834 cur_input->set_virtual_register(next_virtual_register_++);
833 835
834 if (RequiredRegisterKind(input_copy->virtual_register()) == 836 if (RequiredRegisterKind(input_copy->virtual_register()) ==
835 DOUBLE_REGISTERS) { 837 DOUBLE_REGISTERS) {
836 double_artificial_registers_.Add( 838 double_artificial_registers_.Add(
837 cur_input->virtual_register() - first_artificial_register_); 839 cur_input->virtual_register() - first_artificial_register_);
838 } 840 }
839 841
840 second->AddTemp(cur_input);
841 AddConstraintsGapMove(gap_index, input_copy, cur_input); 842 AddConstraintsGapMove(gap_index, input_copy, cur_input);
842 } 843 }
843 } 844 }
844 } 845 }
845 846
846 // Handle "output same as input" for second instruction. 847 // Handle "output same as input" for second instruction.
847 if (second != NULL && second->Output() != NULL) { 848 if (second != NULL && second->Output() != NULL) {
848 LUnallocated* second_output = LUnallocated::cast(second->Output()); 849 LUnallocated* second_output = LUnallocated::cast(second->Output());
849 if (second_output->HasSameAsInputPolicy()) { 850 if (second_output->HasSameAsInputPolicy()) {
850 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(0)); 851 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(0));
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
886 LifetimePosition::FromInstructionIndex(index); 887 LifetimePosition::FromInstructionIndex(index);
887 888
888 if (chunk_->IsGapAt(index)) { 889 if (chunk_->IsGapAt(index)) {
889 // We have a gap at this position. 890 // We have a gap at this position.
890 LGap* gap = chunk_->GetGapAt(index); 891 LGap* gap = chunk_->GetGapAt(index);
891 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); 892 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
892 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); 893 const ZoneList<LMoveOperands>* move_operands = move->move_operands();
893 for (int i = 0; i < move_operands->length(); ++i) { 894 for (int i = 0; i < move_operands->length(); ++i) {
894 LMoveOperands* cur = &move_operands->at(i); 895 LMoveOperands* cur = &move_operands->at(i);
895 if (cur->IsIgnored()) continue; 896 if (cur->IsIgnored()) continue;
896 LOperand* from = cur->from(); 897 LOperand* from = cur->source();
897 LOperand* to = cur->to(); 898 LOperand* to = cur->destination();
898 HPhi* phi = LookupPhi(to); 899 HPhi* phi = LookupPhi(to);
899 LOperand* hint = to; 900 LOperand* hint = to;
900 if (phi != NULL) { 901 if (phi != NULL) {
901 // This is a phi resolving move. 902 // This is a phi resolving move.
902 if (!phi->block()->IsLoopHeader()) { 903 if (!phi->block()->IsLoopHeader()) {
903 hint = LiveRangeFor(phi->id())->FirstHint(); 904 hint = LiveRangeFor(phi->id())->FirstHint();
904 } 905 }
905 } else { 906 } else {
906 if (to->IsUnallocated()) { 907 if (to->IsUnallocated()) {
907 if (live->Contains(to->VirtualRegister())) { 908 if (live->Contains(to->VirtualRegister())) {
(...skipping 299 matching lines...) Expand 10 before | Expand all | Expand 10 after
1207 // The live range interval already ends at the first instruction of the 1208 // The live range interval already ends at the first instruction of the
1208 // block. 1209 // block.
1209 HPhi* phi = phis->at(i); 1210 HPhi* phi = phis->at(i);
1210 live->Remove(phi->id()); 1211 live->Remove(phi->id());
1211 1212
1212 LOperand* hint = NULL; 1213 LOperand* hint = NULL;
1213 LOperand* phi_operand = NULL; 1214 LOperand* phi_operand = NULL;
1214 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0)); 1215 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1215 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); 1216 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
1216 for (int j = 0; j < move->move_operands()->length(); ++j) { 1217 for (int j = 0; j < move->move_operands()->length(); ++j) {
1217 LOperand* to = move->move_operands()->at(j).to(); 1218 LOperand* to = move->move_operands()->at(j).destination();
1218 if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) { 1219 if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) {
1219 hint = move->move_operands()->at(j).from(); 1220 hint = move->move_operands()->at(j).source();
1220 phi_operand = to; 1221 phi_operand = to;
1221 break; 1222 break;
1222 } 1223 }
1223 } 1224 }
1224 ASSERT(hint != NULL); 1225 ASSERT(hint != NULL);
1225 1226
1226 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( 1227 LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1227 block->first_instruction_index()); 1228 block->first_instruction_index());
1228 Define(block_start, phi_operand, hint); 1229 Define(block_start, phi_operand, hint);
1229 } 1230 }
(...skipping 775 matching lines...) Expand 10 before | Expand all | Expand 10 after
2005 } 2006 }
2006 } 2007 }
2007 2008
2008 2009
2009 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { 2010 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
2010 return pos.IsInstructionStart() && 2011 return pos.IsInstructionStart() &&
2011 chunk_->instructions()->at(pos.InstructionIndex())->IsLabel(); 2012 chunk_->instructions()->at(pos.InstructionIndex())->IsLabel();
2012 } 2013 }
2013 2014
2014 2015
2015 void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) {
2016 UsePosition* prev_pos = prev->AddUsePosition(
2017 LifetimePosition::FromInstructionIndex(pos));
2018 UsePosition* next_pos = next->AddUsePosition(
2019 LifetimePosition::FromInstructionIndex(pos));
2020 LOperand* prev_operand = prev_pos->operand();
2021 LOperand* next_operand = next_pos->operand();
2022 LGap* gap = chunk_->GetGapAt(pos);
2023 gap->GetOrCreateParallelMove(LGap::START)->
2024 AddMove(prev_operand, next_operand);
2025 next_pos->set_hint(prev_operand);
2026 }
2027
2028
2029 LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) { 2016 LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
2030 ASSERT(!range->IsFixed()); 2017 ASSERT(!range->IsFixed());
2031 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); 2018 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
2032 2019
2033 if (pos.Value() <= range->Start().Value()) return range; 2020 if (pos.Value() <= range->Start().Value()) return range;
2034 2021
2035 LiveRange* result = LiveRangeFor(next_virtual_register_++); 2022 LiveRange* result = LiveRangeFor(next_virtual_register_++);
2036 range->SplitAt(pos, result); 2023 range->SplitAt(pos, result);
2037 return result; 2024 return result;
2038 } 2025 }
(...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after
2145 LiveRange* current = live_ranges()->at(i); 2132 LiveRange* current = live_ranges()->at(i);
2146 if (current != NULL) current->Verify(); 2133 if (current != NULL) current->Verify();
2147 } 2134 }
2148 } 2135 }
2149 2136
2150 2137
2151 #endif 2138 #endif
2152 2139
2153 2140
2154 } } // namespace v8::internal 2141 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/lithium-allocator.h ('k') | src/liveobjectlist.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698