Chromium Code Reviews

Unified Diff: src/compiler/register-allocator.cc

Issue 1612013002: Revert of [turbofan] optimize spills in deferred blocks (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 11 months ago
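For context on the mechanism this patch reverts: the comments in OperandAssigner::CommitAssignment below contrast two spill-placement strategies, spilling once at the value's definition versus spilling only inside deferred (rarely executed) blocks. The following is a minimal standalone sketch of that trade-off, not V8 code; Block, StoresExecuted, and the traces are invented for illustration.

#include <cstdio>
#include <vector>

// Simplified model of a basic block for illustration only (not a V8 type).
struct Block {
  bool deferred;    // rarely executed path, e.g. a bailout
  bool needs_slot;  // the value must be on the stack here
};

// Strategy A ("spill at definition"): one store executes on every run.
// Strategy B ("spill in deferred blocks"): a store executes only when a
// deferred block that needs the stack slot is actually entered.
int StoresExecuted(const std::vector<Block>& trace, bool spill_at_definition) {
  if (spill_at_definition) return 1;  // store happens once, up front
  int stores = 0;
  for (const Block& b : trace) {
    if (b.deferred && b.needs_slot) ++stores;  // store only on the cold path
  }
  return stores;
}

int main() {
  // Hot-path trace: the deferred block is never entered.
  std::vector<Block> hot_path = {{false, false}, {false, false}};
  // Cold-path trace: the deferred block is entered and needs the slot.
  std::vector<Block> cold_path = {{false, false}, {true, true}};
  std::printf("hot path:  at definition %d, deferred-only %d\n",
              StoresExecuted(hot_path, true), StoresExecuted(hot_path, false));
  std::printf("cold path: at definition %d, deferred-only %d\n",
              StoresExecuted(cold_path, true), StoresExecuted(cold_path, false));
  return 0;
}

On the hot path the deferred-only strategy executes no store at all, which is what the reverted CL optimized for; this revert returns to the earlier TryCommitSpillInDeferredBlock mechanism, restored in the diff below.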
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/base/adapters.h"
 #include "src/compiler/linkage.h"
 #include "src/compiler/register-allocator.h"
 #include "src/string-stream.h"

 namespace v8 {
(...skipping 95 matching lines...)
       return 8;
     case MachineRepresentation::kNone:
       break;
   }
   UNREACHABLE();
   return 0;
 }

 }  // namespace

-class LiveRangeBound {
- public:
-  explicit LiveRangeBound(LiveRange* range, bool skip)
-      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
-    DCHECK(!range->IsEmpty());
-  }
-
-  bool CanCover(LifetimePosition position) {
-    return start_ <= position && position < end_;
-  }
-
-  LiveRange* const range_;
-  const LifetimePosition start_;
-  const LifetimePosition end_;
-  const bool skip_;
-
- private:
-  DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
-};
-
-
-struct FindResult {
-  LiveRange* cur_cover_;
-  LiveRange* pred_cover_;
-};
-
-
-class LiveRangeBoundArray {
- public:
-  LiveRangeBoundArray() : length_(0), start_(nullptr) {}
-
-  bool ShouldInitialize() { return start_ == nullptr; }
-
-  void Initialize(Zone* zone, TopLevelLiveRange* range) {
-    length_ = range->GetChildCount();
-
-    start_ = zone->NewArray<LiveRangeBound>(length_);
-    LiveRangeBound* curr = start_;
-    // Normally, spilled ranges do not need connecting moves, because the spill
-    // location has been assigned at definition. For ranges spilled in deferred
-    // blocks, that is not the case, so we need to connect the spilled children.
-    for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
-      new (curr) LiveRangeBound(i, i->spilled());
-    }
-  }
-
-  LiveRangeBound* Find(const LifetimePosition position) const {
-    size_t left_index = 0;
-    size_t right_index = length_;
-    while (true) {
-      size_t current_index = left_index + (right_index - left_index) / 2;
-      DCHECK(right_index > current_index);
-      LiveRangeBound* bound = &start_[current_index];
-      if (bound->start_ <= position) {
-        if (position < bound->end_) return bound;
-        DCHECK(left_index < current_index);
-        left_index = current_index;
-      } else {
-        right_index = current_index;
-      }
-    }
-  }
-
-  LiveRangeBound* FindPred(const InstructionBlock* pred) {
-    LifetimePosition pred_end =
-        LifetimePosition::InstructionFromInstructionIndex(
-            pred->last_instruction_index());
-    return Find(pred_end);
-  }
-
-  LiveRangeBound* FindSucc(const InstructionBlock* succ) {
-    LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
-        succ->first_instruction_index());
-    return Find(succ_start);
-  }
-
-  bool FindConnectableSubranges(const InstructionBlock* block,
-                                const InstructionBlock* pred,
-                                FindResult* result) const {
-    LifetimePosition pred_end =
-        LifetimePosition::InstructionFromInstructionIndex(
-            pred->last_instruction_index());
-    LiveRangeBound* bound = Find(pred_end);
-    result->pred_cover_ = bound->range_;
-    LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
-        block->first_instruction_index());
-
-    if (bound->CanCover(cur_start)) {
-      // Both blocks are covered by the same range, so there is nothing to
-      // connect.
-      return false;
-    }
-    bound = Find(cur_start);
-    if (bound->skip_) {
-      return false;
-    }
-    result->cur_cover_ = bound->range_;
-    DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
-    return (result->cur_cover_ != result->pred_cover_);
-  }
-
- private:
-  size_t length_;
-  LiveRangeBound* start_;
-
-  DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
-};
-
-
-class LiveRangeFinder {
- public:
-  explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
-      : data_(data),
-        bounds_length_(static_cast<int>(data_->live_ranges().size())),
-        bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
-        zone_(zone) {
-    for (int i = 0; i < bounds_length_; ++i) {
-      new (&bounds_[i]) LiveRangeBoundArray();
-    }
-  }
-
-  LiveRangeBoundArray* ArrayFor(int operand_index) {
-    DCHECK(operand_index < bounds_length_);
-    TopLevelLiveRange* range = data_->live_ranges()[operand_index];
-    DCHECK(range != nullptr && !range->IsEmpty());
-    LiveRangeBoundArray* array = &bounds_[operand_index];
-    if (array->ShouldInitialize()) {
-      array->Initialize(zone_, range);
-    }
-    return array;
-  }
-
- private:
-  const RegisterAllocationData* const data_;
-  const int bounds_length_;
-  LiveRangeBoundArray* const bounds_;
-  Zone* const zone_;
-
-  DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
-};
-
-
-typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
-
-
-struct DelayedInsertionMapCompare {
-  bool operator()(const DelayedInsertionMapKey& a,
-                  const DelayedInsertionMapKey& b) const {
-    if (a.first == b.first) {
-      return a.second.Compare(b.second);
-    }
-    return a.first < b.first;
-  }
-};
-
-
-typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
-                DelayedInsertionMapCompare> DelayedInsertionMap;
-

 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
                          void* hint, UsePositionHintType hint_type)
     : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
   DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
   bool register_beneficial = true;
   UsePositionType type = UsePositionType::kAny;
   if (operand_ != nullptr && operand_->IsUnallocated()) {
     const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
     if (unalloc->HasRegisterPolicy()) {
(...skipping 601 matching lines...)
 #endif


 void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
                                             InstructionOperand* operand) {
   DCHECK(HasNoSpillType());
   spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
       gap_index, operand, spill_move_insertion_locations_);
 }

+
+bool TopLevelLiveRange::TryCommitSpillInDeferredBlock(
+    InstructionSequence* code, const InstructionOperand& spill_operand) {
+  if (!IsSpilledOnlyInDeferredBlocks()) return false;
+
+  TRACE("Live Range %d will be spilled only in deferred blocks.\n", vreg());
+  // If we have ranges that aren't spilled but require the operand on the stack,
+  // make sure we insert the spill.
+  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
+    if (!child->spilled() &&
+        child->NextSlotPosition(child->Start()) != nullptr) {
+      Instruction* instr =
+          code->InstructionAt(child->Start().ToInstructionIndex());
+      // Insert spill at the end to let live range connections happen at START.
+      ParallelMove* move =
+          instr->GetOrCreateParallelMove(Instruction::END, code->zone());
+      InstructionOperand assigned = child->GetAssignedOperand();
+      if (TopLevel()->has_slot_use()) {
+        bool found = false;
+        for (MoveOperands* move_op : *move) {
+          if (move_op->IsEliminated()) continue;
+          if (move_op->source().Equals(assigned) &&
+              move_op->destination().Equals(spill_operand)) {
+            found = true;
+            break;
+          }
+        }
+        if (found) continue;
+      }
+
+      move->AddMove(assigned, spill_operand);
+    }
+  }
+
+  return true;
+}
+
+
 void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
                                          const InstructionOperand& op,
                                          bool might_be_duplicated) {
-  DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
+  DCHECK_IMPLIES(op.IsConstant(), spill_move_insertion_locations() == nullptr);
   Zone* zone = sequence->zone();

-  for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
+  for (SpillMoveInsertionList* to_spill = spill_move_insertion_locations();
        to_spill != nullptr; to_spill = to_spill->next) {
     Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
     ParallelMove* move =
         instr->GetOrCreateParallelMove(Instruction::START, zone);
     // Skip insertion if it's possible that the move exists already as a
     // constraint move from a fixed output register to a slot.
     if (might_be_duplicated || has_preassigned_slot()) {
       bool found = false;
       for (MoveOperands* move_op : *move) {
         if (move_op->IsEliminated()) continue;
(...skipping 2166 matching lines...)
     if (!range->HasSpillRange()) continue;
     if (range->IsSpilledOnlyInDeferredBlocks()) {
       for (LiveRange* child = range; child != nullptr; child = child->next()) {
         if (child->spilled()) {
           code->GetInstructionBlock(child->Start().ToInstructionIndex())
               ->mark_needs_frame();
         }
       }
     } else {
       TopLevelLiveRange::SpillMoveInsertionList* spills =
-          range->GetSpillMoveInsertionLocations();
+          range->spill_move_insertion_locations();
       DCHECK_NOT_NULL(spills);
       for (; spills != nullptr; spills = spills->next) {
         code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
       }
     }
   }
 }


 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
(...skipping 46 matching lines...)
     }

     if (!spill_operand.IsInvalid()) {
       // If this top level range has a child spilled in a deferred block, we use
       // the range and control flow connection mechanism instead of spilling at
       // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
       // phases. Normally, when we spill at definition, we do not insert a
       // connecting move when a successor child range is spilled - because the
       // spilled range picks up its value from the slot which was assigned at
       // definition. For ranges that are determined to spill only in deferred
-      // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
-      // where a spill operand is expected, and then finalize by inserting the
-      // spills in the deferred blocks dominators.
-      if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
+      // blocks, we let ConnectLiveRanges and ResolveControlFlow insert such
+      // moves between ranges. Because of how the ranges are split around
+      // deferred blocks, this amounts to spilling and filling inside such
+      // blocks.
+      if (!top_range->TryCommitSpillInDeferredBlock(data()->code(),
+                                                    spill_operand)) {
         // Spill at definition if the range isn't spilled only in deferred
         // blocks.
         top_range->CommitSpillMoves(
             data()->code(), spill_operand,
             top_range->has_slot_use() || top_range->spilled());
       }
     }
   }
 }

(...skipping 130 matching lines...)
       DCHECK(!operand.IsStackSlot());
       DCHECK_EQ(MachineRepresentation::kTagged,
                 AllocatedOperand::cast(operand).representation());
       map->RecordReference(AllocatedOperand::cast(operand));
     }
   }
 }


+namespace {
+
+class LiveRangeBound {
+ public:
+  explicit LiveRangeBound(const LiveRange* range, bool skip)
+      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
+    DCHECK(!range->IsEmpty());
+  }
+
+  bool CanCover(LifetimePosition position) {
+    return start_ <= position && position < end_;
+  }
+
+  const LiveRange* const range_;
+  const LifetimePosition start_;
+  const LifetimePosition end_;
+  const bool skip_;
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
+};
+
+
+struct FindResult {
+  const LiveRange* cur_cover_;
+  const LiveRange* pred_cover_;
+};
+
+
+class LiveRangeBoundArray {
+ public:
+  LiveRangeBoundArray() : length_(0), start_(nullptr) {}
+
+  bool ShouldInitialize() { return start_ == nullptr; }
+
+  void Initialize(Zone* zone, const TopLevelLiveRange* const range) {
+    length_ = range->GetChildCount();
+
+    start_ = zone->NewArray<LiveRangeBound>(length_);
+    LiveRangeBound* curr = start_;
+    // Normally, spilled ranges do not need connecting moves, because the spill
+    // location has been assigned at definition. For ranges spilled in deferred
+    // blocks, that is not the case, so we need to connect the spilled children.
+    bool spilled_in_blocks = range->IsSpilledOnlyInDeferredBlocks();
+    for (const LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
+      new (curr) LiveRangeBound(i, !spilled_in_blocks && i->spilled());
+    }
+  }
+
+  LiveRangeBound* Find(const LifetimePosition position) const {
+    size_t left_index = 0;
+    size_t right_index = length_;
+    while (true) {
+      size_t current_index = left_index + (right_index - left_index) / 2;
+      DCHECK(right_index > current_index);
+      LiveRangeBound* bound = &start_[current_index];
+      if (bound->start_ <= position) {
+        if (position < bound->end_) return bound;
+        DCHECK(left_index < current_index);
+        left_index = current_index;
+      } else {
+        right_index = current_index;
+      }
+    }
+  }
+
+  LiveRangeBound* FindPred(const InstructionBlock* pred) {
+    LifetimePosition pred_end =
+        LifetimePosition::InstructionFromInstructionIndex(
+            pred->last_instruction_index());
+    return Find(pred_end);
+  }
+
+  LiveRangeBound* FindSucc(const InstructionBlock* succ) {
+    LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
+        succ->first_instruction_index());
+    return Find(succ_start);
+  }
+
+  bool FindConnectableSubranges(const InstructionBlock* block,
+                                const InstructionBlock* pred,
+                                FindResult* result) const {
+    LifetimePosition pred_end =
+        LifetimePosition::InstructionFromInstructionIndex(
+            pred->last_instruction_index());
+    LiveRangeBound* bound = Find(pred_end);
+    result->pred_cover_ = bound->range_;
+    LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
+        block->first_instruction_index());
+
+    if (bound->CanCover(cur_start)) {
+      // Both blocks are covered by the same range, so there is nothing to
+      // connect.
+      return false;
+    }
+    bound = Find(cur_start);
+    if (bound->skip_) {
+      return false;
+    }
+    result->cur_cover_ = bound->range_;
+    DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
+    return (result->cur_cover_ != result->pred_cover_);
+  }
+
+ private:
+  size_t length_;
+  LiveRangeBound* start_;
+
+  DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
+};
+
+
+class LiveRangeFinder {
+ public:
+  explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
+      : data_(data),
+        bounds_length_(static_cast<int>(data_->live_ranges().size())),
+        bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
+        zone_(zone) {
+    for (int i = 0; i < bounds_length_; ++i) {
+      new (&bounds_[i]) LiveRangeBoundArray();
+    }
+  }
+
+  LiveRangeBoundArray* ArrayFor(int operand_index) {
+    DCHECK(operand_index < bounds_length_);
+    TopLevelLiveRange* range = data_->live_ranges()[operand_index];
+    DCHECK(range != nullptr && !range->IsEmpty());
+    LiveRangeBoundArray* array = &bounds_[operand_index];
+    if (array->ShouldInitialize()) {
+      array->Initialize(zone_, range);
+    }
+    return array;
+  }
+
+ private:
+  const RegisterAllocationData* const data_;
+  const int bounds_length_;
+  LiveRangeBoundArray* const bounds_;
+  Zone* const zone_;
+
+  DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
+};
+
+
+typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
+
+
+struct DelayedInsertionMapCompare {
+  bool operator()(const DelayedInsertionMapKey& a,
+                  const DelayedInsertionMapKey& b) const {
+    if (a.first == b.first) {
+      return a.second.Compare(b.second);
+    }
+    return a.first < b.first;
+  }
+};
+
+
+typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
+                DelayedInsertionMapCompare> DelayedInsertionMap;
+
+}  // namespace
+
+
 LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
     : data_(data) {}


 bool LiveRangeConnector::CanEagerlyResolveControlFlow(
     const InstructionBlock* block) const {
   if (block->PredecessorCount() != 1) return false;
   return block->predecessors()[0].IsNext(block->rpo_number());
 }

(...skipping 10 matching lines...)
       LiveRangeBoundArray* array = finder.ArrayFor(iterator.Current());
       for (const RpoNumber& pred : block->predecessors()) {
         FindResult result;
         const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
         if (!array->FindConnectableSubranges(block, pred_block, &result)) {
           continue;
         }
         InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
         InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
         if (pred_op.Equals(cur_op)) continue;
-        if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
-          // We're doing a reload.
-          const LiveRange* current = result.cur_cover_;
-
-          if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
-              pred_block->IsDeferred()) {
-            // The spill location should be defined in pred_block, so add
-            // pred_block to the list of blocks requiring a spill operand.
-            current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
-                pred_block->rpo_number().ToInt());
-          }
-        }
         int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
         USE(move_loc);
         DCHECK_IMPLIES(
             result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
                 !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
             code()->GetInstructionBlock(move_loc)->IsDeferred());
       }
       iterator.Advance();
     }
   }
-
-  // At this stage, we collected blocks needing a spill operand from
-  // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
-  // deferred blocks.
-  for (TopLevelLiveRange* top : data()->live_ranges()) {
-    if (top == nullptr || top->IsEmpty() ||
-        !top->IsSpilledOnlyInDeferredBlocks())
-      continue;
-    CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
-  }
 }


 int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
                                            const InstructionOperand& cur_op,
                                            const InstructionBlock* pred,
                                            const InstructionOperand& pred_op) {
   DCHECK(!pred_op.Equals(cur_op));
   int gap_index;
   Instruction::GapPosition position;
(...skipping 17 matching lines...)
   DelayedInsertionMap delayed_insertion_map(local_zone);
   for (TopLevelLiveRange* top_range : data()->live_ranges()) {
     if (top_range == nullptr) continue;
     bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
     LiveRange* first_range = top_range;
     for (LiveRange *second_range = first_range->next(); second_range != nullptr;
          first_range = second_range, second_range = second_range->next()) {
       LifetimePosition pos = second_range->Start();
       // Add gap move if the two live ranges touch and there is no block
       // boundary.
-      if (second_range->spilled()) continue;
+      if (!connect_spilled && second_range->spilled()) continue;
       if (first_range->End() != pos) continue;
       if (data()->IsBlockBoundary(pos) &&
           !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
         continue;
       }
       InstructionOperand prev_operand = first_range->GetAssignedOperand();
       InstructionOperand cur_operand = second_range->GetAssignedOperand();
       if (prev_operand.Equals(cur_operand)) continue;
       bool delay_insertion = false;
       Instruction::GapPosition gap_pos;
       int gap_index = pos.ToInstructionIndex();
-      if (connect_spilled && !prev_operand.IsAnyRegister() &&
-          cur_operand.IsAnyRegister()) {
-        const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
-        DCHECK(block->IsDeferred());
-        // Performing a reload in this block, meaning the spill operand must
-        // be defined here.
-        top_range->GetListOfBlocksRequiringSpillOperands()->Add(
-            block->rpo_number().ToInt());
-      }
-
       if (pos.IsGapPosition()) {
         gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
       } else {
         if (pos.IsStart()) {
           delay_insertion = true;
         } else {
           gap_index++;
         }
         gap_pos = delay_insertion ? Instruction::END : Instruction::START;
       }
-      // Reloads or spills for spilled in deferred blocks ranges must happen
+      // Fills or spills for spilled in deferred blocks ranges must happen
       // only in deferred blocks.
       DCHECK_IMPLIES(
           connect_spilled &&
               !(prev_operand.IsAnyRegister() && cur_operand.IsAnyRegister()),
           code()->GetInstructionBlock(gap_index)->IsDeferred());

       ParallelMove* move =
           code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
               gap_pos, code_zone());
       if (!delay_insertion) {
(...skipping 30 matching lines...)
     // Gather all MoveOperands for a single ParallelMove.
     MoveOperands* move =
         new (code_zone()) MoveOperands(it->first.second, it->second);
     MoveOperands* eliminate = moves->PrepareInsertAfter(move);
     to_insert.push_back(move);
     if (eliminate != nullptr) to_eliminate.push_back(eliminate);
   }
 }


-void LiveRangeConnector::CommitSpillsInDeferredBlocks(
-    TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
-  DCHECK(range->IsSpilledOnlyInDeferredBlocks());
-  DCHECK(!range->spilled());
-
-  InstructionSequence* code = data()->code();
-  InstructionOperand spill_operand = range->GetSpillRangeOperand();
-
-  TRACE("Live Range %d will be spilled only in deferred blocks.\n",
-        range->vreg());
-  // If we have ranges that aren't spilled but require the operand on the stack,
-  // make sure we insert the spill.
-  for (const LiveRange* child = range; child != nullptr;
-       child = child->next()) {
-    for (const UsePosition* pos = child->first_pos(); pos != nullptr;
-         pos = pos->next()) {
-      if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
-        continue;
-      range->AddBlockRequiringSpillOperand(
-          code->GetInstructionBlock(pos->pos().ToInstructionIndex())
-              ->rpo_number());
-    }
-  }
-
-  ZoneQueue<int> worklist(temp_zone);
-
-  for (BitVector::Iterator iterator(
-           range->GetListOfBlocksRequiringSpillOperands());
-       !iterator.Done(); iterator.Advance()) {
-    worklist.push(iterator.Current());
-  }
-
-  // Seek the deferred blocks that dominate locations requiring spill operands,
-  // and spill there. We only need to spill at the start of such blocks.
-  BitVector done_blocks(
-      range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
-  while (!worklist.empty()) {
-    int block_id = worklist.front();
-    worklist.pop();
-    if (done_blocks.Contains(block_id)) continue;
-    done_blocks.Add(block_id);
-    const InstructionBlock* spill_block =
-        code->InstructionBlockAt(RpoNumber::FromInt(block_id));
-
-    for (const RpoNumber& pred : spill_block->predecessors()) {
-      const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
-
-      if (pred_block->IsDeferred()) {
-        worklist.push(pred_block->rpo_number().ToInt());
-      } else {
-        LifetimePosition pred_end =
-            LifetimePosition::InstructionFromInstructionIndex(
-                pred_block->last_instruction_index());
-
-        LiveRangeBound* bound = array->Find(pred_end);
-
-        InstructionOperand pred_op = bound->range_->GetAssignedOperand();
-
-        data()->AddGapMove(spill_block->first_instruction_index(),
-                           Instruction::GapPosition::START, pred_op,
-                           spill_operand);
-      }
-    }
-  }
-}
-
-
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8