Chromium Code Reviews

Unified Diff: src/compiler/register-allocator.cc

Issue 1612923003: Revert "Revert of [turbofan] optimize spills in defered blocks (patchset #3 id:240001 of https://co… (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 11 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/base/adapters.h"
 #include "src/compiler/linkage.h"
 #include "src/compiler/register-allocator.h"
 #include "src/string-stream.h"

 namespace v8 {
(...skipping 95 matching lines...)
       return 8;
     case MachineRepresentation::kNone:
       break;
   }
   UNREACHABLE();
   return 0;
 }

 }  // namespace

+class LiveRangeBound {
+ public:
+  explicit LiveRangeBound(LiveRange* range, bool skip)
+      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
+    DCHECK(!range->IsEmpty());
+  }
+
+  bool CanCover(LifetimePosition position) {
+    return start_ <= position && position < end_;
+  }
+
+  LiveRange* const range_;
+  const LifetimePosition start_;
+  const LifetimePosition end_;
+  const bool skip_;
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
+};
+
+
+struct FindResult {
+  LiveRange* cur_cover_;
+  LiveRange* pred_cover_;
+};
+
+
+class LiveRangeBoundArray {
+ public:
+  LiveRangeBoundArray() : length_(0), start_(nullptr) {}
+
+  bool ShouldInitialize() { return start_ == nullptr; }
+
+  void Initialize(Zone* zone, TopLevelLiveRange* range) {
+    length_ = range->GetChildCount();
+
+    start_ = zone->NewArray<LiveRangeBound>(length_);
+    LiveRangeBound* curr = start_;
+    // Normally, spilled ranges do not need connecting moves, because the spill
+    // location has been assigned at definition. For ranges spilled in deferred
+    // blocks, that is not the case, so we need to connect the spilled children.
+    for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
+      new (curr) LiveRangeBound(i, i->spilled());
+    }
+  }
+
+  LiveRangeBound* Find(const LifetimePosition position) const {
+    size_t left_index = 0;
+    size_t right_index = length_;
+    while (true) {
+      size_t current_index = left_index + (right_index - left_index) / 2;
+      DCHECK(right_index > current_index);
+      LiveRangeBound* bound = &start_[current_index];
+      if (bound->start_ <= position) {
+        if (position < bound->end_) return bound;
+        DCHECK(left_index < current_index);
+        left_index = current_index;
+      } else {
+        right_index = current_index;
+      }
+    }
+  }
+
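Find() above is a plain binary search: the children of a top-level live range form a sorted, non-overlapping sequence of half-open intervals [start_, end_), so any covered position can be located in O(log n). A minimal standalone sketch of the same loop, using toy types rather than the V8 classes:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Stand-in for LiveRangeBound: a half-open interval [start, end).
    struct Bound {
      int start;
      int end;
    };

    // Binary search over sorted, non-overlapping bounds; mirrors
    // LiveRangeBoundArray::Find. Assumes 'position' is covered by some bound.
    const Bound* Find(const std::vector<Bound>& bounds, int position) {
      size_t left_index = 0;
      size_t right_index = bounds.size();
      while (true) {
        size_t current_index = left_index + (right_index - left_index) / 2;
        assert(right_index > current_index);
        const Bound* bound = &bounds[current_index];
        if (bound->start <= position) {
          if (position < bound->end) return bound;  // covered: done.
          left_index = current_index;               // search to the right.
        } else {
          right_index = current_index;              // search to the left.
        }
      }
    }

    int main() {
      // Children of one live range, split at positions 4 and 9.
      std::vector<Bound> bounds = {{0, 4}, {4, 9}, {9, 20}};
      assert(Find(bounds, 0)->end == 4);
      assert(Find(bounds, 4)->end == 9);
      assert(Find(bounds, 19)->start == 9);
      return 0;
    }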
+  LiveRangeBound* FindPred(const InstructionBlock* pred) {
+    LifetimePosition pred_end =
+        LifetimePosition::InstructionFromInstructionIndex(
+            pred->last_instruction_index());
+    return Find(pred_end);
+  }
+
+  LiveRangeBound* FindSucc(const InstructionBlock* succ) {
+    LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
+        succ->first_instruction_index());
+    return Find(succ_start);
+  }
+
+  bool FindConnectableSubranges(const InstructionBlock* block,
+                                const InstructionBlock* pred,
+                                FindResult* result) const {
+    LifetimePosition pred_end =
+        LifetimePosition::InstructionFromInstructionIndex(
+            pred->last_instruction_index());
+    LiveRangeBound* bound = Find(pred_end);
+    result->pred_cover_ = bound->range_;
+    LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
+        block->first_instruction_index());
+
+    if (bound->CanCover(cur_start)) {
+      // Both blocks are covered by the same range, so there is nothing to
+      // connect.
+      return false;
+    }
+    bound = Find(cur_start);
+    if (bound->skip_) {
+      return false;
+    }
+    result->cur_cover_ = bound->range_;
+    DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
+    return (result->cur_cover_ != result->pred_cover_);
+  }
+
+ private:
+  size_t length_;
+  LiveRangeBound* start_;
+
+  DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
+};
+
+
+class LiveRangeFinder {
+ public:
+  explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
+      : data_(data),
+        bounds_length_(static_cast<int>(data_->live_ranges().size())),
+        bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
+        zone_(zone) {
+    for (int i = 0; i < bounds_length_; ++i) {
+      new (&bounds_[i]) LiveRangeBoundArray();
+    }
+  }
+
+  LiveRangeBoundArray* ArrayFor(int operand_index) {
+    DCHECK(operand_index < bounds_length_);
+    TopLevelLiveRange* range = data_->live_ranges()[operand_index];
+    DCHECK(range != nullptr && !range->IsEmpty());
+    LiveRangeBoundArray* array = &bounds_[operand_index];
+    if (array->ShouldInitialize()) {
+      array->Initialize(zone_, range);
+    }
+    return array;
+  }
+
+ private:
+  const RegisterAllocationData* const data_;
+  const int bounds_length_;
+  LiveRangeBoundArray* const bounds_;
+  Zone* const zone_;
+
+  DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
+};
+
+
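LiveRangeFinder above allocates one LiveRangeBoundArray slot per virtual register up front but defers the per-range setup until ArrayFor() first touches an index; ShouldInitialize() simply tests the null start_ pointer, so untouched registers cost only the empty header. A toy sketch of this lazy-initialization pattern (hypothetical names, std::vector standing in for the Zone array):

    #include <cassert>
    #include <cstdio>
    #include <vector>

    // Toy analog of LiveRangeBoundArray: empty until first use.
    struct BoundsFor {
      std::vector<int> bounds;  // stands in for the LiveRangeBound array
      bool ShouldInitialize() const { return bounds.empty(); }
      void Initialize(int vreg) {
        // The expensive per-range setup happens here, exactly once.
        bounds = {vreg, vreg + 1};
        std::printf("initialized bounds for vreg %d\n", vreg);
      }
    };

    class Finder {
     public:
      explicit Finder(int vreg_count) : arrays_(vreg_count) {}
      BoundsFor* ArrayFor(int vreg) {
        BoundsFor* array = &arrays_[vreg];
        if (array->ShouldInitialize()) array->Initialize(vreg);  // lazy
        return array;
      }
     private:
      std::vector<BoundsFor> arrays_;
    };

    int main() {
      Finder finder(100);
      finder.ArrayFor(7);  // initializes vreg 7 only
      finder.ArrayFor(7);  // second lookup does no extra work
      assert(!finder.ArrayFor(7)->ShouldInitialize());
      return 0;
    }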
+typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
+
+
+struct DelayedInsertionMapCompare {
+  bool operator()(const DelayedInsertionMapKey& a,
+                  const DelayedInsertionMapKey& b) const {
+    if (a.first == b.first) {
+      return a.second.Compare(b.second);
+    }
+    return a.first < b.first;
+  }
+};
+
+
+typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
+                DelayedInsertionMapCompare> DelayedInsertionMap;
+
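DelayedInsertionMap orders its keys first by ParallelMove pointer and only then by destination operand, so that when ConnectRanges later walks the map, all pending insertions for one ParallelMove are adjacent and can be flushed as a batch. A standalone sketch of the comparator idea with toy key types (the real key pairs a ParallelMove* with an InstructionOperand):

    #include <cstdio>
    #include <map>
    #include <utility>

    // Toy stand-ins: (parallel move, destination operand).
    using Key = std::pair<const void*, int>;

    struct KeyCompare {
      bool operator()(const Key& a, const Key& b) const {
        if (a.first == b.first) return a.second < b.second;  // by destination
        return a.first < b.first;                            // by move pointer
      }
    };

    int main() {
      // Mapped value = source operand (toy: just an int).
      std::map<Key, int, KeyCompare> delayed;
      int move_a = 0, move_b = 0;  // addresses stand in for ParallelMove*
      delayed[{&move_b, 2}] = 20;
      delayed[{&move_a, 1}] = 10;
      delayed[{&move_a, 3}] = 30;
      // Iteration groups all entries for the same ParallelMove together,
      // which is what lets the flush loop commit them one move at a time.
      for (const auto& entry : delayed)
        std::printf("move=%p dst=%d src=%d\n", entry.first.first,
                    entry.first.second, entry.second);
      return 0;
    }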

 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
                          void* hint, UsePositionHintType hint_type)
     : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
   DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
   bool register_beneficial = true;
   UsePositionType type = UsePositionType::kAny;
   if (operand_ != nullptr && operand_->IsUnallocated()) {
     const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
     if (unalloc->HasRegisterPolicy()) {
(...skipping 601 matching lines...)
 #endif


 void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
                                             InstructionOperand* operand) {
   DCHECK(HasNoSpillType());
   spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
       gap_index, operand, spill_move_insertion_locations_);
 }

-
-bool TopLevelLiveRange::TryCommitSpillInDeferredBlock(
-    InstructionSequence* code, const InstructionOperand& spill_operand) {
-  if (!IsSpilledOnlyInDeferredBlocks()) return false;
-
-  TRACE("Live Range %d will be spilled only in deferred blocks.\n", vreg());
-  // If we have ranges that aren't spilled but require the operand on the stack,
-  // make sure we insert the spill.
-  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
-    if (!child->spilled() &&
-        child->NextSlotPosition(child->Start()) != nullptr) {
-      Instruction* instr =
-          code->InstructionAt(child->Start().ToInstructionIndex());
-      // Insert spill at the end to let live range connections happen at START.
-      ParallelMove* move =
-          instr->GetOrCreateParallelMove(Instruction::END, code->zone());
-      InstructionOperand assigned = child->GetAssignedOperand();
-      if (TopLevel()->has_slot_use()) {
-        bool found = false;
-        for (MoveOperands* move_op : *move) {
-          if (move_op->IsEliminated()) continue;
-          if (move_op->source().Equals(assigned) &&
-              move_op->destination().Equals(spill_operand)) {
-            found = true;
-            break;
-          }
-        }
-        if (found) continue;
-      }
-
-      move->AddMove(assigned, spill_operand);
-    }
-  }
-
-  return true;
-}
-
-
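The deleted TryCommitSpillInDeferredBlock above, like the surviving CommitSpillMoves below, scans the gap's ParallelMove before adding a spill so it never duplicates a move with the same source and destination. A standalone sketch of that guard with toy operand types (ints instead of InstructionOperand):

    #include <cassert>
    #include <vector>

    // Toy stand-in for MoveOperands: a source/destination pair that the move
    // optimizer may have marked as eliminated.
    struct Move {
      int source;
      int destination;
      bool eliminated = false;
    };

    // Mirrors the duplicate check: only add (source -> destination) if no
    // live move with the same endpoints is already in the parallel move.
    void AddMoveIfMissing(std::vector<Move>* parallel_move, int source,
                          int destination) {
      for (const Move& m : *parallel_move) {
        if (m.eliminated) continue;
        if (m.source == source && m.destination == destination) return;
      }
      parallel_move->push_back({source, destination});
    }

    int main() {
      std::vector<Move> gap_moves;
      AddMoveIfMissing(&gap_moves, /*source=*/1, /*destination=*/42);
      AddMoveIfMissing(&gap_moves, 1, 42);  // duplicate: not added again
      assert(gap_moves.size() == 1);
      return 0;
    }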
 void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
                                          const InstructionOperand& op,
                                          bool might_be_duplicated) {
-  DCHECK_IMPLIES(op.IsConstant(), spill_move_insertion_locations() == nullptr);
+  DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
   Zone* zone = sequence->zone();

-  for (SpillMoveInsertionList* to_spill = spill_move_insertion_locations();
+  for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
        to_spill != nullptr; to_spill = to_spill->next) {
     Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
     ParallelMove* move =
         instr->GetOrCreateParallelMove(Instruction::START, zone);
     // Skip insertion if it's possible that the move exists already as a
     // constraint move from a fixed output register to a slot.
     if (might_be_duplicated || has_preassigned_slot()) {
       bool found = false;
       for (MoveOperands* move_op : *move) {
         if (move_op->IsEliminated()) continue;
(...skipping 2166 matching lines...)
     if (!range->HasSpillRange()) continue;
     if (range->IsSpilledOnlyInDeferredBlocks()) {
       for (LiveRange* child = range; child != nullptr; child = child->next()) {
         if (child->spilled()) {
           code->GetInstructionBlock(child->Start().ToInstructionIndex())
               ->mark_needs_frame();
         }
       }
     } else {
       TopLevelLiveRange::SpillMoveInsertionList* spills =
-          range->spill_move_insertion_locations();
+          range->GetSpillMoveInsertionLocations();
       DCHECK_NOT_NULL(spills);
       for (; spills != nullptr; spills = spills->next) {
         code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
       }
     }
   }
 }


 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
(...skipping 46 matching lines...)
     }

     if (!spill_operand.IsInvalid()) {
       // If this top level range has a child spilled in a deferred block, we use
       // the range and control flow connection mechanism instead of spilling at
       // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
       // phases. Normally, when we spill at definition, we do not insert a
       // connecting move when a successor child range is spilled - because the
       // spilled range picks up its value from the slot which was assigned at
       // definition. For ranges that are determined to spill only in deferred
-      // blocks, we let ConnectLiveRanges and ResolveControlFlow insert such
-      // moves between ranges. Because of how the ranges are split around
-      // deferred blocks, this amounts to spilling and filling inside such
-      // blocks.
-      if (!top_range->TryCommitSpillInDeferredBlock(data()->code(),
-                                                    spill_operand)) {
+      // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
+      // where a spill operand is expected, and then finalize by inserting the
+      // spills in the deferred blocks' dominators.
+      if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
         // Spill at definition if the range isn't spilled only in deferred
         // blocks.
         top_range->CommitSpillMoves(
             data()->code(), spill_operand,
             top_range->has_slot_use() || top_range->spilled());
       }
     }
   }
 }

(...skipping 130 matching lines...)
        DCHECK(!operand.IsStackSlot());
        DCHECK_EQ(MachineRepresentation::kTagged,
                  AllocatedOperand::cast(operand).representation());
        map->RecordReference(AllocatedOperand::cast(operand));
      }
    }
  }
}


-namespace {
-
-class LiveRangeBound {
- public:
-  explicit LiveRangeBound(const LiveRange* range, bool skip)
-      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
-    DCHECK(!range->IsEmpty());
-  }
-
-  bool CanCover(LifetimePosition position) {
-    return start_ <= position && position < end_;
-  }
-
-  const LiveRange* const range_;
-  const LifetimePosition start_;
-  const LifetimePosition end_;
-  const bool skip_;
-
- private:
-  DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
-};
-
-
-struct FindResult {
-  const LiveRange* cur_cover_;
-  const LiveRange* pred_cover_;
-};
-
-
-class LiveRangeBoundArray {
- public:
-  LiveRangeBoundArray() : length_(0), start_(nullptr) {}
-
-  bool ShouldInitialize() { return start_ == nullptr; }
-
-  void Initialize(Zone* zone, const TopLevelLiveRange* const range) {
-    length_ = range->GetChildCount();
-
-    start_ = zone->NewArray<LiveRangeBound>(length_);
-    LiveRangeBound* curr = start_;
-    // Normally, spilled ranges do not need connecting moves, because the spill
-    // location has been assigned at definition. For ranges spilled in deferred
-    // blocks, that is not the case, so we need to connect the spilled children.
-    bool spilled_in_blocks = range->IsSpilledOnlyInDeferredBlocks();
-    for (const LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
-      new (curr) LiveRangeBound(i, !spilled_in_blocks && i->spilled());
-    }
-  }
-
-  LiveRangeBound* Find(const LifetimePosition position) const {
-    size_t left_index = 0;
-    size_t right_index = length_;
-    while (true) {
-      size_t current_index = left_index + (right_index - left_index) / 2;
-      DCHECK(right_index > current_index);
-      LiveRangeBound* bound = &start_[current_index];
-      if (bound->start_ <= position) {
-        if (position < bound->end_) return bound;
-        DCHECK(left_index < current_index);
-        left_index = current_index;
-      } else {
-        right_index = current_index;
-      }
-    }
-  }
-
-  LiveRangeBound* FindPred(const InstructionBlock* pred) {
-    LifetimePosition pred_end =
-        LifetimePosition::InstructionFromInstructionIndex(
-            pred->last_instruction_index());
-    return Find(pred_end);
-  }
-
-  LiveRangeBound* FindSucc(const InstructionBlock* succ) {
-    LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
-        succ->first_instruction_index());
-    return Find(succ_start);
-  }
-
-  bool FindConnectableSubranges(const InstructionBlock* block,
-                                const InstructionBlock* pred,
-                                FindResult* result) const {
-    LifetimePosition pred_end =
-        LifetimePosition::InstructionFromInstructionIndex(
-            pred->last_instruction_index());
-    LiveRangeBound* bound = Find(pred_end);
-    result->pred_cover_ = bound->range_;
-    LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
-        block->first_instruction_index());
-
-    if (bound->CanCover(cur_start)) {
-      // Both blocks are covered by the same range, so there is nothing to
-      // connect.
-      return false;
-    }
-    bound = Find(cur_start);
-    if (bound->skip_) {
-      return false;
-    }
-    result->cur_cover_ = bound->range_;
-    DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
-    return (result->cur_cover_ != result->pred_cover_);
-  }
-
- private:
-  size_t length_;
-  LiveRangeBound* start_;
-
-  DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
-};
-
-
-class LiveRangeFinder {
- public:
-  explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
-      : data_(data),
-        bounds_length_(static_cast<int>(data_->live_ranges().size())),
-        bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
-        zone_(zone) {
-    for (int i = 0; i < bounds_length_; ++i) {
-      new (&bounds_[i]) LiveRangeBoundArray();
-    }
-  }
-
-  LiveRangeBoundArray* ArrayFor(int operand_index) {
-    DCHECK(operand_index < bounds_length_);
-    TopLevelLiveRange* range = data_->live_ranges()[operand_index];
-    DCHECK(range != nullptr && !range->IsEmpty());
-    LiveRangeBoundArray* array = &bounds_[operand_index];
-    if (array->ShouldInitialize()) {
-      array->Initialize(zone_, range);
-    }
-    return array;
-  }
-
- private:
-  const RegisterAllocationData* const data_;
-  const int bounds_length_;
-  LiveRangeBoundArray* const bounds_;
-  Zone* const zone_;
-
-  DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
-};
-
-
-typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
-
-
-struct DelayedInsertionMapCompare {
-  bool operator()(const DelayedInsertionMapKey& a,
-                  const DelayedInsertionMapKey& b) const {
-    if (a.first == b.first) {
-      return a.second.Compare(b.second);
-    }
-    return a.first < b.first;
-  }
-};
-
-
-typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
-                DelayedInsertionMapCompare> DelayedInsertionMap;
-
-}  // namespace
-
-
 LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
     : data_(data) {}


 bool LiveRangeConnector::CanEagerlyResolveControlFlow(
     const InstructionBlock* block) const {
   if (block->PredecessorCount() != 1) return false;
   return block->predecessors()[0].IsNext(block->rpo_number());
 }

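CanEagerlyResolveControlFlow encodes the cheap case: a block whose only predecessor is laid out immediately before it in reverse post-order needs no per-edge resolution, since ConnectRanges already handles straight-line transitions. A standalone sketch with integer block ids in place of RpoNumber:

    #include <cassert>
    #include <vector>

    // Toy model: control flow into a block can be resolved eagerly (during
    // ConnectRanges) iff the block has exactly one predecessor and that
    // predecessor is the block laid out directly before it.
    struct Block {
      std::vector<int> predecessors;  // block ids in reverse post-order
      int rpo_number;
    };

    bool CanEagerlyResolveControlFlow(const Block& block) {
      if (block.predecessors.size() != 1) return false;
      return block.predecessors[0] + 1 == block.rpo_number;  // "IsNext" check
    }

    int main() {
      assert(CanEagerlyResolveControlFlow({{2}, 3}));     // fall-through edge
      assert(!CanEagerlyResolveControlFlow({{0}, 3}));    // edge skips blocks
      assert(!CanEagerlyResolveControlFlow({{1, 2}, 3})); // merge point
      return 0;
    }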
(...skipping 10 matching lines...)
       LiveRangeBoundArray* array = finder.ArrayFor(iterator.Current());
       for (const RpoNumber& pred : block->predecessors()) {
         FindResult result;
         const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
         if (!array->FindConnectableSubranges(block, pred_block, &result)) {
           continue;
         }
         InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
         InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
         if (pred_op.Equals(cur_op)) continue;
+        if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
+          // We're doing a reload.
+          const LiveRange* current = result.cur_cover_;
+
+          if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
+              pred_block->IsDeferred()) {
+            // The spill location should be defined in pred_block, so add
+            // pred_block to the list of blocks requiring a spill operand.
+            current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
+                pred_block->rpo_number().ToInt());
+          }
+        }
         int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
         USE(move_loc);
         DCHECK_IMPLIES(
             result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
                 !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
             code()->GetInstructionBlock(move_loc)->IsDeferred());
       }
       iterator.Advance();
     }
   }
+
+  // At this stage, we collected blocks needing a spill operand from
+  // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
+  // deferred blocks.
+  for (TopLevelLiveRange* top : data()->live_ranges()) {
+    if (top == nullptr || top->IsEmpty() ||
+        !top->IsSpilledOnlyInDeferredBlocks())
+      continue;
+    CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
+  }
 }

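The reload test added above (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) classifies a connecting move by its operand kinds; for a range spilled only in deferred blocks, a reload out of a deferred predecessor means that predecessor must define the spill slot. A standalone sketch of the classification with a toy operand type:

    #include <cassert>

    // Toy operand: either a register or a non-register (slot/constant).
    struct Operand {
      bool is_register;
    };

    // Mirrors the test above: moving from a non-register into a register
    // across a control-flow edge is a reload, which means the spill slot must
    // hold a valid value at the end of the predecessor block.
    bool IsReload(const Operand& pred_op, const Operand& cur_op) {
      return !pred_op.is_register && cur_op.is_register;
    }

    int main() {
      assert(IsReload({false}, {true}));   // slot -> register: reload
      assert(!IsReload({true}, {false}));  // register -> slot: spill
      assert(!IsReload({true}, {true}));   // register -> register: shuffle
      return 0;
    }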

 int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
                                            const InstructionOperand& cur_op,
                                            const InstructionBlock* pred,
                                            const InstructionOperand& pred_op) {
   DCHECK(!pred_op.Equals(cur_op));
   int gap_index;
   Instruction::GapPosition position;
(...skipping 17 matching lines...)
   DelayedInsertionMap delayed_insertion_map(local_zone);
   for (TopLevelLiveRange* top_range : data()->live_ranges()) {
     if (top_range == nullptr) continue;
     bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
     LiveRange* first_range = top_range;
     for (LiveRange *second_range = first_range->next(); second_range != nullptr;
          first_range = second_range, second_range = second_range->next()) {
       LifetimePosition pos = second_range->Start();
       // Add gap move if the two live ranges touch and there is no block
       // boundary.
-      if (!connect_spilled && second_range->spilled()) continue;
+      if (second_range->spilled()) continue;
       if (first_range->End() != pos) continue;
       if (data()->IsBlockBoundary(pos) &&
           !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
         continue;
       }
       InstructionOperand prev_operand = first_range->GetAssignedOperand();
       InstructionOperand cur_operand = second_range->GetAssignedOperand();
       if (prev_operand.Equals(cur_operand)) continue;
       bool delay_insertion = false;
       Instruction::GapPosition gap_pos;
       int gap_index = pos.ToInstructionIndex();
+      if (connect_spilled && !prev_operand.IsAnyRegister() &&
+          cur_operand.IsAnyRegister()) {
+        const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
+        DCHECK(block->IsDeferred());
+        // Performing a reload in this block, meaning the spill operand must
+        // be defined here.
+        top_range->GetListOfBlocksRequiringSpillOperands()->Add(
+            block->rpo_number().ToInt());
+      }
+
       if (pos.IsGapPosition()) {
         gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
       } else {
         if (pos.IsStart()) {
           delay_insertion = true;
         } else {
           gap_index++;
         }
         gap_pos = delay_insertion ? Instruction::END : Instruction::START;
       }
-      // Fills or spills for spilled in deferred blocks ranges must happen
+      // Reloads or spills for ranges spilled in deferred blocks must happen
       // only in deferred blocks.
       DCHECK_IMPLIES(
           connect_spilled &&
               !(prev_operand.IsAnyRegister() && cur_operand.IsAnyRegister()),
           code()->GetInstructionBlock(gap_index)->IsDeferred());

       ParallelMove* move =
           code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
               gap_pos, code_zone());
       if (!delay_insertion) {
(...skipping 30 matching lines...)
     // Gather all MoveOperands for a single ParallelMove.
     MoveOperands* move =
         new (code_zone()) MoveOperands(it->first.second, it->second);
     MoveOperands* eliminate = moves->PrepareInsertAfter(move);
     to_insert.push_back(move);
     if (eliminate != nullptr) to_eliminate.push_back(eliminate);
   }
 }

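The delayed-insertion flush above exists because new moves discovered while a ParallelMove is being walked cannot be inserted into it mid-iteration; they are parked in the map and spliced in per ParallelMove afterwards (PrepareInsertAfter additionally eliminates conflicting moves, which this sketch omits). A toy version of the park-then-splice pattern:

    #include <cassert>
    #include <vector>

    struct Move {
      int source;
      int destination;
    };

    // Moves that must be added to a ParallelMove which is currently being
    // walked are gathered first and appended only after the walk, so the
    // iteration never sees (or fights with) its own insertions.
    void InsertDelayedMoves(std::vector<Move>* parallel_move,
                            const std::vector<Move>& delayed) {
      std::vector<Move> to_insert;
      for (const Move& m : delayed) {
        // In the real code, PrepareInsertAfter may also eliminate an
        // existing conflicting move at this point; this sketch only gathers.
        to_insert.push_back(m);
      }
      parallel_move->insert(parallel_move->end(), to_insert.begin(),
                            to_insert.end());
    }

    int main() {
      std::vector<Move> gap = {{1, 2}};
      InsertDelayedMoves(&gap, {{3, 4}, {5, 6}});
      assert(gap.size() == 3);
      return 0;
    }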

+void LiveRangeConnector::CommitSpillsInDeferredBlocks(
+    TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
+  DCHECK(range->IsSpilledOnlyInDeferredBlocks());
+  DCHECK(!range->spilled());
+
+  InstructionSequence* code = data()->code();
+  InstructionOperand spill_operand = range->GetSpillRangeOperand();
+
+  TRACE("Live Range %d will be spilled only in deferred blocks.\n",
+        range->vreg());
+  // If we have ranges that aren't spilled but require the operand on the stack,
+  // make sure we insert the spill.
+  for (const LiveRange* child = range; child != nullptr;
+       child = child->next()) {
+    for (const UsePosition* pos = child->first_pos(); pos != nullptr;
+         pos = pos->next()) {
+      if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
+        continue;
+      range->AddBlockRequiringSpillOperand(
+          code->GetInstructionBlock(pos->pos().ToInstructionIndex())
+              ->rpo_number());
+    }
+  }
+
+  ZoneQueue<int> worklist(temp_zone);
+
+  for (BitVector::Iterator iterator(
+           range->GetListOfBlocksRequiringSpillOperands());
+       !iterator.Done(); iterator.Advance()) {
+    worklist.push(iterator.Current());
+  }
+
+  // Seek the deferred blocks that dominate locations requiring spill operands,
+  // and spill there. We only need to spill at the start of such blocks.
+  BitVector done_blocks(
+      range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
+  while (!worklist.empty()) {
+    int block_id = worklist.front();
+    worklist.pop();
+    if (done_blocks.Contains(block_id)) continue;
+    done_blocks.Add(block_id);
+    const InstructionBlock* spill_block =
+        code->InstructionBlockAt(RpoNumber::FromInt(block_id));
+
+    for (const RpoNumber& pred : spill_block->predecessors()) {
+      const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
+
+      if (pred_block->IsDeferred()) {
+        worklist.push(pred_block->rpo_number().ToInt());
+      } else {
+        LifetimePosition pred_end =
+            LifetimePosition::InstructionFromInstructionIndex(
+                pred_block->last_instruction_index());
+
+        LiveRangeBound* bound = array->Find(pred_end);
+
+        InstructionOperand pred_op = bound->range_->GetAssignedOperand();
+
+        data()->AddGapMove(spill_block->first_instruction_index(),
+                           Instruction::GapPosition::START, pred_op,
+                           spill_operand);
+      }
+    }
+  }
+}
+
+
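CommitSpillsInDeferredBlocks walks backwards from every block that needs the value on the stack: deferred predecessors are pushed onto the worklist, and each edge entering deferred code from a non-deferred block gets the spill at the start of the deferred block, which dominates the uses. A standalone sketch of the traversal over a toy block graph (plain std containers instead of ZoneQueue/BitVector):

    #include <cstdio>
    #include <queue>
    #include <set>
    #include <vector>

    // Toy block graph for the traversal above: walk backwards from every
    // block that needs the spill operand; keep walking through deferred
    // predecessors, and emit one spill at the start of each deferred block
    // entered from non-deferred code.
    struct Block {
      bool deferred;
      std::vector<int> predecessors;
    };

    void PlaceSpills(const std::vector<Block>& blocks,
                     const std::vector<int>& needs_spill_operand) {
      std::queue<int> worklist;
      for (int b : needs_spill_operand) worklist.push(b);
      std::set<int> done;
      while (!worklist.empty()) {
        int id = worklist.front();
        worklist.pop();
        if (!done.insert(id).second) continue;  // already processed
        for (int pred : blocks[id].predecessors) {
          if (blocks[pred].deferred) {
            worklist.push(pred);  // spill must be available even earlier
          } else {
            // Crossing from hot code into deferred code: spill at the start
            // of block 'id', the deferred block dominating the uses.
            std::printf("spill at start of block %d (edge from %d)\n", id,
                        pred);
          }
        }
      }
    }

    int main() {
      // Block 0: hot entry; blocks 1 and 2: deferred, with 0 -> 1 -> 2.
      std::vector<Block> blocks = {{false, {}}, {true, {0}}, {true, {1}}};
      PlaceSpills(blocks, {2});  // block 2 reads the value from the stack
      return 0;                  // spill lands at the start of block 1
    }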
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
