| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/linkage.h" | 5 #include "src/compiler/linkage.h" |
| 6 #include "src/compiler/register-allocator.h" | 6 #include "src/compiler/register-allocator.h" |
| 7 #include "src/string-stream.h" | 7 #include "src/string-stream.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| (...skipping 12 matching lines...) |
| 23 static void TraceAlloc(const char* msg, ...) { | 23 static void TraceAlloc(const char* msg, ...) { |
| 24 if (FLAG_trace_alloc) { | 24 if (FLAG_trace_alloc) { |
| 25 va_list arguments; | 25 va_list arguments; |
| 26 va_start(arguments, msg); | 26 va_start(arguments, msg); |
| 27 base::OS::VPrint(msg, arguments); | 27 base::OS::VPrint(msg, arguments); |
| 28 va_end(arguments); | 28 va_end(arguments); |
| 29 } | 29 } |
| 30 } | 30 } |
| 31 | 31 |
| 32 | 32 |
| 33 static void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) { |
| 34 auto it = std::find(v->begin(), v->end(), range); |
| 35 DCHECK(it != v->end()); |
| 36 v->erase(it); |
| 37 } |
| 38 |
| 39 |
| 33 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand, | 40 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand, |
| 34 InstructionOperand* hint) | 41 InstructionOperand* hint) |
| 35 : operand_(operand), | 42 : operand_(operand), |
| 36 hint_(hint), | 43 hint_(hint), |
| 37 pos_(pos), | 44 pos_(pos), |
| 38 next_(NULL), | 45 next_(nullptr), |
| 39 requires_reg_(false), | 46 requires_reg_(false), |
| 40 register_beneficial_(true) { | 47 register_beneficial_(true) { |
| 41 if (operand_ != NULL && operand_->IsUnallocated()) { | 48 if (operand_ != nullptr && operand_->IsUnallocated()) { |
| 42 const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_); | 49 const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_); |
| 43 requires_reg_ = unalloc->HasRegisterPolicy(); | 50 requires_reg_ = unalloc->HasRegisterPolicy(); |
| 44 register_beneficial_ = !unalloc->HasAnyPolicy(); | 51 register_beneficial_ = !unalloc->HasAnyPolicy(); |
| 45 } | 52 } |
| 46 DCHECK(pos_.IsValid()); | 53 DCHECK(pos_.IsValid()); |
| 47 } | 54 } |
| 48 | 55 |
| 49 | 56 |
| 50 bool UsePosition::HasHint() const { | 57 bool UsePosition::HasHint() const { |
| 51 return hint_ != NULL && !hint_->IsUnallocated(); | 58 return hint_ != nullptr && !hint_->IsUnallocated(); |
| 52 } | 59 } |
| 53 | 60 |
| 54 | 61 |
| 55 bool UsePosition::RequiresRegister() const { return requires_reg_; } | 62 bool UsePosition::RequiresRegister() const { return requires_reg_; } |
| 56 | 63 |
| 57 | 64 |
| 58 bool UsePosition::RegisterIsBeneficial() const { return register_beneficial_; } | 65 bool UsePosition::RegisterIsBeneficial() const { return register_beneficial_; } |
| 59 | 66 |
| 60 | 67 |
| 61 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) { | 68 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) { |
| 62 DCHECK(Contains(pos) && pos.Value() != start().Value()); | 69 DCHECK(Contains(pos) && pos.Value() != start().Value()); |
| 63 UseInterval* after = new (zone) UseInterval(pos, end_); | 70 auto after = new (zone) UseInterval(pos, end_); |
| 64 after->next_ = next_; | 71 after->next_ = next_; |
| 65 next_ = after; | 72 next_ = after; |
| 66 end_ = pos; | 73 end_ = pos; |
| 67 } | 74 } |
| 68 | 75 |
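For illustration, a minimal standalone sketch of the splitting logic in UseInterval::SplitAt above. The simplified Interval type and plain heap allocation are stand-ins for V8's zone-allocated UseInterval, not the actual classes:

    // Sketch only: simplified stand-in for UseInterval, using plain `new`
    // instead of V8's Zone allocator.
    struct Interval {
      int start;
      int end;
      Interval* next;
    };

    // Split [start, end) at pos into [start, pos) and [pos, end), keeping
    // the chain linked, mirroring UseInterval::SplitAt.
    void SplitAt(Interval* interval, int pos) {
      // Same precondition as the DCHECK above: pos lies inside the
      // interval and is not its start.
      Interval* after = new Interval{pos, interval->end, interval->next};
      interval->next = after;
      interval->end = pos;
    }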
| 69 | 76 |
| 70 struct LiveRange::SpillAtDefinitionList : ZoneObject { | 77 struct LiveRange::SpillAtDefinitionList : ZoneObject { |
| 71 SpillAtDefinitionList(int gap_index, InstructionOperand* operand, | 78 SpillAtDefinitionList(int gap_index, InstructionOperand* operand, |
| 72 SpillAtDefinitionList* next) | 79 SpillAtDefinitionList* next) |
| 73 : gap_index(gap_index), operand(operand), next(next) {} | 80 : gap_index(gap_index), operand(operand), next(next) {} |
| 74 const int gap_index; | 81 const int gap_index; |
| 75 InstructionOperand* const operand; | 82 InstructionOperand* const operand; |
| 76 SpillAtDefinitionList* const next; | 83 SpillAtDefinitionList* const next; |
| 77 }; | 84 }; |
| 78 | 85 |
| 79 | 86 |
| 80 #ifdef DEBUG | 87 #ifdef DEBUG |
| 81 | 88 |
| 82 | 89 |
| 83 void LiveRange::Verify() const { | 90 void LiveRange::Verify() const { |
| 84 UsePosition* cur = first_pos_; | 91 UsePosition* cur = first_pos_; |
| 85 while (cur != NULL) { | 92 while (cur != nullptr) { |
| 86 DCHECK(Start().Value() <= cur->pos().Value() && | 93 DCHECK(Start().Value() <= cur->pos().Value() && |
| 87 cur->pos().Value() <= End().Value()); | 94 cur->pos().Value() <= End().Value()); |
| 88 cur = cur->next(); | 95 cur = cur->next(); |
| 89 } | 96 } |
| 90 } | 97 } |
| 91 | 98 |
| 92 | 99 |
| 93 bool LiveRange::HasOverlap(UseInterval* target) const { | 100 bool LiveRange::HasOverlap(UseInterval* target) const { |
| 94 UseInterval* current_interval = first_interval_; | 101 UseInterval* current_interval = first_interval_; |
| 95 while (current_interval != NULL) { | 102 while (current_interval != nullptr) { |
| 96 // Intervals overlap if the start of one is contained in the other. | 103 // Intervals overlap if the start of one is contained in the other. |
| 97 if (current_interval->Contains(target->start()) || | 104 if (current_interval->Contains(target->start()) || |
| 98 target->Contains(current_interval->start())) { | 105 target->Contains(current_interval->start())) { |
| 99 return true; | 106 return true; |
| 100 } | 107 } |
| 101 current_interval = current_interval->next(); | 108 current_interval = current_interval->next(); |
| 102 } | 109 } |
| 103 return false; | 110 return false; |
| 104 } | 111 } |
| 105 | 112 |
| (...skipping 41 matching lines...) |
| 147 DCHECK(HasNoSpillType()); | 154 DCHECK(HasNoSpillType()); |
| 148 spills_at_definition_ = new (zone) | 155 spills_at_definition_ = new (zone) |
| 149 SpillAtDefinitionList(gap_index, operand, spills_at_definition_); | 156 SpillAtDefinitionList(gap_index, operand, spills_at_definition_); |
| 150 } | 157 } |
| 151 | 158 |
| 152 | 159 |
| 153 void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence, | 160 void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence, |
| 154 InstructionOperand* op) { | 161 InstructionOperand* op) { |
| 155 auto to_spill = TopLevel()->spills_at_definition_; | 162 auto to_spill = TopLevel()->spills_at_definition_; |
| 156 if (to_spill == nullptr) return; | 163 if (to_spill == nullptr) return; |
| 157 Zone* zone = sequence->zone(); | 164 auto zone = sequence->zone(); |
| 158 for (; to_spill != nullptr; to_spill = to_spill->next) { | 165 for (; to_spill != nullptr; to_spill = to_spill->next) { |
| 159 auto gap = sequence->GapAt(to_spill->gap_index); | 166 auto gap = sequence->GapAt(to_spill->gap_index); |
| 160 auto move = gap->GetOrCreateParallelMove(GapInstruction::START, zone); | 167 auto move = gap->GetOrCreateParallelMove(GapInstruction::START, zone); |
| 161 move->AddMove(to_spill->operand, op, zone); | 168 move->AddMove(to_spill->operand, op, zone); |
| 162 } | 169 } |
| 163 TopLevel()->spills_at_definition_ = nullptr; | 170 TopLevel()->spills_at_definition_ = nullptr; |
| 164 } | 171 } |
| 165 | 172 |
| 166 | 173 |
| 167 void LiveRange::SetSpillOperand(InstructionOperand* operand) { | 174 void LiveRange::SetSpillOperand(InstructionOperand* operand) { |
| (...skipping 16 matching lines...) |
| 184 DCHECK(HasSpillRange()); | 191 DCHECK(HasSpillRange()); |
| 185 DCHECK(!operand->IsUnallocated()); | 192 DCHECK(!operand->IsUnallocated()); |
| 186 DCHECK(!IsChild()); | 193 DCHECK(!IsChild()); |
| 187 spill_type_ = SpillType::kSpillOperand; | 194 spill_type_ = SpillType::kSpillOperand; |
| 188 spill_operand_ = operand; | 195 spill_operand_ = operand; |
| 189 } | 196 } |
| 190 | 197 |
| 191 | 198 |
| 192 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { | 199 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { |
| 193 UsePosition* use_pos = last_processed_use_; | 200 UsePosition* use_pos = last_processed_use_; |
| 194 if (use_pos == NULL) use_pos = first_pos(); | 201 if (use_pos == nullptr) use_pos = first_pos(); |
| 195 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) { | 202 while (use_pos != nullptr && use_pos->pos().Value() < start.Value()) { |
| 196 use_pos = use_pos->next(); | 203 use_pos = use_pos->next(); |
| 197 } | 204 } |
| 198 last_processed_use_ = use_pos; | 205 last_processed_use_ = use_pos; |
| 199 return use_pos; | 206 return use_pos; |
| 200 } | 207 } |
| 201 | 208 |
| 202 | 209 |
| 203 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial( | 210 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial( |
| 204 LifetimePosition start) { | 211 LifetimePosition start) { |
| 205 UsePosition* pos = NextUsePosition(start); | 212 UsePosition* pos = NextUsePosition(start); |
| 206 while (pos != NULL && !pos->RegisterIsBeneficial()) { | 213 while (pos != nullptr && !pos->RegisterIsBeneficial()) { |
| 207 pos = pos->next(); | 214 pos = pos->next(); |
| 208 } | 215 } |
| 209 return pos; | 216 return pos; |
| 210 } | 217 } |
| 211 | 218 |
| 212 | 219 |
| 213 UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial( | 220 UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial( |
| 214 LifetimePosition start) { | 221 LifetimePosition start) { |
| 215 UsePosition* pos = first_pos(); | 222 auto pos = first_pos(); |
| 216 UsePosition* prev = NULL; | 223 UsePosition* prev = nullptr; |
| 217 while (pos != NULL && pos->pos().Value() < start.Value()) { | 224 while (pos != nullptr && pos->pos().Value() < start.Value()) { |
| 218 if (pos->RegisterIsBeneficial()) prev = pos; | 225 if (pos->RegisterIsBeneficial()) prev = pos; |
| 219 pos = pos->next(); | 226 pos = pos->next(); |
| 220 } | 227 } |
| 221 return prev; | 228 return prev; |
| 222 } | 229 } |
| 223 | 230 |
| 224 | 231 |
| 225 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) { | 232 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) { |
| 226 UsePosition* pos = NextUsePosition(start); | 233 UsePosition* pos = NextUsePosition(start); |
| 227 while (pos != NULL && !pos->RequiresRegister()) { | 234 while (pos != nullptr && !pos->RequiresRegister()) { |
| 228 pos = pos->next(); | 235 pos = pos->next(); |
| 229 } | 236 } |
| 230 return pos; | 237 return pos; |
| 231 } | 238 } |
| 232 | 239 |
| 233 | 240 |
| 234 bool LiveRange::CanBeSpilled(LifetimePosition pos) { | 241 bool LiveRange::CanBeSpilled(LifetimePosition pos) { |
| 235 // We cannot spill a live range that has a use requiring a register | 242 // We cannot spill a live range that has a use requiring a register |
| 236 // at the current or the immediate next position. | 243 // at the current or the immediate next position. |
| 237 UsePosition* use_pos = NextRegisterPosition(pos); | 244 auto use_pos = NextRegisterPosition(pos); |
| 238 if (use_pos == NULL) return true; | 245 if (use_pos == nullptr) return true; |
| 239 return use_pos->pos().Value() > | 246 return use_pos->pos().Value() > |
| 240 pos.NextInstruction().InstructionEnd().Value(); | 247 pos.NextInstruction().InstructionEnd().Value(); |
| 241 } | 248 } |
| 242 | 249 |
| 243 | 250 |
| 244 InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) const { | 251 InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) const { |
| 245 InstructionOperand* op = nullptr; | 252 InstructionOperand* op = nullptr; |
| 246 if (HasRegisterAssigned()) { | 253 if (HasRegisterAssigned()) { |
| 247 DCHECK(!IsSpilled()); | 254 DCHECK(!IsSpilled()); |
| 248 switch (Kind()) { | 255 switch (Kind()) { |
| (...skipping 11 matching lines...) |
| 260 DCHECK(!HasRegisterAssigned()); | 267 DCHECK(!HasRegisterAssigned()); |
| 261 op = TopLevel()->GetSpillOperand(); | 268 op = TopLevel()->GetSpillOperand(); |
| 262 DCHECK(!op->IsUnallocated()); | 269 DCHECK(!op->IsUnallocated()); |
| 263 } | 270 } |
| 264 return op; | 271 return op; |
| 265 } | 272 } |
| 266 | 273 |
| 267 | 274 |
| 268 UseInterval* LiveRange::FirstSearchIntervalForPosition( | 275 UseInterval* LiveRange::FirstSearchIntervalForPosition( |
| 269 LifetimePosition position) const { | 276 LifetimePosition position) const { |
| 270 if (current_interval_ == NULL) return first_interval_; | 277 if (current_interval_ == nullptr) return first_interval_; |
| 271 if (current_interval_->start().Value() > position.Value()) { | 278 if (current_interval_->start().Value() > position.Value()) { |
| 272 current_interval_ = NULL; | 279 current_interval_ = nullptr; |
| 273 return first_interval_; | 280 return first_interval_; |
| 274 } | 281 } |
| 275 return current_interval_; | 282 return current_interval_; |
| 276 } | 283 } |
| 277 | 284 |
| 278 | 285 |
| 279 void LiveRange::AdvanceLastProcessedMarker( | 286 void LiveRange::AdvanceLastProcessedMarker( |
| 280 UseInterval* to_start_of, LifetimePosition but_not_past) const { | 287 UseInterval* to_start_of, LifetimePosition but_not_past) const { |
| 281 if (to_start_of == NULL) return; | 288 if (to_start_of == nullptr) return; |
| 282 if (to_start_of->start().Value() > but_not_past.Value()) return; | 289 if (to_start_of->start().Value() > but_not_past.Value()) return; |
| 283 LifetimePosition start = current_interval_ == NULL | 290 auto start = current_interval_ == nullptr ? LifetimePosition::Invalid() |
| 284 ? LifetimePosition::Invalid() | 291 : current_interval_->start(); |
| 285 : current_interval_->start(); | |
| 286 if (to_start_of->start().Value() > start.Value()) { | 292 if (to_start_of->start().Value() > start.Value()) { |
| 287 current_interval_ = to_start_of; | 293 current_interval_ = to_start_of; |
| 288 } | 294 } |
| 289 } | 295 } |
| 290 | 296 |
| 291 | 297 |
| 292 void LiveRange::SplitAt(LifetimePosition position, LiveRange* result, | 298 void LiveRange::SplitAt(LifetimePosition position, LiveRange* result, |
| 293 Zone* zone) { | 299 Zone* zone) { |
| 294 DCHECK(Start().Value() < position.Value()); | 300 DCHECK(Start().Value() < position.Value()); |
| 295 DCHECK(result->IsEmpty()); | 301 DCHECK(result->IsEmpty()); |
| 296 // Find the last interval that ends before the position. If the | 302 // Find the last interval that ends before the position. If the |
| 297 // position is contained in one of the intervals in the chain, we | 303 // position is contained in one of the intervals in the chain, we |
| 298 // split that interval and use the first part. | 304 // split that interval and use the first part. |
| 299 UseInterval* current = FirstSearchIntervalForPosition(position); | 305 auto current = FirstSearchIntervalForPosition(position); |
| 300 | 306 |
| 301 // If the split position coincides with the beginning of a use interval | 307 // If the split position coincides with the beginning of a use interval |
| 302 // we need to split use positions in a special way. | 308 // we need to split use positions in a special way. |
| 303 bool split_at_start = false; | 309 bool split_at_start = false; |
| 304 | 310 |
| 305 if (current->start().Value() == position.Value()) { | 311 if (current->start().Value() == position.Value()) { |
| 306 // When splitting at start we need to locate the previous use interval. | 312 // When splitting at start we need to locate the previous use interval. |
| 307 current = first_interval_; | 313 current = first_interval_; |
| 308 } | 314 } |
| 309 | 315 |
| 310 while (current != NULL) { | 316 while (current != nullptr) { |
| 311 if (current->Contains(position)) { | 317 if (current->Contains(position)) { |
| 312 current->SplitAt(position, zone); | 318 current->SplitAt(position, zone); |
| 313 break; | 319 break; |
| 314 } | 320 } |
| 315 UseInterval* next = current->next(); | 321 auto next = current->next(); |
| 316 if (next->start().Value() >= position.Value()) { | 322 if (next->start().Value() >= position.Value()) { |
| 317 split_at_start = (next->start().Value() == position.Value()); | 323 split_at_start = (next->start().Value() == position.Value()); |
| 318 break; | 324 break; |
| 319 } | 325 } |
| 320 current = next; | 326 current = next; |
| 321 } | 327 } |
| 322 | 328 |
| 323 // Partition original use intervals to the two live ranges. | 329 // Partition original use intervals to the two live ranges. |
| 324 UseInterval* before = current; | 330 auto before = current; |
| 325 UseInterval* after = before->next(); | 331 auto after = before->next(); |
| 326 result->last_interval_ = | 332 result->last_interval_ = |
| 327 (last_interval_ == before) | 333 (last_interval_ == before) |
| 328 ? after // Only interval in the range after split. | 334 ? after // Only interval in the range after split. |
| 329 : last_interval_; // Last interval of the original range. | 335 : last_interval_; // Last interval of the original range. |
| 330 result->first_interval_ = after; | 336 result->first_interval_ = after; |
| 331 last_interval_ = before; | 337 last_interval_ = before; |
| 332 | 338 |
| 333 // Find the last use position before the split and the first use | 339 // Find the last use position before the split and the first use |
| 334 // position after it. | 340 // position after it. |
| 335 UsePosition* use_after = first_pos_; | 341 auto use_after = first_pos_; |
| 336 UsePosition* use_before = NULL; | 342 UsePosition* use_before = nullptr; |
| 337 if (split_at_start) { | 343 if (split_at_start) { |
| 338 // The split position coincides with the beginning of a use interval (the | 344 // The split position coincides with the beginning of a use interval (the |
| 339 // end of a lifetime hole). Use at this position should be attributed to | 345 // end of a lifetime hole). Use at this position should be attributed to |
| 340 // the split child because the split child owns the use interval covering it. | 346 // the split child because the split child owns the use interval covering it. |
| 341 while (use_after != NULL && use_after->pos().Value() < position.Value()) { | 347 while (use_after != nullptr && |
| 348 use_after->pos().Value() < position.Value()) { |
| 342 use_before = use_after; | 349 use_before = use_after; |
| 343 use_after = use_after->next(); | 350 use_after = use_after->next(); |
| 344 } | 351 } |
| 345 } else { | 352 } else { |
| 346 while (use_after != NULL && use_after->pos().Value() <= position.Value()) { | 353 while (use_after != nullptr && |
| 354 use_after->pos().Value() <= position.Value()) { |
| 347 use_before = use_after; | 355 use_before = use_after; |
| 348 use_after = use_after->next(); | 356 use_after = use_after->next(); |
| 349 } | 357 } |
| 350 } | 358 } |
| 351 | 359 |
| 352 // Partition original use positions to the two live ranges. | 360 // Partition original use positions to the two live ranges. |
| 353 if (use_before != NULL) { | 361 if (use_before != nullptr) { |
| 354 use_before->next_ = NULL; | 362 use_before->next_ = nullptr; |
| 355 } else { | 363 } else { |
| 356 first_pos_ = NULL; | 364 first_pos_ = nullptr; |
| 357 } | 365 } |
| 358 result->first_pos_ = use_after; | 366 result->first_pos_ = use_after; |
| 359 | 367 |
| 360 // Discard cached iteration state. It might be pointing | 368 // Discard cached iteration state. It might be pointing |
| 361 // to the use that no longer belongs to this live range. | 369 // to the use that no longer belongs to this live range. |
| 362 last_processed_use_ = NULL; | 370 last_processed_use_ = nullptr; |
| 363 current_interval_ = NULL; | 371 current_interval_ = nullptr; |
| 364 | 372 |
| 365 // Link the new live range in the chain before any of the other | 373 // Link the new live range in the chain before any of the other |
| 366 // ranges linked from the range before the split. | 374 // ranges linked from the range before the split. |
| 367 result->parent_ = (parent_ == NULL) ? this : parent_; | 375 result->parent_ = (parent_ == nullptr) ? this : parent_; |
| 368 result->kind_ = result->parent_->kind_; | 376 result->kind_ = result->parent_->kind_; |
| 369 result->next_ = next_; | 377 result->next_ = next_; |
| 370 next_ = result; | 378 next_ = result; |
| 371 | 379 |
| 372 #ifdef DEBUG | 380 #ifdef DEBUG |
| 373 Verify(); | 381 Verify(); |
| 374 result->Verify(); | 382 result->Verify(); |
| 375 #endif | 383 #endif |
| 376 } | 384 } |
| 377 | 385 |
| 378 | 386 |
| 379 // This implements an ordering on live ranges so that they are ordered by their | 387 // This implements an ordering on live ranges so that they are ordered by their |
| 380 // start positions. This is needed for the correctness of the register | 388 // start positions. This is needed for the correctness of the register |
| 381 // allocation algorithm. If two live ranges start at the same offset then there | 389 // allocation algorithm. If two live ranges start at the same offset then there |
| 382 // is a tie breaker based on where the value is first used. This part of the | 390 // is a tie breaker based on where the value is first used. This part of the |
| 383 // ordering is merely a heuristic. | 391 // ordering is merely a heuristic. |
| 384 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const { | 392 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const { |
| 385 LifetimePosition start = Start(); | 393 LifetimePosition start = Start(); |
| 386 LifetimePosition other_start = other->Start(); | 394 LifetimePosition other_start = other->Start(); |
| 387 if (start.Value() == other_start.Value()) { | 395 if (start.Value() == other_start.Value()) { |
| 388 UsePosition* pos = first_pos(); | 396 UsePosition* pos = first_pos(); |
| 389 if (pos == NULL) return false; | 397 if (pos == nullptr) return false; |
| 390 UsePosition* other_pos = other->first_pos(); | 398 UsePosition* other_pos = other->first_pos(); |
| 391 if (other_pos == NULL) return true; | 399 if (other_pos == nullptr) return true; |
| 392 return pos->pos().Value() < other_pos->pos().Value(); | 400 return pos->pos().Value() < other_pos->pos().Value(); |
| 393 } | 401 } |
| 394 return start.Value() < other_start.Value(); | 402 return start.Value() < other_start.Value(); |
| 395 } | 403 } |
| 396 | 404 |
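The ordering above can be read as a strict weak ordering; a hedged sketch of it as a standalone comparator follows. The Range type and its fields are hypothetical simplifications (V8 calls ShouldBeAllocatedBefore directly when inserting into the unhandled list):

    #include <climits>

    // Sketch only: simplified stand-in for LiveRange; first_use is INT_MAX
    // when the range has no use positions, matching the null checks above.
    struct Range {
      int start;
      int first_use;
    };

    // Orders ranges by start position; ties are broken by the position of
    // the first use, so among equals a range with no uses sorts last.
    bool AllocatedBefore(const Range& a, const Range& b) {
      if (a.start == b.start) return a.first_use < b.first_use;
      return a.start < b.start;
    }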
| 397 | 405 |
| 398 void LiveRange::ShortenTo(LifetimePosition start) { | 406 void LiveRange::ShortenTo(LifetimePosition start) { |
| 399 TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value()); | 407 TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value()); |
| 400 DCHECK(first_interval_ != NULL); | 408 DCHECK(first_interval_ != nullptr); |
| 401 DCHECK(first_interval_->start().Value() <= start.Value()); | 409 DCHECK(first_interval_->start().Value() <= start.Value()); |
| 402 DCHECK(start.Value() < first_interval_->end().Value()); | 410 DCHECK(start.Value() < first_interval_->end().Value()); |
| 403 first_interval_->set_start(start); | 411 first_interval_->set_start(start); |
| 404 } | 412 } |
| 405 | 413 |
| 406 | 414 |
| 407 void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end, | 415 void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end, |
| 408 Zone* zone) { | 416 Zone* zone) { |
| 409 TraceAlloc("Ensure live range %d in interval [%d %d[\n", id_, start.Value(), | 417 TraceAlloc("Ensure live range %d in interval [%d %d[\n", id_, start.Value(), |
| 410 end.Value()); | 418 end.Value()); |
| 411 LifetimePosition new_end = end; | 419 auto new_end = end; |
| 412 while (first_interval_ != NULL && | 420 while (first_interval_ != nullptr && |
| 413 first_interval_->start().Value() <= end.Value()) { | 421 first_interval_->start().Value() <= end.Value()) { |
| 414 if (first_interval_->end().Value() > end.Value()) { | 422 if (first_interval_->end().Value() > end.Value()) { |
| 415 new_end = first_interval_->end(); | 423 new_end = first_interval_->end(); |
| 416 } | 424 } |
| 417 first_interval_ = first_interval_->next(); | 425 first_interval_ = first_interval_->next(); |
| 418 } | 426 } |
| 419 | 427 |
| 420 UseInterval* new_interval = new (zone) UseInterval(start, new_end); | 428 auto new_interval = new (zone) UseInterval(start, new_end); |
| 421 new_interval->next_ = first_interval_; | 429 new_interval->next_ = first_interval_; |
| 422 first_interval_ = new_interval; | 430 first_interval_ = new_interval; |
| 423 if (new_interval->next() == NULL) { | 431 if (new_interval->next() == nullptr) { |
| 424 last_interval_ = new_interval; | 432 last_interval_ = new_interval; |
| 425 } | 433 } |
| 426 } | 434 } |
| 427 | 435 |
| 428 | 436 |
| 429 void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end, | 437 void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end, |
| 430 Zone* zone) { | 438 Zone* zone) { |
| 431 TraceAlloc("Add to live range %d interval [%d %d[\n", id_, start.Value(), | 439 TraceAlloc("Add to live range %d interval [%d %d[\n", id_, start.Value(), |
| 432 end.Value()); | 440 end.Value()); |
| 433 if (first_interval_ == NULL) { | 441 if (first_interval_ == nullptr) { |
| 434 UseInterval* interval = new (zone) UseInterval(start, end); | 442 auto interval = new (zone) UseInterval(start, end); |
| 435 first_interval_ = interval; | 443 first_interval_ = interval; |
| 436 last_interval_ = interval; | 444 last_interval_ = interval; |
| 437 } else { | 445 } else { |
| 438 if (end.Value() == first_interval_->start().Value()) { | 446 if (end.Value() == first_interval_->start().Value()) { |
| 439 first_interval_->set_start(start); | 447 first_interval_->set_start(start); |
| 440 } else if (end.Value() < first_interval_->start().Value()) { | 448 } else if (end.Value() < first_interval_->start().Value()) { |
| 441 UseInterval* interval = new (zone) UseInterval(start, end); | 449 auto interval = new (zone) UseInterval(start, end); |
| 442 interval->set_next(first_interval_); | 450 interval->set_next(first_interval_); |
| 443 first_interval_ = interval; | 451 first_interval_ = interval; |
| 444 } else { | 452 } else { |
| 445 // The order of instruction processing (see ProcessInstructions) guarantees | 453 // The order of instruction processing (see ProcessInstructions) guarantees |
| 446 // that each new use interval either precedes or intersects with | 454 // that each new use interval either precedes or intersects with |
| 447 // the last added interval. | 455 // the last added interval. |
| 448 DCHECK(start.Value() < first_interval_->end().Value()); | 456 DCHECK(start.Value() < first_interval_->end().Value()); |
| 449 first_interval_->start_ = Min(start, first_interval_->start_); | 457 first_interval_->start_ = Min(start, first_interval_->start_); |
| 450 first_interval_->end_ = Max(end, first_interval_->end_); | 458 first_interval_->end_ = Max(end, first_interval_->end_); |
| 451 } | 459 } |
| 452 } | 460 } |
| 453 } | 461 } |
| 454 | 462 |
| 455 | 463 |
| 456 void LiveRange::AddUsePosition(LifetimePosition pos, | 464 void LiveRange::AddUsePosition(LifetimePosition pos, |
| 457 InstructionOperand* operand, | 465 InstructionOperand* operand, |
| 458 InstructionOperand* hint, Zone* zone) { | 466 InstructionOperand* hint, Zone* zone) { |
| 459 TraceAlloc("Add to live range %d use position %d\n", id_, pos.Value()); | 467 TraceAlloc("Add to live range %d use position %d\n", id_, pos.Value()); |
| 460 UsePosition* use_pos = new (zone) UsePosition(pos, operand, hint); | 468 auto use_pos = new (zone) UsePosition(pos, operand, hint); |
| 461 UsePosition* prev_hint = NULL; | 469 UsePosition* prev_hint = nullptr; |
| 462 UsePosition* prev = NULL; | 470 UsePosition* prev = nullptr; |
| 463 UsePosition* current = first_pos_; | 471 auto current = first_pos_; |
| 464 while (current != NULL && current->pos().Value() < pos.Value()) { | 472 while (current != nullptr && current->pos().Value() < pos.Value()) { |
| 465 prev_hint = current->HasHint() ? current : prev_hint; | 473 prev_hint = current->HasHint() ? current : prev_hint; |
| 466 prev = current; | 474 prev = current; |
| 467 current = current->next(); | 475 current = current->next(); |
| 468 } | 476 } |
| 469 | 477 |
| 470 if (prev == NULL) { | 478 if (prev == nullptr) { |
| 471 use_pos->set_next(first_pos_); | 479 use_pos->set_next(first_pos_); |
| 472 first_pos_ = use_pos; | 480 first_pos_ = use_pos; |
| 473 } else { | 481 } else { |
| 474 use_pos->next_ = prev->next_; | 482 use_pos->next_ = prev->next_; |
| 475 prev->next_ = use_pos; | 483 prev->next_ = use_pos; |
| 476 } | 484 } |
| 477 | 485 |
| 478 if (prev_hint == NULL && use_pos->HasHint()) { | 486 if (prev_hint == nullptr && use_pos->HasHint()) { |
| 479 current_hint_operand_ = hint; | 487 current_hint_operand_ = hint; |
| 480 } | 488 } |
| 481 } | 489 } |
| 482 | 490 |
| 483 | 491 |
| 484 void LiveRange::ConvertUsesToOperand(InstructionOperand* op) { | 492 void LiveRange::ConvertUsesToOperand(InstructionOperand* op) { |
| 485 UsePosition* use_pos = first_pos(); | 493 auto use_pos = first_pos(); |
| 486 while (use_pos != NULL) { | 494 while (use_pos != nullptr) { |
| 487 DCHECK(Start().Value() <= use_pos->pos().Value() && | 495 DCHECK(Start().Value() <= use_pos->pos().Value() && |
| 488 use_pos->pos().Value() <= End().Value()); | 496 use_pos->pos().Value() <= End().Value()); |
| 489 | 497 |
| 490 if (use_pos->HasOperand()) { | 498 if (use_pos->HasOperand()) { |
| 491 DCHECK(op->IsRegister() || op->IsDoubleRegister() || | 499 DCHECK(op->IsRegister() || op->IsDoubleRegister() || |
| 492 !use_pos->RequiresRegister()); | 500 !use_pos->RequiresRegister()); |
| 493 use_pos->operand()->ConvertTo(op->kind(), op->index()); | 501 use_pos->operand()->ConvertTo(op->kind(), op->index()); |
| 494 } | 502 } |
| 495 use_pos = use_pos->next(); | 503 use_pos = use_pos->next(); |
| 496 } | 504 } |
| 497 } | 505 } |
| 498 | 506 |
| 499 | 507 |
| 500 bool LiveRange::CanCover(LifetimePosition position) const { | 508 bool LiveRange::CanCover(LifetimePosition position) const { |
| 501 if (IsEmpty()) return false; | 509 if (IsEmpty()) return false; |
| 502 return Start().Value() <= position.Value() && | 510 return Start().Value() <= position.Value() && |
| 503 position.Value() < End().Value(); | 511 position.Value() < End().Value(); |
| 504 } | 512 } |
| 505 | 513 |
| 506 | 514 |
| 507 bool LiveRange::Covers(LifetimePosition position) { | 515 bool LiveRange::Covers(LifetimePosition position) { |
| 508 if (!CanCover(position)) return false; | 516 if (!CanCover(position)) return false; |
| 509 UseInterval* start_search = FirstSearchIntervalForPosition(position); | 517 auto start_search = FirstSearchIntervalForPosition(position); |
| 510 for (UseInterval* interval = start_search; interval != NULL; | 518 for (auto interval = start_search; interval != nullptr; |
| 511 interval = interval->next()) { | 519 interval = interval->next()) { |
| 512 DCHECK(interval->next() == NULL || | 520 DCHECK(interval->next() == nullptr || |
| 513 interval->next()->start().Value() >= interval->start().Value()); | 521 interval->next()->start().Value() >= interval->start().Value()); |
| 514 AdvanceLastProcessedMarker(interval, position); | 522 AdvanceLastProcessedMarker(interval, position); |
| 515 if (interval->Contains(position)) return true; | 523 if (interval->Contains(position)) return true; |
| 516 if (interval->start().Value() > position.Value()) return false; | 524 if (interval->start().Value() > position.Value()) return false; |
| 517 } | 525 } |
| 518 return false; | 526 return false; |
| 519 } | 527 } |
| 520 | 528 |
| 521 | 529 |
| 522 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) { | 530 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) { |
| 523 UseInterval* b = other->first_interval(); | 531 auto b = other->first_interval(); |
| 524 if (b == NULL) return LifetimePosition::Invalid(); | 532 if (b == nullptr) return LifetimePosition::Invalid(); |
| 525 LifetimePosition advance_last_processed_up_to = b->start(); | 533 auto advance_last_processed_up_to = b->start(); |
| 526 UseInterval* a = FirstSearchIntervalForPosition(b->start()); | 534 auto a = FirstSearchIntervalForPosition(b->start()); |
| 527 while (a != NULL && b != NULL) { | 535 while (a != nullptr && b != nullptr) { |
| 528 if (a->start().Value() > other->End().Value()) break; | 536 if (a->start().Value() > other->End().Value()) break; |
| 529 if (b->start().Value() > End().Value()) break; | 537 if (b->start().Value() > End().Value()) break; |
| 530 LifetimePosition cur_intersection = a->Intersect(b); | 538 auto cur_intersection = a->Intersect(b); |
| 531 if (cur_intersection.IsValid()) { | 539 if (cur_intersection.IsValid()) { |
| 532 return cur_intersection; | 540 return cur_intersection; |
| 533 } | 541 } |
| 534 if (a->start().Value() < b->start().Value()) { | 542 if (a->start().Value() < b->start().Value()) { |
| 535 a = a->next(); | 543 a = a->next(); |
| 536 if (a == NULL || a->start().Value() > other->End().Value()) break; | 544 if (a == nullptr || a->start().Value() > other->End().Value()) break; |
| 537 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); | 545 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); |
| 538 } else { | 546 } else { |
| 539 b = b->next(); | 547 b = b->next(); |
| 540 } | 548 } |
| 541 } | 549 } |
| 542 return LifetimePosition::Invalid(); | 550 return LifetimePosition::Invalid(); |
| 543 } | 551 } |
| 544 | 552 |
| 545 | 553 |
| 546 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, | 554 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config, |
| 547 Zone* zone, Frame* frame, | 555 Zone* zone, Frame* frame, |
| 548 InstructionSequence* code, | 556 InstructionSequence* code, |
| 549 const char* debug_name) | 557 const char* debug_name) |
| 550 : local_zone_(zone), | 558 : local_zone_(zone), |
| 551 frame_(frame), | 559 frame_(frame), |
| 552 code_(code), | 560 code_(code), |
| 553 debug_name_(debug_name), | 561 debug_name_(debug_name), |
| 554 config_(config), | 562 config_(config), |
| 555 phi_map_(PhiMap::key_compare(), PhiMap::allocator_type(local_zone())), | 563 phi_map_(PhiMap::key_compare(), PhiMap::allocator_type(local_zone())), |
| 556 live_in_sets_(code->InstructionBlockCount(), local_zone()), | 564 live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()), |
| 557 live_ranges_(code->VirtualRegisterCount() * 2, local_zone()), | 565 live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()), |
| 558 fixed_live_ranges_(this->config()->num_general_registers(), NULL, | 566 fixed_live_ranges_(this->config()->num_general_registers(), nullptr, |
| 559 local_zone()), | 567 local_zone()), |
| 560 fixed_double_live_ranges_(this->config()->num_double_registers(), NULL, | 568 fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr, |
| 561 local_zone()), | 569 local_zone()), |
| 562 unhandled_live_ranges_(code->VirtualRegisterCount() * 2, local_zone()), | 570 unhandled_live_ranges_(local_zone()), |
| 563 active_live_ranges_(8, local_zone()), | 571 active_live_ranges_(local_zone()), |
| 564 inactive_live_ranges_(8, local_zone()), | 572 inactive_live_ranges_(local_zone()), |
| 565 reusable_slots_(8, local_zone()), | 573 reusable_slots_(local_zone()), |
| 566 spill_ranges_(8, local_zone()), | 574 spill_ranges_(local_zone()), |
| 567 mode_(UNALLOCATED_REGISTERS), | 575 mode_(UNALLOCATED_REGISTERS), |
| 568 num_registers_(-1), | 576 num_registers_(-1), |
| 569 allocation_ok_(true) { | 577 allocation_ok_(true) { |
| 570 DCHECK(this->config()->num_general_registers() <= | 578 DCHECK(this->config()->num_general_registers() <= |
| 571 RegisterConfiguration::kMaxGeneralRegisters); | 579 RegisterConfiguration::kMaxGeneralRegisters); |
| 572 DCHECK(this->config()->num_double_registers() <= | 580 DCHECK(this->config()->num_double_registers() <= |
| 573 RegisterConfiguration::kMaxDoubleRegisters); | 581 RegisterConfiguration::kMaxDoubleRegisters); |
| 574 // TryAllocateFreeReg and AllocateBlockedReg assume this | 582 // TryAllocateFreeReg and AllocateBlockedReg assume this |
| 575 // when allocating local arrays. | 583 // when allocating local arrays. |
| 576 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= | 584 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= |
| 577 this->config()->num_general_registers()); | 585 this->config()->num_general_registers()); |
| 586 unhandled_live_ranges().reserve( |
| 587 static_cast<size_t>(code->VirtualRegisterCount() * 2)); |
| 588 active_live_ranges().reserve(8); |
| 589 inactive_live_ranges().reserve(8); |
| 590 reusable_slots().reserve(8); |
| 591 spill_ranges().reserve(8); |
| 578 assigned_registers_ = | 592 assigned_registers_ = |
| 579 new (code_zone()) BitVector(config->num_general_registers(), code_zone()); | 593 new (code_zone()) BitVector(config->num_general_registers(), code_zone()); |
| 580 assigned_double_registers_ = new (code_zone()) | 594 assigned_double_registers_ = new (code_zone()) |
| 581 BitVector(config->num_aliased_double_registers(), code_zone()); | 595 BitVector(config->num_aliased_double_registers(), code_zone()); |
| 582 frame->SetAllocatedRegisters(assigned_registers_); | 596 frame->SetAllocatedRegisters(assigned_registers_); |
| 583 frame->SetAllocatedDoubleRegisters(assigned_double_registers_); | 597 frame->SetAllocatedDoubleRegisters(assigned_double_registers_); |
| 584 } | 598 } |
| 585 | 599 |
| 586 | 600 |
| 587 void RegisterAllocator::InitializeLivenessAnalysis() { | |
| 588 // Initialize the live_in sets for each block to NULL. | |
| 589 std::fill(live_in_sets_.begin(), live_in_sets_.end(), nullptr); | |
| 590 } | |
| 591 | |
| 592 | |
| 593 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) { | 601 BitVector* RegisterAllocator::ComputeLiveOut(const InstructionBlock* block) { |
| 594 // Compute live out for the given block, but not including backward | 602 // Compute live out for the given block, but not including backward |
| 595 // successor edges. | 603 // successor edges. |
| 596 BitVector* live_out = new (local_zone()) | 604 auto live_out = new (local_zone()) |
| 597 BitVector(code()->VirtualRegisterCount(), local_zone()); | 605 BitVector(code()->VirtualRegisterCount(), local_zone()); |
| 598 | 606 |
| 599 // Process all successor blocks. | 607 // Process all successor blocks. |
| 600 for (auto succ : block->successors()) { | 608 for (auto succ : block->successors()) { |
| 601 // Add values live on entry to the successor. Note the successor's | 609 // Add values live on entry to the successor. Note the successor's |
| 602 // live_in will not be computed yet for backwards edges. | 610 // live_in will not be computed yet for backwards edges. |
| 603 BitVector* live_in = live_in_sets_[succ.ToSize()]; | 611 auto live_in = live_in_sets_[succ.ToSize()]; |
| 604 if (live_in != NULL) live_out->Union(*live_in); | 612 if (live_in != nullptr) live_out->Union(*live_in); |
| 605 | 613 |
| 606 // All phi input operands corresponding to this successor edge are live | 614 // All phi input operands corresponding to this successor edge are live |
| 607 // out from this block. | 615 // out from this block. |
| 608 const InstructionBlock* successor = code()->InstructionBlockAt(succ); | 616 auto successor = code()->InstructionBlockAt(succ); |
| 609 size_t index = successor->PredecessorIndexOf(block->rpo_number()); | 617 size_t index = successor->PredecessorIndexOf(block->rpo_number()); |
| 610 DCHECK(index < successor->PredecessorCount()); | 618 DCHECK(index < successor->PredecessorCount()); |
| 611 for (auto phi : successor->phis()) { | 619 for (auto phi : successor->phis()) { |
| 612 live_out->Add(phi->operands()[index]); | 620 live_out->Add(phi->operands()[index]); |
| 613 } | 621 } |
| 614 } | 622 } |
| 615 return live_out; | 623 return live_out; |
| 616 } | 624 } |
| 617 | 625 |
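The live-out rule above, restated as a standalone sketch over plain bitsets. Here std::vector&lt;bool&gt; stands in for V8's BitVector and the Block layout is hypothetical (V8 resolves the phi inputs for an edge via PredecessorIndexOf); assume every live_in entry is pre-sized, with backward-edge successors still all false:

    #include <vector>

    // Sketch only: hypothetical simplified block representation.
    struct Block {
      std::vector<int> successors;  // indices of successor blocks
      // phi_inputs[s]: virtual registers feeding the phis of the s-th
      // successor along the edge from this block.
      std::vector<std::vector<int>> phi_inputs;
    };

    std::vector<bool> ComputeLiveOut(
        const Block& block, const std::vector<std::vector<bool>>& live_in,
        int num_vregs) {
      std::vector<bool> live_out(num_vregs, false);
      for (size_t s = 0; s < block.successors.size(); ++s) {
        // Values live on entry to a successor are live out of this block.
        const std::vector<bool>& succ_in = live_in[block.successors[s]];
        for (int v = 0; v < num_vregs; ++v) {
          if (succ_in[v]) live_out[v] = true;
        }
        // Phi inputs flowing along this edge are also live out.
        for (int v : block.phi_inputs[s]) live_out[v] = true;
      }
      return live_out;
    }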
| 618 | 626 |
| 619 void RegisterAllocator::AddInitialIntervals(const InstructionBlock* block, | 627 void RegisterAllocator::AddInitialIntervals(const InstructionBlock* block, |
| 620 BitVector* live_out) { | 628 BitVector* live_out) { |
| 621 // Add an interval that includes the entire block to the live range for | 629 // Add an interval that includes the entire block to the live range for |
| 622 // each live_out value. | 630 // each live_out value. |
| 623 LifetimePosition start = | 631 auto start = |
| 624 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); | 632 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); |
| 625 LifetimePosition end = LifetimePosition::FromInstructionIndex( | 633 auto end = LifetimePosition::FromInstructionIndex( |
| 626 block->last_instruction_index()).NextInstruction(); | 634 block->last_instruction_index()).NextInstruction(); |
| 627 BitVector::Iterator iterator(live_out); | 635 BitVector::Iterator iterator(live_out); |
| 628 while (!iterator.Done()) { | 636 while (!iterator.Done()) { |
| 629 int operand_index = iterator.Current(); | 637 int operand_index = iterator.Current(); |
| 630 LiveRange* range = LiveRangeFor(operand_index); | 638 auto range = LiveRangeFor(operand_index); |
| 631 range->AddUseInterval(start, end, local_zone()); | 639 range->AddUseInterval(start, end, local_zone()); |
| 632 iterator.Advance(); | 640 iterator.Advance(); |
| 633 } | 641 } |
| 634 } | 642 } |
| 635 | 643 |
| 636 | 644 |
| 637 int RegisterAllocator::FixedDoubleLiveRangeID(int index) { | 645 int RegisterAllocator::FixedDoubleLiveRangeID(int index) { |
| 638 return -index - 1 - config()->num_general_registers(); | 646 return -index - 1 - config()->num_general_registers(); |
| 639 } | 647 } |
| 640 | 648 |
| 641 | 649 |
| 642 InstructionOperand* RegisterAllocator::AllocateFixed( | 650 InstructionOperand* RegisterAllocator::AllocateFixed( |
| 643 UnallocatedOperand* operand, int pos, bool is_tagged) { | 651 UnallocatedOperand* operand, int pos, bool is_tagged) { |
| 644 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register()); | 652 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register()); |
| 645 DCHECK(operand->HasFixedPolicy()); | 653 DCHECK(operand->HasFixedPolicy()); |
| 646 if (operand->HasFixedSlotPolicy()) { | 654 if (operand->HasFixedSlotPolicy()) { |
| 647 operand->ConvertTo(InstructionOperand::STACK_SLOT, | 655 operand->ConvertTo(InstructionOperand::STACK_SLOT, |
| 648 operand->fixed_slot_index()); | 656 operand->fixed_slot_index()); |
| 649 } else if (operand->HasFixedRegisterPolicy()) { | 657 } else if (operand->HasFixedRegisterPolicy()) { |
| 650 int reg_index = operand->fixed_register_index(); | 658 int reg_index = operand->fixed_register_index(); |
| 651 operand->ConvertTo(InstructionOperand::REGISTER, reg_index); | 659 operand->ConvertTo(InstructionOperand::REGISTER, reg_index); |
| 652 } else if (operand->HasFixedDoubleRegisterPolicy()) { | 660 } else if (operand->HasFixedDoubleRegisterPolicy()) { |
| 653 int reg_index = operand->fixed_register_index(); | 661 int reg_index = operand->fixed_register_index(); |
| 654 operand->ConvertTo(InstructionOperand::DOUBLE_REGISTER, reg_index); | 662 operand->ConvertTo(InstructionOperand::DOUBLE_REGISTER, reg_index); |
| 655 } else { | 663 } else { |
| 656 UNREACHABLE(); | 664 UNREACHABLE(); |
| 657 } | 665 } |
| 658 if (is_tagged) { | 666 if (is_tagged) { |
| 659 TraceAlloc("Fixed reg is tagged at %d\n", pos); | 667 TraceAlloc("Fixed reg is tagged at %d\n", pos); |
| 660 Instruction* instr = InstructionAt(pos); | 668 auto instr = InstructionAt(pos); |
| 661 if (instr->HasPointerMap()) { | 669 if (instr->HasPointerMap()) { |
| 662 instr->pointer_map()->RecordPointer(operand, code_zone()); | 670 instr->pointer_map()->RecordPointer(operand, code_zone()); |
| 663 } | 671 } |
| 664 } | 672 } |
| 665 return operand; | 673 return operand; |
| 666 } | 674 } |
| 667 | 675 |
| 668 | 676 |
| 669 LiveRange* RegisterAllocator::FixedLiveRangeFor(int index) { | 677 LiveRange* RegisterAllocator::FixedLiveRangeFor(int index) { |
| 670 DCHECK(index < config()->num_general_registers()); | 678 DCHECK(index < config()->num_general_registers()); |
| 671 LiveRange* result = fixed_live_ranges_[index]; | 679 auto result = fixed_live_ranges()[index]; |
| 672 if (result == NULL) { | 680 if (result == nullptr) { |
| 673 // TODO(titzer): add a utility method to allocate a new LiveRange: | 681 // TODO(titzer): add a utility method to allocate a new LiveRange: |
| 674 // The LiveRange object itself can go in this zone, but the | 682 // The LiveRange object itself can go in this zone, but the |
| 675 // InstructionOperand needs | 683 // InstructionOperand needs |
| 676 // to go in the code zone, since it may survive register allocation. | 684 // to go in the code zone, since it may survive register allocation. |
| 677 result = new (local_zone()) LiveRange(FixedLiveRangeID(index), code_zone()); | 685 result = new (local_zone()) LiveRange(FixedLiveRangeID(index), code_zone()); |
| 678 DCHECK(result->IsFixed()); | 686 DCHECK(result->IsFixed()); |
| 679 result->kind_ = GENERAL_REGISTERS; | 687 result->kind_ = GENERAL_REGISTERS; |
| 680 SetLiveRangeAssignedRegister(result, index); | 688 SetLiveRangeAssignedRegister(result, index); |
| 681 fixed_live_ranges_[index] = result; | 689 fixed_live_ranges()[index] = result; |
| 682 } | 690 } |
| 683 return result; | 691 return result; |
| 684 } | 692 } |
| 685 | 693 |
| 686 | 694 |
| 687 LiveRange* RegisterAllocator::FixedDoubleLiveRangeFor(int index) { | 695 LiveRange* RegisterAllocator::FixedDoubleLiveRangeFor(int index) { |
| 688 DCHECK(index < config()->num_aliased_double_registers()); | 696 DCHECK(index < config()->num_aliased_double_registers()); |
| 689 LiveRange* result = fixed_double_live_ranges_[index]; | 697 auto result = fixed_double_live_ranges()[index]; |
| 690 if (result == NULL) { | 698 if (result == nullptr) { |
| 691 result = new (local_zone()) | 699 result = new (local_zone()) |
| 692 LiveRange(FixedDoubleLiveRangeID(index), code_zone()); | 700 LiveRange(FixedDoubleLiveRangeID(index), code_zone()); |
| 693 DCHECK(result->IsFixed()); | 701 DCHECK(result->IsFixed()); |
| 694 result->kind_ = DOUBLE_REGISTERS; | 702 result->kind_ = DOUBLE_REGISTERS; |
| 695 SetLiveRangeAssignedRegister(result, index); | 703 SetLiveRangeAssignedRegister(result, index); |
| 696 fixed_double_live_ranges_[index] = result; | 704 fixed_double_live_ranges()[index] = result; |
| 697 } | 705 } |
| 698 return result; | 706 return result; |
| 699 } | 707 } |
| 700 | 708 |
| 701 | 709 |
| 702 LiveRange* RegisterAllocator::LiveRangeFor(int index) { | 710 LiveRange* RegisterAllocator::LiveRangeFor(int index) { |
| 703 if (index >= static_cast<int>(live_ranges_.size())) { | 711 if (index >= static_cast<int>(live_ranges_.size())) { |
| 704 live_ranges_.resize(index + 1, NULL); | 712 live_ranges_.resize(index + 1, nullptr); |
| 705 } | 713 } |
| 706 LiveRange* result = live_ranges_[index]; | 714 auto result = live_ranges()[index]; |
| 707 if (result == NULL) { | 715 if (result == nullptr) { |
| 708 result = new (local_zone()) LiveRange(index, code_zone()); | 716 result = new (local_zone()) LiveRange(index, code_zone()); |
| 709 live_ranges_[index] = result; | 717 live_ranges()[index] = result; |
| 710 } | 718 } |
| 711 return result; | 719 return result; |
| 712 } | 720 } |
| 713 | 721 |
| 714 | 722 |
| 715 GapInstruction* RegisterAllocator::GetLastGap(const InstructionBlock* block) { | 723 GapInstruction* RegisterAllocator::GetLastGap(const InstructionBlock* block) { |
| 716 int last_instruction = block->last_instruction_index(); | 724 int last_instruction = block->last_instruction_index(); |
| 717 return code()->GapAt(last_instruction - 1); | 725 return code()->GapAt(last_instruction - 1); |
| 718 } | 726 } |
| 719 | 727 |
| 720 | 728 |
| 721 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) { | 729 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) { |
| 722 if (operand->IsUnallocated()) { | 730 if (operand->IsUnallocated()) { |
| 723 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); | 731 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); |
| 724 } else if (operand->IsRegister()) { | 732 } else if (operand->IsRegister()) { |
| 725 return FixedLiveRangeFor(operand->index()); | 733 return FixedLiveRangeFor(operand->index()); |
| 726 } else if (operand->IsDoubleRegister()) { | 734 } else if (operand->IsDoubleRegister()) { |
| 727 return FixedDoubleLiveRangeFor(operand->index()); | 735 return FixedDoubleLiveRangeFor(operand->index()); |
| 728 } else { | 736 } else { |
| 729 return NULL; | 737 return nullptr; |
| 730 } | 738 } |
| 731 } | 739 } |
| 732 | 740 |
| 733 | 741 |
| 734 void RegisterAllocator::Define(LifetimePosition position, | 742 void RegisterAllocator::Define(LifetimePosition position, |
| 735 InstructionOperand* operand, | 743 InstructionOperand* operand, |
| 736 InstructionOperand* hint) { | 744 InstructionOperand* hint) { |
| 737 LiveRange* range = LiveRangeFor(operand); | 745 auto range = LiveRangeFor(operand); |
| 738 if (range == NULL) return; | 746 if (range == nullptr) return; |
| 739 | 747 |
| 740 if (range->IsEmpty() || range->Start().Value() > position.Value()) { | 748 if (range->IsEmpty() || range->Start().Value() > position.Value()) { |
| 741 // Can happen if there is a definition without use. | 749 // Can happen if there is a definition without use. |
| 742 range->AddUseInterval(position, position.NextInstruction(), local_zone()); | 750 range->AddUseInterval(position, position.NextInstruction(), local_zone()); |
| 743 range->AddUsePosition(position.NextInstruction(), NULL, NULL, local_zone()); | 751 range->AddUsePosition(position.NextInstruction(), nullptr, nullptr, |
| 752 local_zone()); |
| 744 } else { | 753 } else { |
| 745 range->ShortenTo(position); | 754 range->ShortenTo(position); |
| 746 } | 755 } |
| 747 | 756 |
| 748 if (operand->IsUnallocated()) { | 757 if (operand->IsUnallocated()) { |
| 749 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); | 758 auto unalloc_operand = UnallocatedOperand::cast(operand); |
| 750 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); | 759 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); |
| 751 } | 760 } |
| 752 } | 761 } |
| 753 | 762 |
| 754 | 763 |
| 755 void RegisterAllocator::Use(LifetimePosition block_start, | 764 void RegisterAllocator::Use(LifetimePosition block_start, |
| 756 LifetimePosition position, | 765 LifetimePosition position, |
| 757 InstructionOperand* operand, | 766 InstructionOperand* operand, |
| 758 InstructionOperand* hint) { | 767 InstructionOperand* hint) { |
| 759 LiveRange* range = LiveRangeFor(operand); | 768 auto range = LiveRangeFor(operand); |
| 760 if (range == NULL) return; | 769 if (range == nullptr) return; |
| 761 if (operand->IsUnallocated()) { | 770 if (operand->IsUnallocated()) { |
| 762 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); | 771 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); |
| 763 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); | 772 range->AddUsePosition(position, unalloc_operand, hint, local_zone()); |
| 764 } | 773 } |
| 765 range->AddUseInterval(block_start, position, local_zone()); | 774 range->AddUseInterval(block_start, position, local_zone()); |
| 766 } | 775 } |
| 767 | 776 |
| 768 | 777 |
| 769 void RegisterAllocator::AddConstraintsGapMove(int index, | 778 void RegisterAllocator::AddConstraintsGapMove(int index, |
| 770 InstructionOperand* from, | 779 InstructionOperand* from, |
| 771 InstructionOperand* to) { | 780 InstructionOperand* to) { |
| 772 GapInstruction* gap = code()->GapAt(index); | 781 auto gap = code()->GapAt(index); |
| 773 ParallelMove* move = | 782 auto move = gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); |
| 774 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); | |
| 775 if (from->IsUnallocated()) { | 783 if (from->IsUnallocated()) { |
| 776 const ZoneList<MoveOperands>* move_operands = move->move_operands(); | 784 const ZoneList<MoveOperands>* move_operands = move->move_operands(); |
| 777 for (int i = 0; i < move_operands->length(); ++i) { | 785 for (int i = 0; i < move_operands->length(); ++i) { |
| 778 MoveOperands cur = move_operands->at(i); | 786 auto cur = move_operands->at(i); |
| 779 InstructionOperand* cur_to = cur.destination(); | 787 auto cur_to = cur.destination(); |
| 780 if (cur_to->IsUnallocated()) { | 788 if (cur_to->IsUnallocated()) { |
| 781 if (UnallocatedOperand::cast(cur_to)->virtual_register() == | 789 if (UnallocatedOperand::cast(cur_to)->virtual_register() == |
| 782 UnallocatedOperand::cast(from)->virtual_register()) { | 790 UnallocatedOperand::cast(from)->virtual_register()) { |
| 783 move->AddMove(cur.source(), to, code_zone()); | 791 move->AddMove(cur.source(), to, code_zone()); |
| 784 return; | 792 return; |
| 785 } | 793 } |
| 786 } | 794 } |
| 787 } | 795 } |
| 788 } | 796 } |
| 789 move->AddMove(from, to, code_zone()); | 797 move->AddMove(from, to, code_zone()); |
| 790 } | 798 } |
| 791 | 799 |
| 792 | 800 |
| 793 static bool AreUseIntervalsIntersecting(UseInterval* interval1, | 801 static bool AreUseIntervalsIntersecting(UseInterval* interval1, |
| 794 UseInterval* interval2) { | 802 UseInterval* interval2) { |
| 795 while (interval1 != NULL && interval2 != NULL) { | 803 while (interval1 != nullptr && interval2 != nullptr) { |
| 796 if (interval1->start().Value() < interval2->start().Value()) { | 804 if (interval1->start().Value() < interval2->start().Value()) { |
| 797 if (interval1->end().Value() > interval2->start().Value()) { | 805 if (interval1->end().Value() > interval2->start().Value()) { |
| 798 return true; | 806 return true; |
| 799 } | 807 } |
| 800 interval1 = interval1->next(); | 808 interval1 = interval1->next(); |
| 801 } else { | 809 } else { |
| 802 if (interval2->end().Value() > interval1->start().Value()) { | 810 if (interval2->end().Value() > interval1->start().Value()) { |
| 803 return true; | 811 return true; |
| 804 } | 812 } |
| 805 interval2 = interval2->next(); | 813 interval2 = interval2->next(); |
| 806 } | 814 } |
| 807 } | 815 } |
| 808 return false; | 816 return false; |
| 809 } | 817 } |
| 810 | 818 |
| 811 | 819 |
| 812 SpillRange::SpillRange(LiveRange* range, int id, Zone* zone) | 820 SpillRange::SpillRange(LiveRange* range, int id, Zone* zone) |
| 813 : id_(id), live_ranges_(1, zone), end_position_(range->End()) { | 821 : id_(id), live_ranges_(1, zone), end_position_(range->End()) { |
| 814 UseInterval* src = range->first_interval(); | 822 auto src = range->first_interval(); |
| 815 UseInterval* result = NULL; | 823 UseInterval* result = nullptr; |
| 816 UseInterval* node = NULL; | 824 UseInterval* node = nullptr; |
| 817 // Copy the nodes | 825 // Copy the nodes |
| 818 while (src != NULL) { | 826 while (src != nullptr) { |
| 819 UseInterval* new_node = new (zone) UseInterval(src->start(), src->end()); | 827 UseInterval* new_node = new (zone) UseInterval(src->start(), src->end()); |
| 820 if (result == NULL) { | 828 if (result == nullptr) { |
| 821 result = new_node; | 829 result = new_node; |
| 822 } else { | 830 } else { |
| 823 node->set_next(new_node); | 831 node->set_next(new_node); |
| 824 } | 832 } |
| 825 node = new_node; | 833 node = new_node; |
| 826 src = src->next(); | 834 src = src->next(); |
| 827 } | 835 } |
| 828 use_interval_ = result; | 836 use_interval_ = result; |
| 829 live_ranges_.Add(range, zone); | 837 live_ranges_.Add(range, zone); |
| 830 DCHECK(!range->HasSpillRange()); | 838 DCHECK(!range->HasSpillRange()); |
| (...skipping 11 matching lines...) |
| 842 | 850 |
| 843 | 851 |
| 844 bool SpillRange::TryMerge(SpillRange* other, Zone* zone) { | 852 bool SpillRange::TryMerge(SpillRange* other, Zone* zone) { |
| 845 if (Kind() == other->Kind() && | 853 if (Kind() == other->Kind() && |
| 846 !AreUseIntervalsIntersecting(use_interval_, other->use_interval_)) { | 854 !AreUseIntervalsIntersecting(use_interval_, other->use_interval_)) { |
| 847 if (End().Value() < other->End().Value()) { | 855 if (End().Value() < other->End().Value()) { |
| 848 end_position_ = other->End(); | 856 end_position_ = other->End(); |
| 849 } | 857 } |
| 850 | 858 |
| 851 MergeDisjointIntervals(other->use_interval_, zone); | 859 MergeDisjointIntervals(other->use_interval_, zone); |
| 852 other->use_interval_ = NULL; | 860 other->use_interval_ = nullptr; |
| 853 | 861 |
| 854 for (int i = 0; i < other->live_ranges_.length(); i++) { | 862 for (int i = 0; i < other->live_ranges_.length(); i++) { |
| 855 DCHECK(other->live_ranges_.at(i)->GetSpillRange() == other); | 863 DCHECK(other->live_ranges_.at(i)->GetSpillRange() == other); |
| 856 other->live_ranges_.at(i)->SetSpillRange(this); | 864 other->live_ranges_.at(i)->SetSpillRange(this); |
| 857 } | 865 } |
| 858 | 866 |
| 859 live_ranges_.AddAll(other->live_ranges_, zone); | 867 live_ranges_.AddAll(other->live_ranges_, zone); |
| 860 other->live_ranges_.Clear(); | 868 other->live_ranges_.Clear(); |
| 861 | 869 |
| 862 return true; | 870 return true; |
| 863 } | 871 } |
| 864 return false; | 872 return false; |
| 865 } | 873 } |
| 866 | 874 |
| 867 | 875 |
| 868 void SpillRange::SetOperand(InstructionOperand* op) { | 876 void SpillRange::SetOperand(InstructionOperand* op) { |
| 869 for (int i = 0; i < live_ranges_.length(); i++) { | 877 for (int i = 0; i < live_ranges_.length(); i++) { |
| 870 DCHECK(live_ranges_.at(i)->GetSpillRange() == this); | 878 DCHECK(live_ranges_.at(i)->GetSpillRange() == this); |
| 871 live_ranges_.at(i)->CommitSpillOperand(op); | 879 live_ranges_.at(i)->CommitSpillOperand(op); |
| 872 } | 880 } |
| 873 } | 881 } |
| 874 | 882 |
| 875 | 883 |
| 876 void SpillRange::MergeDisjointIntervals(UseInterval* other, Zone* zone) { | 884 void SpillRange::MergeDisjointIntervals(UseInterval* other, Zone* zone) { |
| 877 UseInterval* tail = NULL; | 885 UseInterval* tail = nullptr; |
| 878 UseInterval* current = use_interval_; | 886 auto current = use_interval_; |
| 879 while (other != NULL) { | 887 while (other != nullptr) { |
| 880 // Make sure the 'current' list starts first | 888 // Make sure the 'current' list starts first |
| 881 if (current == NULL || current->start().Value() > other->start().Value()) { | 889 if (current == nullptr || |
| 890 current->start().Value() > other->start().Value()) { |
| 882 std::swap(current, other); | 891 std::swap(current, other); |
| 883 } | 892 } |
| 884 | 893 |
| 885 // Check disjointness | 894 // Check disjointness |
| 886 DCHECK(other == NULL || current->end().Value() <= other->start().Value()); | 895 DCHECK(other == nullptr || |
| 896 current->end().Value() <= other->start().Value()); |
| 887 | 897 |
| 888 // Append the 'current' node to the result accumulator and move forward | 898 // Append the 'current' node to the result accumulator and move forward |
| 889 if (tail == NULL) { | 899 if (tail == nullptr) { |
| 890 use_interval_ = current; | 900 use_interval_ = current; |
| 891 } else { | 901 } else { |
| 892 tail->set_next(current); | 902 tail->set_next(current); |
| 893 } | 903 } |
| 894 tail = current; | 904 tail = current; |
| 895 current = current->next(); | 905 current = current->next(); |
| 896 } | 906 } |
| 897 // Other list is empty => we are done | 907 // Other list is empty => we are done |
| 898 } | 908 } |
| 899 | 909 |
| 900 | 910 |
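The swap-and-append loop in MergeDisjointIntervals is an in-place merge of two sorted, pairwise-disjoint linked lists: no nodes are allocated, and when 'other' runs out, tail->next already points at the remainder of 'current'. A freestanding sketch of the same invariant, again with a hypothetical Interval in place of UseInterval:

#include <cstdio>
#include <utility>

struct Interval {
  int start, end;  // half-open [start, end)
  Interval* next;
};

static Interval* MergeDisjoint(Interval* current, Interval* other) {
  Interval* head = nullptr;
  Interval* tail = nullptr;
  while (other != nullptr) {
    // Keep the earlier-starting node in 'current'.
    if (current == nullptr || current->start > other->start) {
      std::swap(current, other);
    }
    // Append 'current' to the result and advance it.
    if (tail == nullptr) {
      head = current;
    } else {
      tail->next = current;
    }
    tail = current;
    current = current->next;
  }
  // 'other' is exhausted; the rest of 'current' is already linked in order.
  return head == nullptr ? current : head;
}

int main() {
  Interval a2{20, 25, nullptr}, a1{0, 5, &a2};
  Interval b2{30, 35, nullptr}, b1{10, 15, &b2};
  for (Interval* i = MergeDisjoint(&a1, &b1); i != nullptr; i = i->next) {
    std::printf("[%d,%d) ", i->start, i->end);  // [0,5) [10,15) [20,25) [30,35)
  }
  std::printf("\n");
  return 0;
}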
| 901 void RegisterAllocator::ReuseSpillSlots() { | 911 void RegisterAllocator::ReuseSpillSlots() { |
| 902 DCHECK(FLAG_turbo_reuse_spill_slots); | 912 DCHECK(FLAG_turbo_reuse_spill_slots); |
| 903 | 913 |
| 904 // Merge disjoint spill ranges | 914 // Merge disjoint spill ranges |
| 905 for (int i = 0; i < spill_ranges_.length(); i++) { | 915 for (size_t i = 0; i < spill_ranges().size(); i++) { |
| 906 SpillRange* range = spill_ranges_.at(i); | 916 auto range = spill_ranges()[i]; |
| 907 if (!range->IsEmpty()) { | 917 if (range->IsEmpty()) continue; |
| 908 for (int j = i + 1; j < spill_ranges_.length(); j++) { | 918 for (size_t j = i + 1; j < spill_ranges().size(); j++) { |
| 909 SpillRange* other = spill_ranges_.at(j); | 919 auto other = spill_ranges()[j]; |
| 910 if (!other->IsEmpty()) { | 920 if (!other->IsEmpty()) { |
| 911 range->TryMerge(spill_ranges_.at(j), local_zone()); | 921 range->TryMerge(other, local_zone()); |
| 912 } | |
| 913 } | 922 } |
| 914 } | 923 } |
| 915 } | 924 } |
| 916 | 925 |
| 917 // Allocate slots for the merged spill ranges. | 926 // Allocate slots for the merged spill ranges. |
| 918 for (int i = 0; i < spill_ranges_.length(); i++) { | 927 for (auto range : spill_ranges()) { |
| 919 auto range = spill_ranges_.at(i); | |
| 920 if (range->IsEmpty()) continue; | 928 if (range->IsEmpty()) continue; |
| 921 // Allocate a new operand referring to the spill slot. | 929 // Allocate a new operand referring to the spill slot. |
| 922 auto kind = range->Kind(); | 930 auto kind = range->Kind(); |
| 923 int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS); | 931 int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS); |
| 924 auto op_kind = kind == DOUBLE_REGISTERS | 932 auto op_kind = kind == DOUBLE_REGISTERS |
| 925 ? InstructionOperand::DOUBLE_STACK_SLOT | 933 ? InstructionOperand::DOUBLE_STACK_SLOT |
| 926 : InstructionOperand::STACK_SLOT; | 934 : InstructionOperand::STACK_SLOT; |
| 927 auto op = new (code_zone()) InstructionOperand(op_kind, index); | 935 auto op = new (code_zone()) InstructionOperand(op_kind, index); |
| 928 range->SetOperand(op); | 936 range->SetOperand(op); |
| 929 } | 937 } |
| 930 } | 938 } |
| 931 | 939 |
| 932 | 940 |
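ReuseSpillSlots is a quadratic greedy pass: each surviving range tries to absorb every later compatible one, and only survivors get a stack slot, so merged ranges share a slot. A toy sketch with a simplified Range whose TryMerge only checks the kind (the real test also requires the use intervals not to intersect); all names here are illustrative stand-ins, not the V8 classes:

#include <cstdio>
#include <vector>

struct Range {
  int kind;    // e.g. 0 = general, 1 = double
  bool empty;
  bool TryMerge(Range* other) {
    if (kind != other->kind) return false;
    other->empty = true;  // absorbed: 'other' no longer needs its own slot
    return true;
  }
};

int main() {
  std::vector<Range> ranges = {{0, false}, {1, false}, {0, false}, {1, false}};
  for (size_t i = 0; i < ranges.size(); i++) {
    if (ranges[i].empty) continue;
    for (size_t j = i + 1; j < ranges.size(); j++) {
      if (!ranges[j].empty) ranges[i].TryMerge(&ranges[j]);
    }
  }
  int slots = 0;
  for (auto& r : ranges) slots += r.empty ? 0 : 1;  // one slot per survivor
  std::printf("%d spill slots\n", slots);           // 2, down from 4
  return 0;
}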
| 933 void RegisterAllocator::CommitAssignment() { | 941 void RegisterAllocator::CommitAssignment() { |
| 934 for (auto range : live_ranges()) { | 942 for (auto range : live_ranges()) { |
| 935 if (range == nullptr || range->IsEmpty()) continue; | 943 if (range == nullptr || range->IsEmpty()) continue; |
| 936 auto assigned = range->CreateAssignedOperand(code_zone()); | 944 auto assigned = range->CreateAssignedOperand(code_zone()); |
| 937 range->ConvertUsesToOperand(assigned); | 945 range->ConvertUsesToOperand(assigned); |
| 938 if (range->IsSpilled()) { | 946 if (range->IsSpilled()) { |
| 939 range->CommitSpillsAtDefinition(code(), assigned); | 947 range->CommitSpillsAtDefinition(code(), assigned); |
| 940 } | 948 } |
| 941 } | 949 } |
| 942 } | 950 } |
| 943 | 951 |
| 944 | 952 |
| 945 SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) { | 953 SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) { |
| 946 DCHECK(FLAG_turbo_reuse_spill_slots); | 954 DCHECK(FLAG_turbo_reuse_spill_slots); |
| 947 int spill_id = spill_ranges_.length(); | 955 int spill_id = static_cast<int>(spill_ranges().size()); |
| 948 SpillRange* spill_range = | 956 auto spill_range = |
| 949 new (local_zone()) SpillRange(range, spill_id, local_zone()); | 957 new (local_zone()) SpillRange(range, spill_id, local_zone()); |
| 950 spill_ranges_.Add(spill_range, local_zone()); | 958 spill_ranges().push_back(spill_range); |
| 951 return spill_range; | 959 return spill_range; |
| 952 } | 960 } |
| 953 | 961 |
| 954 | 962 |
| 955 bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) { | 963 bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) { |
| 956 DCHECK(FLAG_turbo_reuse_spill_slots); | 964 DCHECK(FLAG_turbo_reuse_spill_slots); |
| 957 DCHECK(range->HasNoSpillType()); | 965 DCHECK(range->HasNoSpillType()); |
| 958 if (range->IsChild() || !range->is_phi()) return false; | 966 if (range->IsChild() || !range->is_phi()) return false; |
| 959 | 967 |
| 960 auto lookup = phi_map_.find(range->id()); | 968 auto lookup = phi_map_.find(range->id()); |
| 961 DCHECK(lookup != phi_map_.end()); | 969 DCHECK(lookup != phi_map_.end()); |
| 962 auto phi = lookup->second.phi; | 970 auto phi = lookup->second.phi; |
| 963 auto block = lookup->second.block; | 971 auto block = lookup->second.block; |
| 964 // Count the number of spilled operands. | 972 // Count the number of spilled operands. |
| 965 size_t spilled_count = 0; | 973 size_t spilled_count = 0; |
| 966 LiveRange* first_op = nullptr; | 974 LiveRange* first_op = nullptr; |
| 967 for (size_t i = 0; i < phi->operands().size(); i++) { | 975 for (size_t i = 0; i < phi->operands().size(); i++) { |
| 968 int op = phi->operands()[i]; | 976 int op = phi->operands()[i]; |
| 969 LiveRange* op_range = LiveRangeFor(op); | 977 LiveRange* op_range = LiveRangeFor(op); |
| 970 if (op_range->GetSpillRange() == nullptr) continue; | 978 if (op_range->GetSpillRange() == nullptr) continue; |
| 971 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); | 979 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); |
| 972 LifetimePosition pred_end = | 980 auto pred_end = |
| 973 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | 981 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); |
| 974 while (op_range != nullptr && !op_range->CanCover(pred_end)) { | 982 while (op_range != nullptr && !op_range->CanCover(pred_end)) { |
| 975 op_range = op_range->next(); | 983 op_range = op_range->next(); |
| 976 } | 984 } |
| 977 if (op_range != nullptr && op_range->IsSpilled()) { | 985 if (op_range != nullptr && op_range->IsSpilled()) { |
| 978 spilled_count++; | 986 spilled_count++; |
| 979 if (first_op == nullptr) { | 987 if (first_op == nullptr) { |
| 980 first_op = op_range->TopLevel(); | 988 first_op = op_range->TopLevel(); |
| 981 } | 989 } |
| 982 } | 990 } |
| 983 } | 991 } |
| 984 | 992 |
| 985 // Only continue if more than half of the operands are spilled. | 993 // Only continue if more than half of the operands are spilled. |
| 986 if (spilled_count * 2 <= phi->operands().size()) { | 994 if (spilled_count * 2 <= phi->operands().size()) { |
| 987 return false; | 995 return false; |
| 988 } | 996 } |
| 989 | 997 |
| 990 // Try to merge the spilled operands and count the number of merged spilled | 998 // Try to merge the spilled operands and count the number of merged spilled |
| 991 // operands. | 999 // operands. |
| 992 DCHECK(first_op != NULL); | 1000 DCHECK(first_op != nullptr); |
| 993 SpillRange* first_op_spill = first_op->GetSpillRange(); | 1001 auto first_op_spill = first_op->GetSpillRange(); |
| 994 size_t num_merged = 1; | 1002 size_t num_merged = 1; |
| 995 for (size_t i = 1; i < phi->operands().size(); i++) { | 1003 for (size_t i = 1; i < phi->operands().size(); i++) { |
| 996 int op = phi->operands()[i]; | 1004 int op = phi->operands()[i]; |
| 997 LiveRange* op_range = LiveRangeFor(op); | 1005 auto op_range = LiveRangeFor(op); |
| 998 SpillRange* op_spill = op_range->GetSpillRange(); | 1006 auto op_spill = op_range->GetSpillRange(); |
| 999 if (op_spill != NULL) { | 1007 if (op_spill != nullptr) { |
| 1000 if (op_spill->id() == first_op_spill->id() || | 1008 if (op_spill->id() == first_op_spill->id() || |
| 1001 first_op_spill->TryMerge(op_spill, local_zone())) { | 1009 first_op_spill->TryMerge(op_spill, local_zone())) { |
| 1002 num_merged++; | 1010 num_merged++; |
| 1003 } | 1011 } |
| 1004 } | 1012 } |
| 1005 } | 1013 } |
| 1006 | 1014 |
| 1007 // Only continue if enough operands could be merged to the | 1015 // Only continue if enough operands could be merged to the |
| 1008 // same spill slot. | 1016 // same spill slot. |
| 1009 if (num_merged * 2 <= phi->operands().size() || | 1017 if (num_merged * 2 <= phi->operands().size() || |
| 1010 AreUseIntervalsIntersecting(first_op_spill->interval(), | 1018 AreUseIntervalsIntersecting(first_op_spill->interval(), |
| 1011 range->first_interval())) { | 1019 range->first_interval())) { |
| 1012 return false; | 1020 return false; |
| 1013 } | 1021 } |
| 1014 | 1022 |
| 1015 // If the range does not need a register soon, spill it to the merged | 1023 // If the range does not need a register soon, spill it to the merged |
| 1016 // spill range. | 1024 // spill range. |
| 1017 LifetimePosition next_pos = range->Start(); | 1025 auto next_pos = range->Start(); |
| 1018 if (code()->IsGapAt(next_pos.InstructionIndex())) { | 1026 if (code()->IsGapAt(next_pos.InstructionIndex())) { |
| 1019 next_pos = next_pos.NextInstruction(); | 1027 next_pos = next_pos.NextInstruction(); |
| 1020 } | 1028 } |
| 1021 UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | 1029 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); |
| 1022 if (pos == NULL) { | 1030 if (pos == nullptr) { |
| 1023 SpillRange* spill_range = AssignSpillRangeToLiveRange(range->TopLevel()); | 1031 auto spill_range = AssignSpillRangeToLiveRange(range->TopLevel()); |
| 1024 CHECK(first_op_spill->TryMerge(spill_range, local_zone())); | 1032 CHECK(first_op_spill->TryMerge(spill_range, local_zone())); |
| 1025 Spill(range); | 1033 Spill(range); |
| 1026 return true; | 1034 return true; |
| 1027 } else if (pos->pos().Value() > range->Start().NextInstruction().Value()) { | 1035 } else if (pos->pos().Value() > range->Start().NextInstruction().Value()) { |
| 1028 SpillRange* spill_range = AssignSpillRangeToLiveRange(range->TopLevel()); | 1036 auto spill_range = AssignSpillRangeToLiveRange(range->TopLevel()); |
| 1029 CHECK(first_op_spill->TryMerge(spill_range, local_zone())); | 1037 CHECK(first_op_spill->TryMerge(spill_range, local_zone())); |
| 1030 SpillBetween(range, range->Start(), pos->pos()); | 1038 SpillBetween(range, range->Start(), pos->pos()); |
| 1031 if (!AllocationOk()) return false; | 1039 if (!AllocationOk()) return false; |
| 1032 DCHECK(UnhandledIsSorted()); | 1040 DCHECK(UnhandledIsSorted()); |
| 1033 return true; | 1041 return true; |
| 1034 } | 1042 } |
| 1035 return false; | 1043 return false; |
| 1036 } | 1044 } |
| 1037 | 1045 |
| 1038 | 1046 |
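Both early-outs in TryReuseSpillForPhi use the same integer majority test: spilled_count * 2 <= size means "not more than half", with no floating point involved. Isolated below with hypothetical names:

#include <cassert>
#include <cstddef>

static bool MoreThanHalf(size_t count, size_t total) {
  return count * 2 > total;  // negation of the early-out form above
}

int main() {
  assert(!MoreThanHalf(2, 4));  // exactly half does not qualify
  assert(MoreThanHalf(3, 4));
  assert(!MoreThanHalf(1, 3));
  assert(MoreThanHalf(2, 3));
  return 0;
}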
| 1039 void RegisterAllocator::MeetRegisterConstraints(const InstructionBlock* block) { | 1047 void RegisterAllocator::MeetRegisterConstraints(const InstructionBlock* block) { |
| 1040 int start = block->first_instruction_index(); | 1048 int start = block->first_instruction_index(); |
| 1041 int end = block->last_instruction_index(); | 1049 int end = block->last_instruction_index(); |
| 1042 DCHECK_NE(-1, start); | 1050 DCHECK_NE(-1, start); |
| 1043 for (int i = start; i <= end; ++i) { | 1051 for (int i = start; i <= end; ++i) { |
| 1044 if (code()->IsGapAt(i)) { | 1052 if (code()->IsGapAt(i)) { |
| 1045 Instruction* instr = NULL; | 1053 Instruction* instr = nullptr; |
| 1046 Instruction* prev_instr = NULL; | 1054 Instruction* prev_instr = nullptr; |
| 1047 if (i < end) instr = InstructionAt(i + 1); | 1055 if (i < end) instr = InstructionAt(i + 1); |
| 1048 if (i > start) prev_instr = InstructionAt(i - 1); | 1056 if (i > start) prev_instr = InstructionAt(i - 1); |
| 1049 MeetConstraintsBetween(prev_instr, instr, i); | 1057 MeetConstraintsBetween(prev_instr, instr, i); |
| 1050 if (!AllocationOk()) return; | 1058 if (!AllocationOk()) return; |
| 1051 } | 1059 } |
| 1052 } | 1060 } |
| 1053 | 1061 |
| 1054 // Meet register constraints for the last instruction in the block. | 1062 // Meet register constraints for the last instruction in the block. |
| 1055 if (!code()->IsGapAt(end)) { | 1063 if (!code()->IsGapAt(end)) { |
| 1056 MeetRegisterConstraintsForLastInstructionInBlock(block); | 1064 MeetRegisterConstraintsForLastInstructionInBlock(block); |
| 1057 } | 1065 } |
| 1058 } | 1066 } |
| 1059 | 1067 |
| 1060 | 1068 |
| 1061 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock( | 1069 void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock( |
| 1062 const InstructionBlock* block) { | 1070 const InstructionBlock* block) { |
| 1063 int end = block->last_instruction_index(); | 1071 int end = block->last_instruction_index(); |
| 1064 Instruction* last_instruction = InstructionAt(end); | 1072 auto last_instruction = InstructionAt(end); |
| 1065 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { | 1073 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { |
| 1066 InstructionOperand* output_operand = last_instruction->OutputAt(i); | 1074 auto output_operand = last_instruction->OutputAt(i); |
| 1067 DCHECK(!output_operand->IsConstant()); | 1075 DCHECK(!output_operand->IsConstant()); |
| 1068 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand); | 1076 auto output = UnallocatedOperand::cast(output_operand); |
| 1069 int output_vreg = output->virtual_register(); | 1077 int output_vreg = output->virtual_register(); |
| 1070 LiveRange* range = LiveRangeFor(output_vreg); | 1078 auto range = LiveRangeFor(output_vreg); |
| 1071 bool assigned = false; | 1079 bool assigned = false; |
| 1072 if (output->HasFixedPolicy()) { | 1080 if (output->HasFixedPolicy()) { |
| 1073 AllocateFixed(output, -1, false); | 1081 AllocateFixed(output, -1, false); |
| 1074 // This value is produced on the stack; we never need to spill it. | 1082 // This value is produced on the stack; we never need to spill it. |
| 1075 if (output->IsStackSlot()) { | 1083 if (output->IsStackSlot()) { |
| 1076 DCHECK(output->index() < 0); | 1084 DCHECK(output->index() < 0); |
| 1077 range->SetSpillOperand(output); | 1085 range->SetSpillOperand(output); |
| 1078 range->SetSpillStartIndex(end); | 1086 range->SetSpillStartIndex(end); |
| 1079 assigned = true; | 1087 assigned = true; |
| 1080 } | 1088 } |
| (...skipping 23 matching lines...) |
| 1104 range->SetSpillStartIndex(gap_index); | 1112 range->SetSpillStartIndex(gap_index); |
| 1105 } | 1113 } |
| 1106 } | 1114 } |
| 1107 } | 1115 } |
| 1108 } | 1116 } |
| 1109 | 1117 |
| 1110 | 1118 |
| 1111 void RegisterAllocator::MeetConstraintsBetween(Instruction* first, | 1119 void RegisterAllocator::MeetConstraintsBetween(Instruction* first, |
| 1112 Instruction* second, | 1120 Instruction* second, |
| 1113 int gap_index) { | 1121 int gap_index) { |
| 1114 if (first != NULL) { | 1122 if (first != nullptr) { |
| 1115 // Handle fixed temporaries. | 1123 // Handle fixed temporaries. |
| 1116 for (size_t i = 0; i < first->TempCount(); i++) { | 1124 for (size_t i = 0; i < first->TempCount(); i++) { |
| 1117 UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i)); | 1125 auto temp = UnallocatedOperand::cast(first->TempAt(i)); |
| 1118 if (temp->HasFixedPolicy()) { | 1126 if (temp->HasFixedPolicy()) { |
| 1119 AllocateFixed(temp, gap_index - 1, false); | 1127 AllocateFixed(temp, gap_index - 1, false); |
| 1120 } | 1128 } |
| 1121 } | 1129 } |
| 1122 | 1130 |
| 1123 // Handle constant/fixed output operands. | 1131 // Handle constant/fixed output operands. |
| 1124 for (size_t i = 0; i < first->OutputCount(); i++) { | 1132 for (size_t i = 0; i < first->OutputCount(); i++) { |
| 1125 InstructionOperand* output = first->OutputAt(i); | 1133 InstructionOperand* output = first->OutputAt(i); |
| 1126 if (output->IsConstant()) { | 1134 if (output->IsConstant()) { |
| 1127 int output_vreg = output->index(); | 1135 int output_vreg = output->index(); |
| 1128 LiveRange* range = LiveRangeFor(output_vreg); | 1136 auto range = LiveRangeFor(output_vreg); |
| 1129 range->SetSpillStartIndex(gap_index - 1); | 1137 range->SetSpillStartIndex(gap_index - 1); |
| 1130 range->SetSpillOperand(output); | 1138 range->SetSpillOperand(output); |
| 1131 } else { | 1139 } else { |
| 1132 UnallocatedOperand* first_output = UnallocatedOperand::cast(output); | 1140 auto first_output = UnallocatedOperand::cast(output); |
| 1133 LiveRange* range = LiveRangeFor(first_output->virtual_register()); | 1141 auto range = LiveRangeFor(first_output->virtual_register()); |
| 1134 bool assigned = false; | 1142 bool assigned = false; |
| 1135 if (first_output->HasFixedPolicy()) { | 1143 if (first_output->HasFixedPolicy()) { |
| 1136 UnallocatedOperand* output_copy = | 1144 auto output_copy = first_output->CopyUnconstrained(code_zone()); |
| 1137 first_output->CopyUnconstrained(code_zone()); | |
| 1138 bool is_tagged = HasTaggedValue(first_output->virtual_register()); | 1145 bool is_tagged = HasTaggedValue(first_output->virtual_register()); |
| 1139 AllocateFixed(first_output, gap_index, is_tagged); | 1146 AllocateFixed(first_output, gap_index, is_tagged); |
| 1140 | 1147 |
| 1141 // This value is produced on the stack; we never need to spill it. | 1148 // This value is produced on the stack; we never need to spill it. |
| 1142 if (first_output->IsStackSlot()) { | 1149 if (first_output->IsStackSlot()) { |
| 1143 DCHECK(first_output->index() < 0); | 1150 DCHECK(first_output->index() < 0); |
| 1144 range->SetSpillOperand(first_output); | 1151 range->SetSpillOperand(first_output); |
| 1145 range->SetSpillStartIndex(gap_index - 1); | 1152 range->SetSpillStartIndex(gap_index - 1); |
| 1146 assigned = true; | 1153 assigned = true; |
| 1147 } | 1154 } |
| 1148 code()->AddGapMove(gap_index, first_output, output_copy); | 1155 code()->AddGapMove(gap_index, first_output, output_copy); |
| 1149 } | 1156 } |
| 1150 | 1157 |
| 1151 // Make sure we add a gap move for spilling (if we have not done | 1158 // Make sure we add a gap move for spilling (if we have not done |
| 1152 // so already). | 1159 // so already). |
| 1153 if (!assigned) { | 1160 if (!assigned) { |
| 1154 range->SpillAtDefinition(local_zone(), gap_index, first_output); | 1161 range->SpillAtDefinition(local_zone(), gap_index, first_output); |
| 1155 range->SetSpillStartIndex(gap_index); | 1162 range->SetSpillStartIndex(gap_index); |
| 1156 } | 1163 } |
| 1157 } | 1164 } |
| 1158 } | 1165 } |
| 1159 } | 1166 } |
| 1160 | 1167 |
| 1161 if (second != NULL) { | 1168 if (second != nullptr) { |
| 1162 // Handle fixed input operands of second instruction. | 1169 // Handle fixed input operands of second instruction. |
| 1163 for (size_t i = 0; i < second->InputCount(); i++) { | 1170 for (size_t i = 0; i < second->InputCount(); i++) { |
| 1164 InstructionOperand* input = second->InputAt(i); | 1171 auto input = second->InputAt(i); |
| 1165 if (input->IsImmediate()) continue; // Ignore immediates. | 1172 if (input->IsImmediate()) continue; // Ignore immediates. |
| 1166 UnallocatedOperand* cur_input = UnallocatedOperand::cast(input); | 1173 auto cur_input = UnallocatedOperand::cast(input); |
| 1167 if (cur_input->HasFixedPolicy()) { | 1174 if (cur_input->HasFixedPolicy()) { |
| 1168 UnallocatedOperand* input_copy = | 1175 auto input_copy = cur_input->CopyUnconstrained(code_zone()); |
| 1169 cur_input->CopyUnconstrained(code_zone()); | |
| 1170 bool is_tagged = HasTaggedValue(cur_input->virtual_register()); | 1176 bool is_tagged = HasTaggedValue(cur_input->virtual_register()); |
| 1171 AllocateFixed(cur_input, gap_index + 1, is_tagged); | 1177 AllocateFixed(cur_input, gap_index + 1, is_tagged); |
| 1172 AddConstraintsGapMove(gap_index, input_copy, cur_input); | 1178 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 1173 } | 1179 } |
| 1174 } | 1180 } |
| 1175 | 1181 |
| 1176 // Handle "output same as input" for second instruction. | 1182 // Handle "output same as input" for second instruction. |
| 1177 for (size_t i = 0; i < second->OutputCount(); i++) { | 1183 for (size_t i = 0; i < second->OutputCount(); i++) { |
| 1178 InstructionOperand* output = second->OutputAt(i); | 1184 auto output = second->OutputAt(i); |
| 1179 if (!output->IsUnallocated()) continue; | 1185 if (!output->IsUnallocated()) continue; |
| 1180 UnallocatedOperand* second_output = UnallocatedOperand::cast(output); | 1186 auto second_output = UnallocatedOperand::cast(output); |
| 1181 if (second_output->HasSameAsInputPolicy()) { | 1187 if (second_output->HasSameAsInputPolicy()) { |
| 1182 DCHECK(i == 0); // Only valid for first output. | 1188 DCHECK(i == 0); // Only valid for first output. |
| 1183 UnallocatedOperand* cur_input = | 1189 UnallocatedOperand* cur_input = |
| 1184 UnallocatedOperand::cast(second->InputAt(0)); | 1190 UnallocatedOperand::cast(second->InputAt(0)); |
| 1185 int output_vreg = second_output->virtual_register(); | 1191 int output_vreg = second_output->virtual_register(); |
| 1186 int input_vreg = cur_input->virtual_register(); | 1192 int input_vreg = cur_input->virtual_register(); |
| 1187 | 1193 |
| 1188 UnallocatedOperand* input_copy = | 1194 auto input_copy = cur_input->CopyUnconstrained(code_zone()); |
| 1189 cur_input->CopyUnconstrained(code_zone()); | |
| 1190 cur_input->set_virtual_register(second_output->virtual_register()); | 1195 cur_input->set_virtual_register(second_output->virtual_register()); |
| 1191 AddConstraintsGapMove(gap_index, input_copy, cur_input); | 1196 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 1192 | 1197 |
| 1193 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { | 1198 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { |
| 1194 int index = gap_index + 1; | 1199 int index = gap_index + 1; |
| 1195 Instruction* instr = InstructionAt(index); | 1200 Instruction* instr = InstructionAt(index); |
| 1196 if (instr->HasPointerMap()) { | 1201 if (instr->HasPointerMap()) { |
| 1197 instr->pointer_map()->RecordPointer(input_copy, code_zone()); | 1202 instr->pointer_map()->RecordPointer(input_copy, code_zone()); |
| 1198 } | 1203 } |
| 1199 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { | 1204 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { |
| 1200 // The input is assumed to immediately have a tagged representation, | 1205 // The input is assumed to immediately have a tagged representation, |
| 1201 // before the pointer map can be used. That is, the pointer map at the | 1206 // before the pointer map can be used. That is, the pointer map at the |
| 1202 // instruction will include the output operand (whose value at the | 1207 // instruction will include the output operand (whose value at the |
| 1203 // beginning of the instruction is equal to the input operand). If | 1208 // beginning of the instruction is equal to the input operand). If |
| 1204 // this is not desired, then the pointer map at this instruction needs | 1209 // this is not desired, then the pointer map at this instruction needs |
| 1205 // to be adjusted manually. | 1210 // to be adjusted manually. |
| 1206 } | 1211 } |
| 1207 } | 1212 } |
| 1208 } | 1213 } |
| 1209 } | 1214 } |
| 1210 } | 1215 } |
| 1211 | 1216 |
| 1212 | 1217 |
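The fixed-policy cases above all follow one pattern: keep the value in an unconstrained copy, pin the original operand to its required location, and connect the two with a gap move (for fixed outputs the same pattern runs with the move direction reversed). A simplified standalone model; Operand, GapMove, and AllocateFixedUse are stand-ins, not V8 types:

#include <cstdio>

struct Operand {
  int vreg;
  int fixed_reg;  // -1 means unconstrained
};

struct GapMove {
  Operand from, to;
};

// Pin 'use' to its required register and record the connecting move.
static GapMove AllocateFixedUse(Operand* use, int required_reg) {
  Operand copy{use->vreg, -1};    // unconstrained copy keeps the value alive
  use->fixed_reg = required_reg;  // the instruction now sees the fixed reg
  return GapMove{copy, *use};     // gap move: copy -> fixed register
}

int main() {
  Operand input{7, -1};
  GapMove m = AllocateFixedUse(&input, 0 /* e.g. register 0 */);
  std::printf("move v%d(any) -> v%d(r%d)\n", m.from.vreg, m.to.vreg,
              m.to.fixed_reg);
  return 0;
}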
| 1213 bool RegisterAllocator::IsOutputRegisterOf(Instruction* instr, int index) { | 1218 bool RegisterAllocator::IsOutputRegisterOf(Instruction* instr, int index) { |
| 1214 for (size_t i = 0; i < instr->OutputCount(); i++) { | 1219 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 1215 InstructionOperand* output = instr->OutputAt(i); | 1220 auto output = instr->OutputAt(i); |
| 1216 if (output->IsRegister() && output->index() == index) return true; | 1221 if (output->IsRegister() && output->index() == index) return true; |
| 1217 } | 1222 } |
| 1218 return false; | 1223 return false; |
| 1219 } | 1224 } |
| 1220 | 1225 |
| 1221 | 1226 |
| 1222 bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr, | 1227 bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr, |
| 1223 int index) { | 1228 int index) { |
| 1224 for (size_t i = 0; i < instr->OutputCount(); i++) { | 1229 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 1225 InstructionOperand* output = instr->OutputAt(i); | 1230 auto output = instr->OutputAt(i); |
| 1226 if (output->IsDoubleRegister() && output->index() == index) return true; | 1231 if (output->IsDoubleRegister() && output->index() == index) return true; |
| 1227 } | 1232 } |
| 1228 return false; | 1233 return false; |
| 1229 } | 1234 } |
| 1230 | 1235 |
| 1231 | 1236 |
| 1232 void RegisterAllocator::ProcessInstructions(const InstructionBlock* block, | 1237 void RegisterAllocator::ProcessInstructions(const InstructionBlock* block, |
| 1233 BitVector* live) { | 1238 BitVector* live) { |
| 1234 int block_start = block->first_instruction_index(); | 1239 int block_start = block->first_instruction_index(); |
| 1235 | 1240 auto block_start_position = |
| 1236 LifetimePosition block_start_position = | |
| 1237 LifetimePosition::FromInstructionIndex(block_start); | 1241 LifetimePosition::FromInstructionIndex(block_start); |
| 1238 | 1242 |
| 1239 for (int index = block->last_instruction_index(); index >= block_start; | 1243 for (int index = block->last_instruction_index(); index >= block_start; |
| 1240 index--) { | 1244 index--) { |
| 1241 LifetimePosition curr_position = | 1245 auto curr_position = LifetimePosition::FromInstructionIndex(index); |
| 1242 LifetimePosition::FromInstructionIndex(index); | 1246 auto instr = InstructionAt(index); |
| 1243 | 1247 DCHECK(instr != nullptr); |
| 1244 Instruction* instr = InstructionAt(index); | |
| 1245 DCHECK(instr != NULL); | |
| 1246 if (instr->IsGapMoves()) { | 1248 if (instr->IsGapMoves()) { |
| 1247 // Process the moves of the gap instruction, making their sources live. | 1249 // Process the moves of the gap instruction, making their sources live. |
| 1248 GapInstruction* gap = code()->GapAt(index); | 1250 auto gap = code()->GapAt(index); |
| 1249 | 1251 |
| 1250 // TODO(titzer): no need to create the parallel move if it doesn't exist. | 1252 // TODO(titzer): no need to create the parallel move if it doesn't exist. |
| 1251 ParallelMove* move = | 1253 auto move = |
| 1252 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); | 1254 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); |
| 1253 const ZoneList<MoveOperands>* move_operands = move->move_operands(); | 1255 const ZoneList<MoveOperands>* move_operands = move->move_operands(); |
| 1254 for (int i = 0; i < move_operands->length(); ++i) { | 1256 for (int i = 0; i < move_operands->length(); ++i) { |
| 1255 MoveOperands* cur = &move_operands->at(i); | 1257 auto cur = &move_operands->at(i); |
| 1256 InstructionOperand* from = cur->source(); | 1258 auto from = cur->source(); |
| 1257 InstructionOperand* to = cur->destination(); | 1259 auto to = cur->destination(); |
| 1258 InstructionOperand* hint = to; | 1260 auto hint = to; |
| 1259 if (to->IsUnallocated()) { | 1261 if (to->IsUnallocated()) { |
| 1260 int to_vreg = UnallocatedOperand::cast(to)->virtual_register(); | 1262 int to_vreg = UnallocatedOperand::cast(to)->virtual_register(); |
| 1261 LiveRange* to_range = LiveRangeFor(to_vreg); | 1263 auto to_range = LiveRangeFor(to_vreg); |
| 1262 if (to_range->is_phi()) { | 1264 if (to_range->is_phi()) { |
| 1263 DCHECK(!FLAG_turbo_delay_ssa_decon); | 1265 DCHECK(!FLAG_turbo_delay_ssa_decon); |
| 1264 if (to_range->is_non_loop_phi()) { | 1266 if (to_range->is_non_loop_phi()) { |
| 1265 hint = to_range->current_hint_operand(); | 1267 hint = to_range->current_hint_operand(); |
| 1266 } | 1268 } |
| 1267 } else { | 1269 } else { |
| 1268 if (live->Contains(to_vreg)) { | 1270 if (live->Contains(to_vreg)) { |
| 1269 Define(curr_position, to, from); | 1271 Define(curr_position, to, from); |
| 1270 live->Remove(to_vreg); | 1272 live->Remove(to_vreg); |
| 1271 } else { | 1273 } else { |
| 1272 cur->Eliminate(); | 1274 cur->Eliminate(); |
| 1273 continue; | 1275 continue; |
| 1274 } | 1276 } |
| 1275 } | 1277 } |
| 1276 } else { | 1278 } else { |
| 1277 Define(curr_position, to, from); | 1279 Define(curr_position, to, from); |
| 1278 } | 1280 } |
| 1279 Use(block_start_position, curr_position, from, hint); | 1281 Use(block_start_position, curr_position, from, hint); |
| 1280 if (from->IsUnallocated()) { | 1282 if (from->IsUnallocated()) { |
| 1281 live->Add(UnallocatedOperand::cast(from)->virtual_register()); | 1283 live->Add(UnallocatedOperand::cast(from)->virtual_register()); |
| 1282 } | 1284 } |
| 1283 } | 1285 } |
| 1284 } else { | 1286 } else { |
| 1285 // Process output, inputs, and temps of this non-gap instruction. | 1287 // Process output, inputs, and temps of this non-gap instruction. |
| 1286 for (size_t i = 0; i < instr->OutputCount(); i++) { | 1288 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 1287 InstructionOperand* output = instr->OutputAt(i); | 1289 auto output = instr->OutputAt(i); |
| 1288 if (output->IsUnallocated()) { | 1290 if (output->IsUnallocated()) { |
| 1289 int out_vreg = UnallocatedOperand::cast(output)->virtual_register(); | 1291 int out_vreg = UnallocatedOperand::cast(output)->virtual_register(); |
| 1290 live->Remove(out_vreg); | 1292 live->Remove(out_vreg); |
| 1291 } else if (output->IsConstant()) { | 1293 } else if (output->IsConstant()) { |
| 1292 int out_vreg = output->index(); | 1294 int out_vreg = output->index(); |
| 1293 live->Remove(out_vreg); | 1295 live->Remove(out_vreg); |
| 1294 } | 1296 } |
| 1295 Define(curr_position, output, NULL); | 1297 Define(curr_position, output, nullptr); |
| 1296 } | 1298 } |
| 1297 | 1299 |
| 1298 if (instr->ClobbersRegisters()) { | 1300 if (instr->ClobbersRegisters()) { |
| 1299 for (int i = 0; i < config()->num_general_registers(); ++i) { | 1301 for (int i = 0; i < config()->num_general_registers(); ++i) { |
| 1300 if (!IsOutputRegisterOf(instr, i)) { | 1302 if (!IsOutputRegisterOf(instr, i)) { |
| 1301 LiveRange* range = FixedLiveRangeFor(i); | 1303 auto range = FixedLiveRangeFor(i); |
| 1302 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), | 1304 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), |
| 1303 local_zone()); | 1305 local_zone()); |
| 1304 } | 1306 } |
| 1305 } | 1307 } |
| 1306 } | 1308 } |
| 1307 | 1309 |
| 1308 if (instr->ClobbersDoubleRegisters()) { | 1310 if (instr->ClobbersDoubleRegisters()) { |
| 1309 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { | 1311 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { |
| 1310 if (!IsOutputDoubleRegisterOf(instr, i)) { | 1312 if (!IsOutputDoubleRegisterOf(instr, i)) { |
| 1311 LiveRange* range = FixedDoubleLiveRangeFor(i); | 1313 auto range = FixedDoubleLiveRangeFor(i); |
| 1312 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), | 1314 range->AddUseInterval(curr_position, curr_position.InstructionEnd(), |
| 1313 local_zone()); | 1315 local_zone()); |
| 1314 } | 1316 } |
| 1315 } | 1317 } |
| 1316 } | 1318 } |
| 1317 | 1319 |
| 1318 for (size_t i = 0; i < instr->InputCount(); i++) { | 1320 for (size_t i = 0; i < instr->InputCount(); i++) { |
| 1319 InstructionOperand* input = instr->InputAt(i); | 1321 auto input = instr->InputAt(i); |
| 1320 if (input->IsImmediate()) continue; // Ignore immediates. | 1322 if (input->IsImmediate()) continue; // Ignore immediates. |
| 1321 LifetimePosition use_pos; | 1323 LifetimePosition use_pos; |
| 1322 if (input->IsUnallocated() && | 1324 if (input->IsUnallocated() && |
| 1323 UnallocatedOperand::cast(input)->IsUsedAtStart()) { | 1325 UnallocatedOperand::cast(input)->IsUsedAtStart()) { |
| 1324 use_pos = curr_position; | 1326 use_pos = curr_position; |
| 1325 } else { | 1327 } else { |
| 1326 use_pos = curr_position.InstructionEnd(); | 1328 use_pos = curr_position.InstructionEnd(); |
| 1327 } | 1329 } |
| 1328 | 1330 |
| 1329 Use(block_start_position, use_pos, input, NULL); | 1331 Use(block_start_position, use_pos, input, nullptr); |
| 1330 if (input->IsUnallocated()) { | 1332 if (input->IsUnallocated()) { |
| 1331 live->Add(UnallocatedOperand::cast(input)->virtual_register()); | 1333 live->Add(UnallocatedOperand::cast(input)->virtual_register()); |
| 1332 } | 1334 } |
| 1333 } | 1335 } |
| 1334 | 1336 |
| 1335 for (size_t i = 0; i < instr->TempCount(); i++) { | 1337 for (size_t i = 0; i < instr->TempCount(); i++) { |
| 1336 InstructionOperand* temp = instr->TempAt(i); | 1338 auto temp = instr->TempAt(i); |
| 1337 if (instr->ClobbersTemps()) { | 1339 if (instr->ClobbersTemps()) { |
| 1338 if (temp->IsRegister()) continue; | 1340 if (temp->IsRegister()) continue; |
| 1339 if (temp->IsUnallocated()) { | 1341 if (temp->IsUnallocated()) { |
| 1340 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp); | 1342 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp); |
| 1341 if (temp_unalloc->HasFixedPolicy()) { | 1343 if (temp_unalloc->HasFixedPolicy()) { |
| 1342 continue; | 1344 continue; |
| 1343 } | 1345 } |
| 1344 } | 1346 } |
| 1345 } | 1347 } |
| 1346 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL); | 1348 Use(block_start_position, curr_position.InstructionEnd(), temp, |
| 1347 Define(curr_position, temp, NULL); | 1349 nullptr); |
| 1350 Define(curr_position, temp, nullptr); |
| 1348 } | 1351 } |
| 1349 } | 1352 } |
| 1350 } | 1353 } |
| 1351 } | 1354 } |
| 1352 | 1355 |
| 1353 | 1356 |
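Stripped of gap moves, register clobbers, and hints, ProcessInstructions is a standard backward liveness scan: outputs kill, inputs generate. The real function also builds use intervals via Define/Use; the toy version below only computes the live-in set, with std::bitset standing in for BitVector:

#include <bitset>
#include <cstdio>
#include <vector>

struct Instr {
  std::vector<int> defs;
  std::vector<int> uses;
};

static std::bitset<32> LiveIn(const std::vector<Instr>& block,
                              std::bitset<32> live_out) {
  std::bitset<32> live = live_out;
  for (auto it = block.rbegin(); it != block.rend(); ++it) {
    for (int d : it->defs) live.reset(d);  // Define(): the output is killed
    for (int u : it->uses) live.set(u);    // Use(): the input is live here
  }
  return live;
}

int main() {
  // v2 = v0 + v1; v3 = v2
  std::vector<Instr> block = {{{2}, {0, 1}}, {{3}, {2}}};
  std::bitset<32> live_out;
  live_out.set(3);
  // Prints bits 0 and 1 set: only v0 and v1 are live on entry.
  std::printf("%s\n", LiveIn(block, live_out).to_string().c_str());
  return 0;
}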
| 1354 void RegisterAllocator::ResolvePhis(const InstructionBlock* block) { | 1357 void RegisterAllocator::ResolvePhis(const InstructionBlock* block) { |
| 1355 for (auto phi : block->phis()) { | 1358 for (auto phi : block->phis()) { |
| 1356 if (FLAG_turbo_reuse_spill_slots) { | 1359 if (FLAG_turbo_reuse_spill_slots) { |
| 1357 auto res = phi_map_.insert( | 1360 auto res = phi_map_.insert( |
| 1358 std::make_pair(phi->virtual_register(), PhiMapValue(phi, block))); | 1361 std::make_pair(phi->virtual_register(), PhiMapValue(phi, block))); |
| 1359 DCHECK(res.second); | 1362 DCHECK(res.second); |
| 1360 USE(res); | 1363 USE(res); |
| 1361 } | 1364 } |
| 1362 auto output = phi->output(); | 1365 auto output = phi->output(); |
| 1363 int phi_vreg = phi->virtual_register(); | 1366 int phi_vreg = phi->virtual_register(); |
| 1364 if (!FLAG_turbo_delay_ssa_decon) { | 1367 if (!FLAG_turbo_delay_ssa_decon) { |
| 1365 for (size_t i = 0; i < phi->operands().size(); ++i) { | 1368 for (size_t i = 0; i < phi->operands().size(); ++i) { |
| 1366 InstructionBlock* cur_block = | 1369 InstructionBlock* cur_block = |
| 1367 code()->InstructionBlockAt(block->predecessors()[i]); | 1370 code()->InstructionBlockAt(block->predecessors()[i]); |
| 1368 // The gap move must be added without the special processing done in | 1371 // The gap move must be added without the special processing done in |
| 1369 // AddConstraintsGapMove. | 1372 // AddConstraintsGapMove. |
| 1370 code()->AddGapMove(cur_block->last_instruction_index() - 1, | 1373 code()->AddGapMove(cur_block->last_instruction_index() - 1, |
| 1371 phi->inputs()[i], output); | 1374 phi->inputs()[i], output); |
| 1372 DCHECK(!InstructionAt(cur_block->last_instruction_index()) | 1375 DCHECK(!InstructionAt(cur_block->last_instruction_index()) |
| 1373 ->HasPointerMap()); | 1376 ->HasPointerMap()); |
| 1374 } | 1377 } |
| 1375 } | 1378 } |
| 1376 LiveRange* live_range = LiveRangeFor(phi_vreg); | 1379 auto live_range = LiveRangeFor(phi_vreg); |
| 1377 int gap_index = block->first_instruction_index(); | 1380 int gap_index = block->first_instruction_index(); |
| 1378 live_range->SpillAtDefinition(local_zone(), gap_index, output); | 1381 live_range->SpillAtDefinition(local_zone(), gap_index, output); |
| 1379 live_range->SetSpillStartIndex(gap_index); | 1382 live_range->SetSpillStartIndex(gap_index); |
| 1380 // We use the phi-ness of some nodes in some later heuristics. | 1383 // We use the phi-ness of some nodes in some later heuristics. |
| 1381 live_range->set_is_phi(true); | 1384 live_range->set_is_phi(true); |
| 1382 if (!block->IsLoopHeader()) { | 1385 live_range->set_is_non_loop_phi(!block->IsLoopHeader()); |
| 1383 live_range->set_is_non_loop_phi(true); | |
| 1384 } | |
| 1385 } | 1386 } |
| 1386 } | 1387 } |
| 1387 | 1388 |
| 1388 | 1389 |
| 1389 void RegisterAllocator::MeetRegisterConstraints() { | 1390 void RegisterAllocator::MeetRegisterConstraints() { |
| 1390 for (auto block : code()->instruction_blocks()) { | 1391 for (auto block : code()->instruction_blocks()) { |
| 1391 MeetRegisterConstraints(block); | 1392 MeetRegisterConstraints(block); |
| 1392 } | 1393 } |
| 1393 } | 1394 } |
| 1394 | 1395 |
| 1395 | 1396 |
| 1396 void RegisterAllocator::ResolvePhis() { | 1397 void RegisterAllocator::ResolvePhis() { |
| 1397 // Process the blocks in reverse order. | 1398 // Process the blocks in reverse order. |
| 1398 for (auto i = code()->instruction_blocks().rbegin(); | 1399 for (auto i = code()->instruction_blocks().rbegin(); |
| 1399 i != code()->instruction_blocks().rend(); ++i) { | 1400 i != code()->instruction_blocks().rend(); ++i) { |
| 1400 ResolvePhis(*i); | 1401 ResolvePhis(*i); |
| 1401 } | 1402 } |
| 1402 } | 1403 } |
| 1403 | 1404 |
| 1404 | 1405 |
| 1405 ParallelMove* RegisterAllocator::GetConnectingParallelMove( | 1406 ParallelMove* RegisterAllocator::GetConnectingParallelMove( |
| 1406 LifetimePosition pos) { | 1407 LifetimePosition pos) { |
| 1407 int index = pos.InstructionIndex(); | 1408 int index = pos.InstructionIndex(); |
| 1408 if (code()->IsGapAt(index)) { | 1409 if (code()->IsGapAt(index)) { |
| 1409 GapInstruction* gap = code()->GapAt(index); | 1410 auto gap = code()->GapAt(index); |
| 1410 return gap->GetOrCreateParallelMove( | 1411 return gap->GetOrCreateParallelMove( |
| 1411 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END, | 1412 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END, |
| 1412 code_zone()); | 1413 code_zone()); |
| 1413 } | 1414 } |
| 1414 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); | 1415 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); |
| 1415 return code()->GapAt(gap_pos)->GetOrCreateParallelMove( | 1416 return code()->GapAt(gap_pos)->GetOrCreateParallelMove( |
| 1416 (gap_pos < index) ? GapInstruction::AFTER : GapInstruction::BEFORE, | 1417 (gap_pos < index) ? GapInstruction::AFTER : GapInstruction::BEFORE, |
| 1417 code_zone()); | 1418 code_zone()); |
| 1418 } | 1419 } |
| 1419 | 1420 |
| 1420 | 1421 |
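GetConnectingParallelMove maps a lifetime position onto one of four move slots attached to a gap: a position on the gap itself uses that gap's START or END, while a position on a real instruction redirects to the previous gap's AFTER (start positions) or the next gap's BEFORE (end positions). As a pure function, under the toy assumption that even instruction indices are gaps (an assumption of this sketch, not something the V8 code guarantees):

#include <cstdio>

enum InnerPosition { BEFORE, START, END, AFTER };

struct MoveSlot {
  int gap_index;
  InnerPosition inner;
};

static MoveSlot ConnectingSlot(int index, bool is_instruction_start) {
  bool is_gap = (index % 2 == 0);  // toy stand-in for code()->IsGapAt()
  if (is_gap) {
    return {index, is_instruction_start ? START : END};
  }
  int gap = is_instruction_start ? index - 1 : index + 1;
  return {gap, gap < index ? AFTER : BEFORE};
}

int main() {
  MoveSlot s = ConnectingSlot(5, true);  // start of a non-gap instruction
  std::printf("gap %d inner %d\n", s.gap_index, s.inner);  // gap 4, AFTER
  return 0;
}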
| 1421 const InstructionBlock* RegisterAllocator::GetInstructionBlock( | 1422 const InstructionBlock* RegisterAllocator::GetInstructionBlock( |
| 1422 LifetimePosition pos) { | 1423 LifetimePosition pos) { |
| 1423 return code()->GetInstructionBlock(pos.InstructionIndex()); | 1424 return code()->GetInstructionBlock(pos.InstructionIndex()); |
| 1424 } | 1425 } |
| 1425 | 1426 |
| 1426 | 1427 |
| 1427 void RegisterAllocator::ConnectRanges() { | 1428 void RegisterAllocator::ConnectRanges() { |
| 1428 for (size_t i = 0; i < live_ranges().size(); ++i) { | 1429 for (auto first_range : live_ranges()) { |
| 1429 LiveRange* first_range = live_ranges().at(i); | 1430 if (first_range == nullptr || first_range->IsChild()) continue; |
| 1430 if (first_range == NULL || first_range->parent() != NULL) continue; | 1431 auto second_range = first_range->next(); |
| 1431 | 1432 while (second_range != nullptr) { |
| 1432 LiveRange* second_range = first_range->next(); | 1433 auto pos = second_range->Start(); |
| 1433 while (second_range != NULL) { | |
| 1434 LifetimePosition pos = second_range->Start(); | |
| 1435 | |
| 1436 if (!second_range->IsSpilled()) { | 1434 if (!second_range->IsSpilled()) { |
| 1437 // Add gap move if the two live ranges touch and there is no block | 1435 // Add gap move if the two live ranges touch and there is no block |
| 1438 // boundary. | 1436 // boundary. |
| 1439 if (first_range->End().Value() == pos.Value()) { | 1437 if (first_range->End().Value() == pos.Value()) { |
| 1440 bool should_insert = true; | 1438 bool should_insert = true; |
| 1441 if (IsBlockBoundary(pos)) { | 1439 if (IsBlockBoundary(pos)) { |
| 1442 should_insert = | 1440 should_insert = |
| 1443 CanEagerlyResolveControlFlow(GetInstructionBlock(pos)); | 1441 CanEagerlyResolveControlFlow(GetInstructionBlock(pos)); |
| 1444 } | 1442 } |
| 1445 if (should_insert) { | 1443 if (should_insert) { |
| 1446 ParallelMove* move = GetConnectingParallelMove(pos); | 1444 auto move = GetConnectingParallelMove(pos); |
| 1447 InstructionOperand* prev_operand = | 1445 auto prev_operand = first_range->CreateAssignedOperand(code_zone()); |
| 1448 first_range->CreateAssignedOperand(code_zone()); | 1446 auto cur_operand = second_range->CreateAssignedOperand(code_zone()); |
| 1449 InstructionOperand* cur_operand = | |
| 1450 second_range->CreateAssignedOperand(code_zone()); | |
| 1451 move->AddMove(prev_operand, cur_operand, code_zone()); | 1447 move->AddMove(prev_operand, cur_operand, code_zone()); |
| 1452 } | 1448 } |
| 1453 } | 1449 } |
| 1454 } | 1450 } |
| 1455 | |
| 1456 first_range = second_range; | 1451 first_range = second_range; |
| 1457 second_range = second_range->next(); | 1452 second_range = second_range->next(); |
| 1458 } | 1453 } |
| 1459 } | 1454 } |
| 1460 } | 1455 } |
| 1461 | 1456 |
| 1462 | 1457 |
| 1463 bool RegisterAllocator::CanEagerlyResolveControlFlow( | 1458 bool RegisterAllocator::CanEagerlyResolveControlFlow( |
| 1464 const InstructionBlock* block) const { | 1459 const InstructionBlock* block) const { |
| 1465 if (block->PredecessorCount() != 1) return false; | 1460 if (block->PredecessorCount() != 1) return false; |
| (...skipping 27 matching lines...) |
| 1493 struct FindResult { | 1488 struct FindResult { |
| 1494 const LiveRange* cur_cover_; | 1489 const LiveRange* cur_cover_; |
| 1495 const LiveRange* pred_cover_; | 1490 const LiveRange* pred_cover_; |
| 1496 }; | 1491 }; |
| 1497 | 1492 |
| 1498 | 1493 |
| 1499 class LiveRangeBoundArray { | 1494 class LiveRangeBoundArray { |
| 1500 public: | 1495 public: |
| 1501 LiveRangeBoundArray() : length_(0), start_(nullptr) {} | 1496 LiveRangeBoundArray() : length_(0), start_(nullptr) {} |
| 1502 | 1497 |
| 1503 bool ShouldInitialize() { return start_ == NULL; } | 1498 bool ShouldInitialize() { return start_ == nullptr; } |
| 1504 | 1499 |
| 1505 void Initialize(Zone* zone, const LiveRange* const range) { | 1500 void Initialize(Zone* zone, const LiveRange* const range) { |
| 1506 size_t length = 0; | 1501 size_t length = 0; |
| 1507 for (const LiveRange* i = range; i != NULL; i = i->next()) length++; | 1502 for (auto i = range; i != nullptr; i = i->next()) length++; |
| 1508 start_ = zone->NewArray<LiveRangeBound>(static_cast<int>(length)); | 1503 start_ = zone->NewArray<LiveRangeBound>(static_cast<int>(length)); |
| 1509 length_ = length; | 1504 length_ = length; |
| 1510 LiveRangeBound* curr = start_; | 1505 auto curr = start_; |
| 1511 for (const LiveRange* i = range; i != NULL; i = i->next(), ++curr) { | 1506 for (auto i = range; i != nullptr; i = i->next(), ++curr) { |
| 1512 new (curr) LiveRangeBound(i); | 1507 new (curr) LiveRangeBound(i); |
| 1513 } | 1508 } |
| 1514 } | 1509 } |
| 1515 | 1510 |
| 1516 LiveRangeBound* Find(const LifetimePosition position) const { | 1511 LiveRangeBound* Find(const LifetimePosition position) const { |
| 1517 size_t left_index = 0; | 1512 size_t left_index = 0; |
| 1518 size_t right_index = length_; | 1513 size_t right_index = length_; |
| 1519 while (true) { | 1514 while (true) { |
| 1520 size_t current_index = left_index + (right_index - left_index) / 2; | 1515 size_t current_index = left_index + (right_index - left_index) / 2; |
| 1521 DCHECK(right_index > current_index); | 1516 DCHECK(right_index > current_index); |
| 1522 LiveRangeBound* bound = &start_[current_index]; | 1517 auto bound = &start_[current_index]; |
| 1523 if (bound->start_.Value() <= position.Value()) { | 1518 if (bound->start_.Value() <= position.Value()) { |
| 1524 if (position.Value() < bound->end_.Value()) return bound; | 1519 if (position.Value() < bound->end_.Value()) return bound; |
| 1525 DCHECK(left_index < current_index); | 1520 DCHECK(left_index < current_index); |
| 1526 left_index = current_index; | 1521 left_index = current_index; |
| 1527 } else { | 1522 } else { |
| 1528 right_index = current_index; | 1523 right_index = current_index; |
| 1529 } | 1524 } |
| 1530 } | 1525 } |
| 1531 } | 1526 } |
| 1532 | 1527 |
| 1533 LiveRangeBound* FindPred(const InstructionBlock* pred) { | 1528 LiveRangeBound* FindPred(const InstructionBlock* pred) { |
| 1534 const LifetimePosition pred_end = | 1529 auto pred_end = |
| 1535 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | 1530 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); |
| 1536 return Find(pred_end); | 1531 return Find(pred_end); |
| 1537 } | 1532 } |
| 1538 | 1533 |
| 1539 LiveRangeBound* FindSucc(const InstructionBlock* succ) { | 1534 LiveRangeBound* FindSucc(const InstructionBlock* succ) { |
| 1540 const LifetimePosition succ_start = | 1535 auto succ_start = |
| 1541 LifetimePosition::FromInstructionIndex(succ->first_instruction_index()); | 1536 LifetimePosition::FromInstructionIndex(succ->first_instruction_index()); |
| 1542 return Find(succ_start); | 1537 return Find(succ_start); |
| 1543 } | 1538 } |
| 1544 | 1539 |
| 1545 void Find(const InstructionBlock* block, const InstructionBlock* pred, | 1540 void Find(const InstructionBlock* block, const InstructionBlock* pred, |
| 1546 FindResult* result) const { | 1541 FindResult* result) const { |
| 1547 const LifetimePosition pred_end = | 1542 auto pred_end = |
| 1548 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | 1543 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); |
| 1549 LiveRangeBound* bound = Find(pred_end); | 1544 auto bound = Find(pred_end); |
| 1550 result->pred_cover_ = bound->range_; | 1545 result->pred_cover_ = bound->range_; |
| 1551 const LifetimePosition cur_start = LifetimePosition::FromInstructionIndex( | 1546 auto cur_start = LifetimePosition::FromInstructionIndex( |
| 1552 block->first_instruction_index()); | 1547 block->first_instruction_index()); |
| 1553 // Common case. | 1548 // Common case. |
| 1554 if (bound->CanCover(cur_start)) { | 1549 if (bound->CanCover(cur_start)) { |
| 1555 result->cur_cover_ = bound->range_; | 1550 result->cur_cover_ = bound->range_; |
| 1556 return; | 1551 return; |
| 1557 } | 1552 } |
| 1558 result->cur_cover_ = Find(cur_start)->range_; | 1553 result->cur_cover_ = Find(cur_start)->range_; |
| 1559 DCHECK(result->pred_cover_ != NULL && result->cur_cover_ != NULL); | 1554 DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr); |
| 1560 } | 1555 } |
| 1561 | 1556 |
| 1562 private: | 1557 private: |
| 1563 size_t length_; | 1558 size_t length_; |
| 1564 LiveRangeBound* start_; | 1559 LiveRangeBound* start_; |
| 1565 | 1560 |
| 1566 DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray); | 1561 DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray); |
| 1567 }; | 1562 }; |
| 1568 | 1563 |
| 1569 | 1564 |
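LiveRangeBoundArray::Find is a binary search over sorted, non-overlapping [start, end) bounds. Like the original, the sketch below assumes the position is covered by some bound; otherwise the loop would not terminate, which the original's DCHECKs guard against. Plain ints stand in for LifetimePosition:

#include <cassert>
#include <cstddef>

struct Bound {
  int start, end;  // half-open, sorted, pairwise disjoint
};

static const Bound* Find(const Bound* bounds, size_t length, int pos) {
  size_t left = 0, right = length;
  while (true) {
    size_t mid = left + (right - left) / 2;
    const Bound* b = &bounds[mid];
    if (b->start <= pos) {
      if (pos < b->end) return b;  // covered: found the bound
      left = mid;                  // pos lies to the right of mid
    } else {
      right = mid;                 // pos lies to the left of mid
    }
  }
}

int main() {
  Bound bounds[] = {{0, 4}, {6, 9}, {12, 20}};
  assert(Find(bounds, 3, 7) == &bounds[1]);
  assert(Find(bounds, 3, 13) == &bounds[2]);
  return 0;
}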
| 1570 class LiveRangeFinder { | 1565 class LiveRangeFinder { |
| 1571 public: | 1566 public: |
| 1572 explicit LiveRangeFinder(const RegisterAllocator& allocator) | 1567 explicit LiveRangeFinder(const RegisterAllocator& allocator) |
| 1573 : allocator_(allocator), | 1568 : allocator_(allocator), |
| 1574 bounds_length_(static_cast<int>(allocator.live_ranges().size())), | 1569 bounds_length_(static_cast<int>(allocator.live_ranges().size())), |
| 1575 bounds_(allocator.local_zone()->NewArray<LiveRangeBoundArray>( | 1570 bounds_(allocator.local_zone()->NewArray<LiveRangeBoundArray>( |
| 1576 bounds_length_)) { | 1571 bounds_length_)) { |
| 1577 for (int i = 0; i < bounds_length_; ++i) { | 1572 for (int i = 0; i < bounds_length_; ++i) { |
| 1578 new (&bounds_[i]) LiveRangeBoundArray(); | 1573 new (&bounds_[i]) LiveRangeBoundArray(); |
| 1579 } | 1574 } |
| 1580 } | 1575 } |
| 1581 | 1576 |
| 1582 LiveRangeBoundArray* ArrayFor(int operand_index) { | 1577 LiveRangeBoundArray* ArrayFor(int operand_index) { |
| 1583 DCHECK(operand_index < bounds_length_); | 1578 DCHECK(operand_index < bounds_length_); |
| 1584 const LiveRange* range = allocator_.live_ranges()[operand_index]; | 1579 auto range = allocator_.live_ranges()[operand_index]; |
| 1585 DCHECK(range != nullptr && !range->IsEmpty()); | 1580 DCHECK(range != nullptr && !range->IsEmpty()); |
| 1586 LiveRangeBoundArray* array = &bounds_[operand_index]; | 1581 auto array = &bounds_[operand_index]; |
| 1587 if (array->ShouldInitialize()) { | 1582 if (array->ShouldInitialize()) { |
| 1588 array->Initialize(allocator_.local_zone(), range); | 1583 array->Initialize(allocator_.local_zone(), range); |
| 1589 } | 1584 } |
| 1590 return array; | 1585 return array; |
| 1591 } | 1586 } |
| 1592 | 1587 |
| 1593 private: | 1588 private: |
| 1594 const RegisterAllocator& allocator_; | 1589 const RegisterAllocator& allocator_; |
| 1595 const int bounds_length_; | 1590 const int bounds_length_; |
| 1596 LiveRangeBoundArray* const bounds_; | 1591 LiveRangeBoundArray* const bounds_; |
| (...skipping 22 matching lines...) |
| 1619 const InstructionBlock* pred_block = code()->InstructionBlockAt(pred); | 1614 const InstructionBlock* pred_block = code()->InstructionBlockAt(pred); |
| 1620 auto* pred_bound = finder.ArrayFor(phi->operands()[pred_index]) | 1615 auto* pred_bound = finder.ArrayFor(phi->operands()[pred_index]) |
| 1621 ->FindPred(pred_block); | 1616 ->FindPred(pred_block); |
| 1622 auto pred_op = pred_bound->range_->CreateAssignedOperand(code_zone()); | 1617 auto pred_op = pred_bound->range_->CreateAssignedOperand(code_zone()); |
| 1623 phi->inputs()[pred_index] = pred_op; | 1618 phi->inputs()[pred_index] = pred_op; |
| 1624 ResolveControlFlow(block, phi_output, pred_block, pred_op); | 1619 ResolveControlFlow(block, phi_output, pred_block, pred_op); |
| 1625 pred_index++; | 1620 pred_index++; |
| 1626 } | 1621 } |
| 1627 } | 1622 } |
| 1628 } | 1623 } |
| 1629 BitVector* live = live_in_sets_[block->rpo_number().ToInt()]; | 1624 auto live = live_in_sets_[block->rpo_number().ToInt()]; |
| 1630 BitVector::Iterator iterator(live); | 1625 BitVector::Iterator iterator(live); |
| 1631 while (!iterator.Done()) { | 1626 while (!iterator.Done()) { |
| 1632 auto* array = finder.ArrayFor(iterator.Current()); | 1627 auto* array = finder.ArrayFor(iterator.Current()); |
| 1633 for (auto pred : block->predecessors()) { | 1628 for (auto pred : block->predecessors()) { |
| 1634 FindResult result; | 1629 FindResult result; |
| 1635 const auto* pred_block = code()->InstructionBlockAt(pred); | 1630 const auto* pred_block = code()->InstructionBlockAt(pred); |
| 1636 array->Find(block, pred_block, &result); | 1631 array->Find(block, pred_block, &result); |
| 1637 if (result.cur_cover_ == result.pred_cover_ || | 1632 if (result.cur_cover_ == result.pred_cover_ || |
| 1638 result.cur_cover_->IsSpilled()) | 1633 result.cur_cover_->IsSpilled()) |
| 1639 continue; | 1634 continue; |
| 1640 InstructionOperand* pred_op = | 1635 auto pred_op = result.pred_cover_->CreateAssignedOperand(code_zone()); |
| 1641 result.pred_cover_->CreateAssignedOperand(code_zone()); | 1636 auto cur_op = result.cur_cover_->CreateAssignedOperand(code_zone()); |
| 1642 InstructionOperand* cur_op = | |
| 1643 result.cur_cover_->CreateAssignedOperand(code_zone()); | |
| 1644 ResolveControlFlow(block, cur_op, pred_block, pred_op); | 1637 ResolveControlFlow(block, cur_op, pred_block, pred_op); |
| 1645 } | 1638 } |
| 1646 iterator.Advance(); | 1639 iterator.Advance(); |
| 1647 } | 1640 } |
| 1648 } | 1641 } |
| 1649 } | 1642 } |
| 1650 | 1643 |
| 1651 | 1644 |
| 1652 void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block, | 1645 void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block, |
| 1653 InstructionOperand* cur_op, | 1646 InstructionOperand* cur_op, |
| 1654 const InstructionBlock* pred, | 1647 const InstructionBlock* pred, |
| 1655 InstructionOperand* pred_op) { | 1648 InstructionOperand* pred_op) { |
| 1656 if (pred_op->Equals(cur_op)) return; | 1649 if (pred_op->Equals(cur_op)) return; |
| 1657 GapInstruction* gap = nullptr; | 1650 GapInstruction* gap = nullptr; |
| 1658 if (block->PredecessorCount() == 1) { | 1651 if (block->PredecessorCount() == 1) { |
| 1659 gap = code()->GapAt(block->first_instruction_index()); | 1652 gap = code()->GapAt(block->first_instruction_index()); |
| 1660 } else { | 1653 } else { |
| 1661 DCHECK(pred->SuccessorCount() == 1); | 1654 DCHECK(pred->SuccessorCount() == 1); |
| 1662 gap = GetLastGap(pred); | 1655 gap = GetLastGap(pred); |
| 1663 | 1656 auto branch = InstructionAt(pred->last_instruction_index()); |
| 1664 Instruction* branch = InstructionAt(pred->last_instruction_index()); | |
| 1665 DCHECK(!branch->HasPointerMap()); | 1657 DCHECK(!branch->HasPointerMap()); |
| 1666 USE(branch); | 1658 USE(branch); |
| 1667 } | 1659 } |
| 1668 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()) | 1660 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()) |
| 1669 ->AddMove(pred_op, cur_op, code_zone()); | 1661 ->AddMove(pred_op, cur_op, code_zone()); |
| 1670 } | 1662 } |
| 1671 | 1663 |
| 1672 | 1664 |
| 1673 void RegisterAllocator::BuildLiveRanges() { | 1665 void RegisterAllocator::BuildLiveRanges() { |
| 1674 InitializeLivenessAnalysis(); | |
| 1675 // Process the blocks in reverse order. | 1666 // Process the blocks in reverse order. |
| 1676 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; | 1667 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; |
| 1677 --block_id) { | 1668 --block_id) { |
| 1678 const InstructionBlock* block = | 1669 auto block = |
| 1679 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id)); | 1670 code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id)); |
| 1680 BitVector* live = ComputeLiveOut(block); | 1671 auto live = ComputeLiveOut(block); |
| 1681 // Initially consider all live_out values live for the entire block. We | 1672 // Initially consider all live_out values live for the entire block. We |
| 1682 // will shorten these intervals if necessary. | 1673 // will shorten these intervals if necessary. |
| 1683 AddInitialIntervals(block, live); | 1674 AddInitialIntervals(block, live); |
| 1684 | 1675 |
| 1685 // Process the instructions in reverse order, generating and killing | 1676 // Process the instructions in reverse order, generating and killing |
| 1686 // live values. | 1677 // live values. |
| 1687 ProcessInstructions(block, live); | 1678 ProcessInstructions(block, live); |
| 1688 // All phi output operands are killed by this block. | 1679 // All phi output operands are killed by this block. |
| 1689 for (auto phi : block->phis()) { | 1680 for (auto phi : block->phis()) { |
| 1690 // The live range interval already ends at the first instruction of the | 1681 // The live range interval already ends at the first instruction of the |
| 1691 // block. | 1682 // block. |
| 1692 int phi_vreg = phi->virtual_register(); | 1683 int phi_vreg = phi->virtual_register(); |
| 1693 live->Remove(phi_vreg); | 1684 live->Remove(phi_vreg); |
| 1694 if (!FLAG_turbo_delay_ssa_decon) { | 1685 if (!FLAG_turbo_delay_ssa_decon) { |
| 1695 InstructionOperand* hint = NULL; | 1686 InstructionOperand* hint = nullptr; |
| 1696 InstructionOperand* phi_operand = NULL; | 1687 InstructionOperand* phi_operand = nullptr; |
| 1697 GapInstruction* gap = | 1688 auto gap = |
| 1698 GetLastGap(code()->InstructionBlockAt(block->predecessors()[0])); | 1689 GetLastGap(code()->InstructionBlockAt(block->predecessors()[0])); |
| 1699 ParallelMove* move = | 1690 auto move = |
| 1700 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); | 1691 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); |
| 1701 for (int j = 0; j < move->move_operands()->length(); ++j) { | 1692 for (int j = 0; j < move->move_operands()->length(); ++j) { |
| 1702 InstructionOperand* to = move->move_operands()->at(j).destination(); | 1693 auto to = move->move_operands()->at(j).destination(); |
| 1703 if (to->IsUnallocated() && | 1694 if (to->IsUnallocated() && |
| 1704 UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) { | 1695 UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) { |
| 1705 hint = move->move_operands()->at(j).source(); | 1696 hint = move->move_operands()->at(j).source(); |
| 1706 phi_operand = to; | 1697 phi_operand = to; |
| 1707 break; | 1698 break; |
| 1708 } | 1699 } |
| 1709 } | 1700 } |
| 1710 DCHECK(hint != NULL); | 1701 DCHECK(hint != nullptr); |
| 1711 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( | 1702 auto block_start = LifetimePosition::FromInstructionIndex( |
| 1712 block->first_instruction_index()); | 1703 block->first_instruction_index()); |
| 1713 Define(block_start, phi_operand, hint); | 1704 Define(block_start, phi_operand, hint); |
| 1714 } | 1705 } |
| 1715 } | 1706 } |
| 1716 | 1707 |
| 1717 // Now live is live_in for this block, except that it does not include | 1708 // Now live is live_in for this block, except that it does not include |
| 1718 // values live out on backward successor edges. | 1709 // values live out on backward successor edges. |
| 1719 live_in_sets_[block_id] = live; | 1710 live_in_sets_[block_id] = live; |
| 1720 | 1711 |
| 1721 if (block->IsLoopHeader()) { | 1712 if (block->IsLoopHeader()) { |
| 1722 // Add a live range stretching from the first loop instruction to the last | 1713 // Add a live range stretching from the first loop instruction to the last |
| 1723 // for each value live on entry to the header. | 1714 // for each value live on entry to the header. |
| 1724 BitVector::Iterator iterator(live); | 1715 BitVector::Iterator iterator(live); |
| 1725 LifetimePosition start = LifetimePosition::FromInstructionIndex( | 1716 auto start = LifetimePosition::FromInstructionIndex( |
| 1726 block->first_instruction_index()); | 1717 block->first_instruction_index()); |
| 1727 LifetimePosition end = | 1718 auto end = LifetimePosition::FromInstructionIndex( |
| 1728 LifetimePosition::FromInstructionIndex( | 1719 code()->LastLoopInstructionIndex(block)).NextInstruction(); |
| 1729 code()->LastLoopInstructionIndex(block)).NextInstruction(); | |
| 1730 while (!iterator.Done()) { | 1720 while (!iterator.Done()) { |
| 1731 int operand_index = iterator.Current(); | 1721 int operand_index = iterator.Current(); |
| 1732 LiveRange* range = LiveRangeFor(operand_index); | 1722 auto range = LiveRangeFor(operand_index); |
| 1733 range->EnsureInterval(start, end, local_zone()); | 1723 range->EnsureInterval(start, end, local_zone()); |
| 1734 iterator.Advance(); | 1724 iterator.Advance(); |
| 1735 } | 1725 } |
| 1736 | |
| 1737 // Insert all values into the live in sets of all blocks in the loop. | 1726 // Insert all values into the live in sets of all blocks in the loop. |
| 1738 for (int i = block->rpo_number().ToInt() + 1; | 1727 for (int i = block->rpo_number().ToInt() + 1; |
| 1739 i < block->loop_end().ToInt(); ++i) { | 1728 i < block->loop_end().ToInt(); ++i) { |
| 1740 live_in_sets_[i]->Union(*live); | 1729 live_in_sets_[i]->Union(*live); |
| 1741 } | 1730 } |
| 1742 } | 1731 } |
| 1743 } | 1732 } |
| 1744 | 1733 |
| 1745 for (size_t i = 0; i < live_ranges_.size(); ++i) { | 1734 for (auto range : live_ranges()) { |
| 1746 if (live_ranges_[i] != NULL) { | 1735 if (range == nullptr) continue; |
| 1747 live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id()); | 1736 range->kind_ = RequiredRegisterKind(range->id()); |
| 1748 | 1737 // TODO(bmeurer): This is a horrible hack to make sure that for constant |
| 1749 // TODO(bmeurer): This is a horrible hack to make sure that for constant | 1738 // live ranges, every use requires the constant to be in a register. |
| 1750 // live ranges, every use requires the constant to be in a register. | 1739 // Without this hack, all uses with "any" policy would get the constant |
| 1751 // Without this hack, all uses with "any" policy would get the constant | 1740 // operand assigned. |
| 1752 // operand assigned. | 1741 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) { |
| 1753 LiveRange* range = live_ranges_[i]; | 1742 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) { |
| 1754 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) { | 1743 pos->register_beneficial_ = true; |
| 1755 for (UsePosition* pos = range->first_pos(); pos != NULL; | 1744 // TODO(dcarney): should the else case assert requires_reg_ == false? |
| 1756 pos = pos->next_) { | 1745 // Can't mark phis as needing a register. |
| 1757 pos->register_beneficial_ = true; | 1746 if (!code() |
| 1758 // TODO(dcarney): should the else case assert requires_reg_ == false? | 1747 ->InstructionAt(pos->pos().InstructionIndex()) |
| 1759 // Can't mark phis as needing a register. | 1748 ->IsGapMoves()) { |
| 1760 if (!code() | 1749 pos->requires_reg_ = true; |
| 1761 ->InstructionAt(pos->pos().InstructionIndex()) | |
| 1762 ->IsGapMoves()) { | |
| 1763 pos->requires_reg_ = true; | |
| 1764 } | |
| 1765 } | 1750 } |
| 1766 } | 1751 } |
| 1767 } | 1752 } |
| 1768 } | 1753 } |
| 1769 } | 1754 } |
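At its core, BuildLiveRanges is a backward dataflow pass: each block starts from its live-out set, instructions are walked in reverse with definitions killing values and uses reviving them, and loop headers then stretch every live-in value across the whole loop body. A toy sketch of the per-block step, assuming a fixed bound on virtual registers (Instr and the 128-register cap are assumptions, not V8 types):

```cpp
#include <bitset>
#include <vector>

constexpr size_t kMaxVregs = 128;
using LiveSet = std::bitset<kMaxVregs>;

struct Instr {
  std::vector<int> defs;  // virtual registers written
  std::vector<int> uses;  // virtual registers read
};

// live starts as the union of the successors' live-in sets (live_out) and is
// transformed into this block's live-in, as BuildLiveRanges does per block.
LiveSet ComputeLiveIn(const std::vector<Instr>& block, LiveSet live) {
  for (auto it = block.rbegin(); it != block.rend(); ++it) {
    for (int d : it->defs) live.reset(d);  // a definition ends the range
    for (int u : it->uses) live.set(u);    // a use (re)starts the range
  }
  return live;
}
```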
| 1770 | 1755 |
| 1771 | 1756 |
| 1772 bool RegisterAllocator::ExistsUseWithoutDefinition() { | 1757 bool RegisterAllocator::ExistsUseWithoutDefinition() { |
| 1773 bool found = false; | 1758 bool found = false; |
| 1774 BitVector::Iterator iterator(live_in_sets_[0]); | 1759 BitVector::Iterator iterator(live_in_sets_[0]); |
| (...skipping 10 matching lines...) |
| 1785 PrintF(" (function: %s)\n", debug_name()); | 1770 PrintF(" (function: %s)\n", debug_name()); |
| 1786 } | 1771 } |
| 1787 iterator.Advance(); | 1772 iterator.Advance(); |
| 1788 } | 1773 } |
| 1789 return found; | 1774 return found; |
| 1790 } | 1775 } |
| 1791 | 1776 |
| 1792 | 1777 |
| 1793 bool RegisterAllocator::SafePointsAreInOrder() const { | 1778 bool RegisterAllocator::SafePointsAreInOrder() const { |
| 1794 int safe_point = 0; | 1779 int safe_point = 0; |
| 1795 const PointerMapDeque* pointer_maps = code()->pointer_maps(); | 1780 for (auto map : *code()->pointer_maps()) { |
| 1796 for (PointerMapDeque::const_iterator it = pointer_maps->begin(); | |
| 1797 it != pointer_maps->end(); ++it) { | |
| 1798 PointerMap* map = *it; | |
| 1799 if (safe_point > map->instruction_position()) return false; | 1781 if (safe_point > map->instruction_position()) return false; |
| 1800 safe_point = map->instruction_position(); | 1782 safe_point = map->instruction_position(); |
| 1801 } | 1783 } |
| 1802 return true; | 1784 return true; |
| 1803 } | 1785 } |
| 1804 | 1786 |
| 1805 | 1787 |
| 1806 void RegisterAllocator::PopulatePointerMaps() { | 1788 void RegisterAllocator::PopulatePointerMaps() { |
| 1807 DCHECK(SafePointsAreInOrder()); | 1789 DCHECK(SafePointsAreInOrder()); |
| 1808 | 1790 |
| 1809 // Iterate over all safe point positions and record a pointer | 1791 // Iterate over all safe point positions and record a pointer |
| 1810 // for all spilled live ranges at this point. | 1792 // for all spilled live ranges at this point. |
| 1811 int last_range_start = 0; | 1793 int last_range_start = 0; |
| 1812 const PointerMapDeque* pointer_maps = code()->pointer_maps(); | 1794 auto pointer_maps = code()->pointer_maps(); |
| 1813 PointerMapDeque::const_iterator first_it = pointer_maps->begin(); | 1795 PointerMapDeque::const_iterator first_it = pointer_maps->begin(); |
| 1814 for (size_t range_idx = 0; range_idx < live_ranges().size(); ++range_idx) { | 1796 for (LiveRange* range : live_ranges()) { |
| 1815 LiveRange* range = live_ranges().at(range_idx); | 1797 if (range == nullptr) continue; |
| 1816 if (range == NULL) continue; | |
| 1817 // Iterate over the first parts of multi-part live ranges. | 1798 // Iterate over the first parts of multi-part live ranges. |
| 1818 if (range->parent() != NULL) continue; | 1799 if (range->IsChild()) continue; |
| 1819 // Skip non-reference values. | 1800 // Skip non-reference values. |
| 1820 if (!HasTaggedValue(range->id())) continue; | 1801 if (!HasTaggedValue(range->id())) continue; |
| 1821 // Skip empty live ranges. | 1802 // Skip empty live ranges. |
| 1822 if (range->IsEmpty()) continue; | 1803 if (range->IsEmpty()) continue; |
| 1823 | 1804 |
| 1824 // Find the extent of the range and its children. | 1805 // Find the extent of the range and its children. |
| 1825 int start = range->Start().InstructionIndex(); | 1806 int start = range->Start().InstructionIndex(); |
| 1826 int end = 0; | 1807 int end = 0; |
| 1827 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) { | 1808 for (auto cur = range; cur != nullptr; cur = cur->next()) { |
| 1828 LifetimePosition this_end = cur->End(); | 1809 auto this_end = cur->End(); |
| 1829 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex(); | 1810 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex(); |
| 1830 DCHECK(cur->Start().InstructionIndex() >= start); | 1811 DCHECK(cur->Start().InstructionIndex() >= start); |
| 1831 } | 1812 } |
| 1832 | 1813 |
| 1833 // Most of the ranges are in order, but not all. Keep an eye on when they | 1814 // Most of the ranges are in order, but not all. Keep an eye on when they |
| 1834 // step backwards and reset the first_it so we don't miss any safe points. | 1815 // step backwards and reset the first_it so we don't miss any safe points. |
| 1835 if (start < last_range_start) first_it = pointer_maps->begin(); | 1816 if (start < last_range_start) first_it = pointer_maps->begin(); |
| 1836 last_range_start = start; | 1817 last_range_start = start; |
| 1837 | 1818 |
| 1838 // Step across all the safe points that are before the start of this range, | 1819 // Step across all the safe points that are before the start of this range, |
| 1839 // recording how far we step so the walk need not be repeated for the next range. | 1820 // recording how far we step so the walk need not be repeated for the next range. |
| 1840 for (; first_it != pointer_maps->end(); ++first_it) { | 1821 for (; first_it != pointer_maps->end(); ++first_it) { |
| 1841 PointerMap* map = *first_it; | 1822 auto map = *first_it; |
| 1842 if (map->instruction_position() >= start) break; | 1823 if (map->instruction_position() >= start) break; |
| 1843 } | 1824 } |
| 1844 | 1825 |
| 1845 // Step through the safe points to see whether they are in the range. | 1826 // Step through the safe points to see whether they are in the range. |
| 1846 for (PointerMapDeque::const_iterator it = first_it; | 1827 for (auto it = first_it; it != pointer_maps->end(); ++it) { |
| 1847 it != pointer_maps->end(); ++it) { | 1828 auto map = *it; |
| 1848 PointerMap* map = *it; | |
| 1849 int safe_point = map->instruction_position(); | 1829 int safe_point = map->instruction_position(); |
| 1850 | 1830 |
| 1851 // The safe points are sorted so we can stop searching here. | 1831 // The safe points are sorted so we can stop searching here. |
| 1852 if (safe_point - 1 > end) break; | 1832 if (safe_point - 1 > end) break; |
| 1853 | 1833 |
| 1854 // Advance to the next active range that covers the current | 1834 // Advance to the next active range that covers the current |
| 1855 // safe point position. | 1835 // safe point position. |
| 1856 LifetimePosition safe_point_pos = | 1836 auto safe_point_pos = LifetimePosition::FromInstructionIndex(safe_point); |
| 1857 LifetimePosition::FromInstructionIndex(safe_point); | 1837 auto cur = range; |
| 1858 LiveRange* cur = range; | 1838 while (cur != nullptr && !cur->Covers(safe_point_pos)) { |
| 1859 while (cur != NULL && !cur->Covers(safe_point_pos)) { | |
| 1860 cur = cur->next(); | 1839 cur = cur->next(); |
| 1861 } | 1840 } |
| 1862 if (cur == NULL) continue; | 1841 if (cur == nullptr) continue; |
| 1863 | 1842 |
| 1864 // Check if the live range is spilled and the safe point is after | 1843 // Check if the live range is spilled and the safe point is after |
| 1865 // the spill position. | 1844 // the spill position. |
| 1866 if (range->HasSpillOperand() && | 1845 if (range->HasSpillOperand() && |
| 1867 safe_point >= range->spill_start_index() && | 1846 safe_point >= range->spill_start_index() && |
| 1868 !range->GetSpillOperand()->IsConstant()) { | 1847 !range->GetSpillOperand()->IsConstant()) { |
| 1869 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n", | 1848 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n", |
| 1870 range->id(), range->spill_start_index(), safe_point); | 1849 range->id(), range->spill_start_index(), safe_point); |
| 1871 map->RecordPointer(range->GetSpillOperand(), code_zone()); | 1850 map->RecordPointer(range->GetSpillOperand(), code_zone()); |
| 1872 } | 1851 } |
| (...skipping 20 matching lines...) |
| 1893 | 1872 |
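The scan in PopulatePointerMaps exploits the fact that safe points are fully sorted while live ranges are only mostly sorted by start: a rolling iterator (first_it above) persists across ranges and is rewound only when a range's start steps backwards. A standalone sketch of that pattern, with SafePoint/Range as simplified placeholders:

```cpp
#include <vector>

struct SafePoint { int position; };
struct Range { int start, end; };

void ScanSafePoints(const std::vector<Range>& ranges,
                    const std::vector<SafePoint>& safe_points) {
  auto first_it = safe_points.begin();
  int last_range_start = 0;
  for (const Range& range : ranges) {
    // Ranges are mostly sorted; rewind only on a backwards step.
    if (range.start < last_range_start) first_it = safe_points.begin();
    last_range_start = range.start;
    // Skip safe points before this range; first_it persists across ranges.
    while (first_it != safe_points.end() &&
           first_it->position < range.start) {
      ++first_it;
    }
    for (auto it = first_it; it != safe_points.end(); ++it) {
      if (it->position - 1 > range.end) break;  // sorted: nothing later matches
      // ... record a pointer for `range` at this safe point here ...
    }
  }
}
```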
| 1894 | 1873 |
| 1895 void RegisterAllocator::AllocateDoubleRegisters() { | 1874 void RegisterAllocator::AllocateDoubleRegisters() { |
| 1896 num_registers_ = config()->num_aliased_double_registers(); | 1875 num_registers_ = config()->num_aliased_double_registers(); |
| 1897 mode_ = DOUBLE_REGISTERS; | 1876 mode_ = DOUBLE_REGISTERS; |
| 1898 AllocateRegisters(); | 1877 AllocateRegisters(); |
| 1899 } | 1878 } |
| 1900 | 1879 |
| 1901 | 1880 |
| 1902 void RegisterAllocator::AllocateRegisters() { | 1881 void RegisterAllocator::AllocateRegisters() { |
| 1903 DCHECK(unhandled_live_ranges_.is_empty()); | 1882 DCHECK(unhandled_live_ranges().empty()); |
| 1904 | 1883 |
| 1905 for (size_t i = 0; i < live_ranges_.size(); ++i) { | 1884 for (auto range : live_ranges()) { |
| 1906 if (live_ranges_[i] != NULL) { | 1885 if (range == nullptr) continue; |
| 1907 if (live_ranges_[i]->Kind() == mode_) { | 1886 if (range->Kind() == mode_) { |
| 1908 AddToUnhandledUnsorted(live_ranges_[i]); | 1887 AddToUnhandledUnsorted(range); |
| 1909 } | |
| 1910 } | 1888 } |
| 1911 } | 1889 } |
| 1912 SortUnhandled(); | 1890 SortUnhandled(); |
| 1913 DCHECK(UnhandledIsSorted()); | 1891 DCHECK(UnhandledIsSorted()); |
| 1914 | 1892 |
| 1915 DCHECK(reusable_slots_.is_empty()); | 1893 DCHECK(reusable_slots().empty()); |
| 1916 DCHECK(active_live_ranges_.is_empty()); | 1894 DCHECK(active_live_ranges().empty()); |
| 1917 DCHECK(inactive_live_ranges_.is_empty()); | 1895 DCHECK(inactive_live_ranges().empty()); |
| 1918 | 1896 |
| 1919 if (mode_ == DOUBLE_REGISTERS) { | 1897 if (mode_ == DOUBLE_REGISTERS) { |
| 1920 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { | 1898 for (int i = 0; i < config()->num_aliased_double_registers(); ++i) { |
| 1921 LiveRange* current = fixed_double_live_ranges_.at(i); | 1899 auto current = fixed_double_live_ranges()[i]; |
| 1922 if (current != NULL) { | 1900 if (current != nullptr) { |
| 1923 AddToInactive(current); | 1901 AddToInactive(current); |
| 1924 } | 1902 } |
| 1925 } | 1903 } |
| 1926 } else { | 1904 } else { |
| 1927 DCHECK(mode_ == GENERAL_REGISTERS); | 1905 DCHECK(mode_ == GENERAL_REGISTERS); |
| 1928 for (auto current : fixed_live_ranges()) { | 1906 for (auto current : fixed_live_ranges()) { |
| 1929 if (current != NULL) { | 1907 if (current != nullptr) { |
| 1930 AddToInactive(current); | 1908 AddToInactive(current); |
| 1931 } | 1909 } |
| 1932 } | 1910 } |
| 1933 } | 1911 } |
| 1934 | 1912 |
| 1935 while (!unhandled_live_ranges_.is_empty()) { | 1913 while (!unhandled_live_ranges().empty()) { |
| 1936 DCHECK(UnhandledIsSorted()); | 1914 DCHECK(UnhandledIsSorted()); |
| 1937 LiveRange* current = unhandled_live_ranges_.RemoveLast(); | 1915 auto current = unhandled_live_ranges().back(); |
| 1916 unhandled_live_ranges().pop_back(); |
| 1938 DCHECK(UnhandledIsSorted()); | 1917 DCHECK(UnhandledIsSorted()); |
| 1939 LifetimePosition position = current->Start(); | 1918 auto position = current->Start(); |
| 1940 #ifdef DEBUG | 1919 #ifdef DEBUG |
| 1941 allocation_finger_ = position; | 1920 allocation_finger_ = position; |
| 1942 #endif | 1921 #endif |
| 1943 TraceAlloc("Processing interval %d start=%d\n", current->id(), | 1922 TraceAlloc("Processing interval %d start=%d\n", current->id(), |
| 1944 position.Value()); | 1923 position.Value()); |
| 1945 | 1924 |
| 1946 if (!current->HasNoSpillType()) { | 1925 if (!current->HasNoSpillType()) { |
| 1947 TraceAlloc("Live range %d already has a spill operand\n", current->id()); | 1926 TraceAlloc("Live range %d already has a spill operand\n", current->id()); |
| 1948 LifetimePosition next_pos = position; | 1927 auto next_pos = position; |
| 1949 if (code()->IsGapAt(next_pos.InstructionIndex())) { | 1928 if (code()->IsGapAt(next_pos.InstructionIndex())) { |
| 1950 next_pos = next_pos.NextInstruction(); | 1929 next_pos = next_pos.NextInstruction(); |
| 1951 } | 1930 } |
| 1952 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); | 1931 auto pos = current->NextUsePositionRegisterIsBeneficial(next_pos); |
| 1953 // If the range already has a spill operand and it doesn't need a | 1932 // If the range already has a spill operand and it doesn't need a |
| 1954 // register immediately, split it and spill the first part of the range. | 1933 // register immediately, split it and spill the first part of the range. |
| 1955 if (pos == NULL) { | 1934 if (pos == nullptr) { |
| 1956 Spill(current); | 1935 Spill(current); |
| 1957 continue; | 1936 continue; |
| 1958 } else if (pos->pos().Value() > | 1937 } else if (pos->pos().Value() > |
| 1959 current->Start().NextInstruction().Value()) { | 1938 current->Start().NextInstruction().Value()) { |
| 1960 // Do not spill the live range eagerly if the use position that can | 1939 // Do not spill the live range eagerly if the use position that can |
| 1961 // benefit from the register is too close to the start of the live range. | 1940 // benefit from the register is too close to the start of the live range. |
| 1962 SpillBetween(current, current->Start(), pos->pos()); | 1941 SpillBetween(current, current->Start(), pos->pos()); |
| 1963 if (!AllocationOk()) return; | 1942 if (!AllocationOk()) return; |
| 1964 DCHECK(UnhandledIsSorted()); | 1943 DCHECK(UnhandledIsSorted()); |
| 1965 continue; | 1944 continue; |
| 1966 } | 1945 } |
| 1967 } | 1946 } |
| 1968 | 1947 |
| 1969 if (FLAG_turbo_reuse_spill_slots) { | 1948 if (FLAG_turbo_reuse_spill_slots) { |
| 1970 if (TryReuseSpillForPhi(current)) { | 1949 if (TryReuseSpillForPhi(current)) { |
| 1971 continue; | 1950 continue; |
| 1972 } | 1951 } |
| 1973 if (!AllocationOk()) return; | 1952 if (!AllocationOk()) return; |
| 1974 } | 1953 } |
| 1975 | 1954 |
| 1976 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 1955 for (size_t i = 0; i < active_live_ranges().size(); ++i) { |
| 1977 LiveRange* cur_active = active_live_ranges_.at(i); | 1956 auto cur_active = active_live_ranges()[i]; |
| 1978 if (cur_active->End().Value() <= position.Value()) { | 1957 if (cur_active->End().Value() <= position.Value()) { |
| 1979 ActiveToHandled(cur_active); | 1958 ActiveToHandled(cur_active); |
| 1980 --i; // The live range was removed from the list of active live ranges. | 1959 --i; // The live range was removed from the list of active live ranges. |
| 1981 } else if (!cur_active->Covers(position)) { | 1960 } else if (!cur_active->Covers(position)) { |
| 1982 ActiveToInactive(cur_active); | 1961 ActiveToInactive(cur_active); |
| 1983 --i; // The live range was removed from the list of active live ranges. | 1962 --i; // The live range was removed from the list of active live ranges. |
| 1984 } | 1963 } |
| 1985 } | 1964 } |
| 1986 | 1965 |
| 1987 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 1966 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) { |
| 1988 LiveRange* cur_inactive = inactive_live_ranges_.at(i); | 1967 auto cur_inactive = inactive_live_ranges()[i]; |
| 1989 if (cur_inactive->End().Value() <= position.Value()) { | 1968 if (cur_inactive->End().Value() <= position.Value()) { |
| 1990 InactiveToHandled(cur_inactive); | 1969 InactiveToHandled(cur_inactive); |
| 1991 --i; // Live range was removed from the list of inactive live ranges. | 1970 --i; // Live range was removed from the list of inactive live ranges. |
| 1992 } else if (cur_inactive->Covers(position)) { | 1971 } else if (cur_inactive->Covers(position)) { |
| 1993 InactiveToActive(cur_inactive); | 1972 InactiveToActive(cur_inactive); |
| 1994 --i; // Live range was removed from the list of inactive live ranges. | 1973 --i; // Live range was removed from the list of inactive live ranges. |
| 1995 } | 1974 } |
| 1996 } | 1975 } |
| 1997 | 1976 |
| 1998 DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled()); | 1977 DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled()); |
| 1999 | 1978 |
| 2000 bool result = TryAllocateFreeReg(current); | 1979 bool result = TryAllocateFreeReg(current); |
| 2001 if (!AllocationOk()) return; | 1980 if (!AllocationOk()) return; |
| 2002 | 1981 |
| 2003 if (!result) AllocateBlockedReg(current); | 1982 if (!result) AllocateBlockedReg(current); |
| 2004 if (!AllocationOk()) return; | 1983 if (!AllocationOk()) return; |
| 2005 | 1984 |
| 2006 if (current->HasRegisterAssigned()) { | 1985 if (current->HasRegisterAssigned()) { |
| 2007 AddToActive(current); | 1986 AddToActive(current); |
| 2008 } | 1987 } |
| 2009 } | 1988 } |
| 2010 | 1989 |
| 2011 reusable_slots_.Rewind(0); | 1990 reusable_slots().clear(); |
| 2012 active_live_ranges_.Rewind(0); | 1991 active_live_ranges().clear(); |
| 2013 inactive_live_ranges_.Rewind(0); | 1992 inactive_live_ranges().clear(); |
| 2014 } | 1993 } |
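AllocateRegisters is a classic linear-scan driver: unhandled ranges are kept sorted so the next one to process sits at the back of the vector, and at each start position active ranges that have ended are retired while ranges with a lifetime hole move to the inactive list (and symmetrically back). A compressed sketch of that control flow, with a toy Range type and the actual allocation step elided:

```cpp
#include <algorithm>
#include <vector>

struct Range {
  int start, end;
  bool Covers(int pos) const { return start <= pos && pos < end; }
};

void LinearScan(std::vector<Range*> unhandled) {
  // Sorted descending by start, so the next range to process is at the back.
  std::sort(unhandled.begin(), unhandled.end(),
            [](Range* a, Range* b) { return a->start > b->start; });
  std::vector<Range*> active, inactive;
  while (!unhandled.empty()) {
    Range* current = unhandled.back();
    unhandled.pop_back();
    int position = current->start;
    // Retire active ranges that ended; demote those with a lifetime hole.
    for (size_t i = 0; i < active.size(); ++i) {
      Range* r = active[i];
      if (r->end <= position) {
        active.erase(active.begin() + i);  // -> handled
        --i;
      } else if (!r->Covers(position)) {
        inactive.push_back(r);             // -> inactive
        active.erase(active.begin() + i);
        --i;
      }
    }
    // Symmetrically promote/retire inactive ranges (elided), then try to
    // allocate a register for `current`; on success it becomes active.
    active.push_back(current);
  }
}
```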
| 2015 | 1994 |
| 2016 | 1995 |
| 2017 const char* RegisterAllocator::RegisterName(int allocation_index) { | 1996 const char* RegisterAllocator::RegisterName(int allocation_index) { |
| 2018 if (mode_ == GENERAL_REGISTERS) { | 1997 if (mode_ == GENERAL_REGISTERS) { |
| 2019 return config()->general_register_name(allocation_index); | 1998 return config()->general_register_name(allocation_index); |
| 2020 } else { | 1999 } else { |
| 2021 return config()->double_register_name(allocation_index); | 2000 return config()->double_register_name(allocation_index); |
| 2022 } | 2001 } |
| 2023 } | 2002 } |
| 2024 | 2003 |
| 2025 | 2004 |
| 2026 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { | 2005 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { |
| 2027 return code()->IsReference(virtual_register); | 2006 return code()->IsReference(virtual_register); |
| 2028 } | 2007 } |
| 2029 | 2008 |
| 2030 | 2009 |
| 2031 RegisterKind RegisterAllocator::RequiredRegisterKind( | 2010 RegisterKind RegisterAllocator::RequiredRegisterKind( |
| 2032 int virtual_register) const { | 2011 int virtual_register) const { |
| 2033 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS | 2012 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS |
| 2034 : GENERAL_REGISTERS; | 2013 : GENERAL_REGISTERS; |
| 2035 } | 2014 } |
| 2036 | 2015 |
| 2037 | 2016 |
| 2038 void RegisterAllocator::AddToActive(LiveRange* range) { | 2017 void RegisterAllocator::AddToActive(LiveRange* range) { |
| 2039 TraceAlloc("Add live range %d to active\n", range->id()); | 2018 TraceAlloc("Add live range %d to active\n", range->id()); |
| 2040 active_live_ranges_.Add(range, local_zone()); | 2019 active_live_ranges().push_back(range); |
| 2041 } | 2020 } |
| 2042 | 2021 |
| 2043 | 2022 |
| 2044 void RegisterAllocator::AddToInactive(LiveRange* range) { | 2023 void RegisterAllocator::AddToInactive(LiveRange* range) { |
| 2045 TraceAlloc("Add live range %d to inactive\n", range->id()); | 2024 TraceAlloc("Add live range %d to inactive\n", range->id()); |
| 2046 inactive_live_ranges_.Add(range, local_zone()); | 2025 inactive_live_ranges().push_back(range); |
| 2047 } | 2026 } |
| 2048 | 2027 |
| 2049 | 2028 |
| 2050 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { | 2029 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { |
| 2051 if (range == NULL || range->IsEmpty()) return; | 2030 if (range == nullptr || range->IsEmpty()) return; |
| 2052 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | 2031 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 2053 DCHECK(allocation_finger_.Value() <= range->Start().Value()); | 2032 DCHECK(allocation_finger_.Value() <= range->Start().Value()); |
| 2054 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) { | 2033 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; |
| 2055 LiveRange* cur_range = unhandled_live_ranges_.at(i); | 2034 --i) { |
| 2056 if (range->ShouldBeAllocatedBefore(cur_range)) { | 2035 auto cur_range = unhandled_live_ranges().at(i); |
| 2057 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1); | 2036 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; |
| 2058 unhandled_live_ranges_.InsertAt(i + 1, range, local_zone()); | 2037 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1); |
| 2059 DCHECK(UnhandledIsSorted()); | 2038 auto it = unhandled_live_ranges().begin() + (i + 1); |
| 2060 return; | 2039 unhandled_live_ranges().insert(it, range); |
| 2061 } | 2040 DCHECK(UnhandledIsSorted()); |
| 2041 return; |
| 2062 } | 2042 } |
| 2063 TraceAlloc("Add live range %d to unhandled at start\n", range->id()); | 2043 TraceAlloc("Add live range %d to unhandled at start\n", range->id()); |
| 2064 unhandled_live_ranges_.InsertAt(0, range, local_zone()); | 2044 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); |
| 2065 DCHECK(UnhandledIsSorted()); | 2045 DCHECK(UnhandledIsSorted()); |
| 2066 } | 2046 } |
| 2067 | 2047 |
| 2068 | 2048 |
| 2069 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { | 2049 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { |
| 2070 if (range == NULL || range->IsEmpty()) return; | 2050 if (range == nullptr || range->IsEmpty()) return; |
| 2071 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | 2051 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 2072 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id()); | 2052 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id()); |
| 2073 unhandled_live_ranges_.Add(range, local_zone()); | 2053 unhandled_live_ranges().push_back(range); |
| 2074 } | 2054 } |
| 2075 | 2055 |
| 2076 | 2056 |
| 2077 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) { | 2057 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { |
| 2078 DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) || | 2058 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); |
| 2079 !(*b)->ShouldBeAllocatedBefore(*a)); | 2059 if (a->ShouldBeAllocatedBefore(b)) return false; |
| 2080 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1; | 2060 if (b->ShouldBeAllocatedBefore(a)) return true; |
| 2081 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1; | 2061 return a->id() < b->id(); |
| 2082 return (*a)->id() - (*b)->id(); | |
| 2083 } | 2062 } |
| 2084 | 2063 |
| 2085 | 2064 |
| 2086 // Sort the unhandled live ranges so that the ranges to be processed first are | 2065 // Sort the unhandled live ranges so that the ranges to be processed first are |
| 2087 // at the end of the list. This is convenient for the register allocation | 2066 // at the end of the list. This is convenient for the register allocation |
| 2088 // algorithm because it is efficient to remove elements from the end. | 2067 // algorithm because it is efficient to remove elements from the end. |
| 2089 void RegisterAllocator::SortUnhandled() { | 2068 void RegisterAllocator::SortUnhandled() { |
| 2090 TraceAlloc("Sort unhandled\n"); | 2069 TraceAlloc("Sort unhandled\n"); |
| 2091 unhandled_live_ranges_.Sort(&UnhandledSortHelper); | 2070 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), |
| 2071 &UnhandledSortHelper); |
| 2092 } | 2072 } |
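The switch from List::Sort to std::sort also changes the comparator contract: instead of a three-way int result, std::sort requires a strict-weak-ordering predicate meaning "a comes before b", and ties must be broken deterministically (here by id) to keep allocation order stable. A small self-contained demonstration of the same ordering, using a toy Range type:

```cpp
#include <algorithm>
#include <cassert>
#include <vector>

struct Range { int start; int id; };

bool ProcessLater(const Range& a, const Range& b) {
  // Ranges to be processed first must end up at the back of the vector,
  // so an earlier start sorts as greater; ids break ties deterministically.
  if (a.start != b.start) return a.start > b.start;
  return a.id < b.id;
}

int main() {
  std::vector<Range> v = {{5, 0}, {1, 1}, {3, 2}, {1, 3}};
  std::sort(v.begin(), v.end(), ProcessLater);
  assert(v.back().start == 1);  // smallest start is processed first
}
```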
| 2093 | 2073 |
| 2094 | 2074 |
| 2095 bool RegisterAllocator::UnhandledIsSorted() { | 2075 bool RegisterAllocator::UnhandledIsSorted() { |
| 2096 int len = unhandled_live_ranges_.length(); | 2076 size_t len = unhandled_live_ranges().size(); |
| 2097 for (int i = 1; i < len; i++) { | 2077 for (size_t i = 1; i < len; i++) { |
| 2098 LiveRange* a = unhandled_live_ranges_.at(i - 1); | 2078 auto a = unhandled_live_ranges().at(i - 1); |
| 2099 LiveRange* b = unhandled_live_ranges_.at(i); | 2079 auto b = unhandled_live_ranges().at(i); |
| 2100 if (a->Start().Value() < b->Start().Value()) return false; | 2080 if (a->Start().Value() < b->Start().Value()) return false; |
| 2101 } | 2081 } |
| 2102 return true; | 2082 return true; |
| 2103 } | 2083 } |
| 2104 | 2084 |
| 2105 | 2085 |
| 2106 void RegisterAllocator::FreeSpillSlot(LiveRange* range) { | 2086 void RegisterAllocator::FreeSpillSlot(LiveRange* range) { |
| 2107 DCHECK(!FLAG_turbo_reuse_spill_slots); | 2087 DCHECK(!FLAG_turbo_reuse_spill_slots); |
| 2108 // Check that we are the last range. | 2088 // Check that we are the last range. |
| 2109 if (range->next() != NULL) return; | 2089 if (range->next() != nullptr) return; |
| 2110 | |
| 2111 if (!range->TopLevel()->HasSpillOperand()) return; | 2090 if (!range->TopLevel()->HasSpillOperand()) return; |
| 2112 | 2091 auto spill_operand = range->TopLevel()->GetSpillOperand(); |
| 2113 InstructionOperand* spill_operand = range->TopLevel()->GetSpillOperand(); | |
| 2114 if (spill_operand->IsConstant()) return; | 2092 if (spill_operand->IsConstant()) return; |
| 2115 if (spill_operand->index() >= 0) { | 2093 if (spill_operand->index() >= 0) { |
| 2116 reusable_slots_.Add(range, local_zone()); | 2094 reusable_slots().push_back(range); |
| 2117 } | 2095 } |
| 2118 } | 2096 } |
| 2119 | 2097 |
| 2120 | 2098 |
| 2121 InstructionOperand* RegisterAllocator::TryReuseSpillSlot(LiveRange* range) { | 2099 InstructionOperand* RegisterAllocator::TryReuseSpillSlot(LiveRange* range) { |
| 2122 DCHECK(!FLAG_turbo_reuse_spill_slots); | 2100 DCHECK(!FLAG_turbo_reuse_spill_slots); |
| 2123 if (reusable_slots_.is_empty()) return NULL; | 2101 if (reusable_slots().empty()) return nullptr; |
| 2124 if (reusable_slots_.first()->End().Value() > | 2102 if (reusable_slots().front()->End().Value() > |
| 2125 range->TopLevel()->Start().Value()) { | 2103 range->TopLevel()->Start().Value()) { |
| 2126 return NULL; | 2104 return nullptr; |
| 2127 } | 2105 } |
| 2128 InstructionOperand* result = | 2106 auto result = reusable_slots().front()->TopLevel()->GetSpillOperand(); |
| 2129 reusable_slots_.first()->TopLevel()->GetSpillOperand(); | 2107 reusable_slots().erase(reusable_slots().begin()); |
| 2130 reusable_slots_.Remove(0); | |
| 2131 return result; | 2108 return result; |
| 2132 } | 2109 } |
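FreeSpillSlot and TryReuseSpillSlot together implement a simple slot-recycling queue: when the last part of a range dies, its slot is queued, and a later range may take the oldest queued slot provided its previous owner ended before the new range starts. A sketch of that policy under simplified assumptions (integer slot indices, a global deque):

```cpp
#include <deque>

struct Range { int start, end, spill_slot; };

std::deque<Range*> reusable;

// Queue the slot once the final part of a multi-part range has ended.
void FreeSlot(Range* last_part) {
  if (last_part->spill_slot >= 0) reusable.push_back(last_part);
}

// Returns a recycled slot index, or -1 if no queued slot has expired yet.
int TryReuseSlot(const Range* range) {
  if (reusable.empty()) return -1;
  // The front holds the oldest freed slot; it is safe to reuse only if its
  // owning range ended before the new range begins.
  if (reusable.front()->end > range->start) return -1;
  int slot = reusable.front()->spill_slot;
  reusable.pop_front();
  return slot;
}
```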
| 2133 | 2110 |
| 2134 | 2111 |
| 2135 void RegisterAllocator::ActiveToHandled(LiveRange* range) { | 2112 void RegisterAllocator::ActiveToHandled(LiveRange* range) { |
| 2136 DCHECK(active_live_ranges_.Contains(range)); | 2113 RemoveElement(&active_live_ranges(), range); |
| 2137 active_live_ranges_.RemoveElement(range); | |
| 2138 TraceAlloc("Moving live range %d from active to handled\n", range->id()); | 2114 TraceAlloc("Moving live range %d from active to handled\n", range->id()); |
| 2139 if (!FLAG_turbo_reuse_spill_slots) FreeSpillSlot(range); | 2115 if (!FLAG_turbo_reuse_spill_slots) FreeSpillSlot(range); |
| 2140 } | 2116 } |
| 2141 | 2117 |
| 2142 | 2118 |
| 2143 void RegisterAllocator::ActiveToInactive(LiveRange* range) { | 2119 void RegisterAllocator::ActiveToInactive(LiveRange* range) { |
| 2144 DCHECK(active_live_ranges_.Contains(range)); | 2120 RemoveElement(&active_live_ranges(), range); |
| 2145 active_live_ranges_.RemoveElement(range); | 2121 inactive_live_ranges().push_back(range); |
| 2146 inactive_live_ranges_.Add(range, local_zone()); | |
| 2147 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); | 2122 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); |
| 2148 } | 2123 } |
| 2149 | 2124 |
| 2150 | 2125 |
| 2151 void RegisterAllocator::InactiveToHandled(LiveRange* range) { | 2126 void RegisterAllocator::InactiveToHandled(LiveRange* range) { |
| 2152 DCHECK(inactive_live_ranges_.Contains(range)); | 2127 RemoveElement(&inactive_live_ranges(), range); |
| 2153 inactive_live_ranges_.RemoveElement(range); | |
| 2154 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); | 2128 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); |
| 2155 if (!FLAG_turbo_reuse_spill_slots) FreeSpillSlot(range); | 2129 if (!FLAG_turbo_reuse_spill_slots) FreeSpillSlot(range); |
| 2156 } | 2130 } |
| 2157 | 2131 |
| 2158 | 2132 |
| 2159 void RegisterAllocator::InactiveToActive(LiveRange* range) { | 2133 void RegisterAllocator::InactiveToActive(LiveRange* range) { |
| 2160 DCHECK(inactive_live_ranges_.Contains(range)); | 2134 RemoveElement(&inactive_live_ranges(), range); |
| 2161 inactive_live_ranges_.RemoveElement(range); | 2135 active_live_ranges().push_back(range); |
| 2162 active_live_ranges_.Add(range, local_zone()); | |
| 2163 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); | 2136 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); |
| 2164 } | 2137 } |
| 2165 | 2138 |
| 2166 | 2139 |
| 2167 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { | 2140 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { |
| 2168 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2141 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2169 | 2142 |
| 2170 for (int i = 0; i < num_registers_; i++) { | 2143 for (int i = 0; i < num_registers_; i++) { |
| 2171 free_until_pos[i] = LifetimePosition::MaxPosition(); | 2144 free_until_pos[i] = LifetimePosition::MaxPosition(); |
| 2172 } | 2145 } |
| 2173 | 2146 |
| 2174 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 2147 for (auto cur_active : active_live_ranges()) { |
| 2175 LiveRange* cur_active = active_live_ranges_.at(i); | |
| 2176 free_until_pos[cur_active->assigned_register()] = | 2148 free_until_pos[cur_active->assigned_register()] = |
| 2177 LifetimePosition::FromInstructionIndex(0); | 2149 LifetimePosition::FromInstructionIndex(0); |
| 2178 } | 2150 } |
| 2179 | 2151 |
| 2180 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 2152 for (auto cur_inactive : inactive_live_ranges()) { |
| 2181 LiveRange* cur_inactive = inactive_live_ranges_.at(i); | |
| 2182 DCHECK(cur_inactive->End().Value() > current->Start().Value()); | 2153 DCHECK(cur_inactive->End().Value() > current->Start().Value()); |
| 2183 LifetimePosition next_intersection = | 2154 auto next_intersection = cur_inactive->FirstIntersection(current); |
| 2184 cur_inactive->FirstIntersection(current); | |
| 2185 if (!next_intersection.IsValid()) continue; | 2155 if (!next_intersection.IsValid()) continue; |
| 2186 int cur_reg = cur_inactive->assigned_register(); | 2156 int cur_reg = cur_inactive->assigned_register(); |
| 2187 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); | 2157 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); |
| 2188 } | 2158 } |
| 2189 | 2159 |
| 2190 InstructionOperand* hint = current->FirstHint(); | 2160 auto hint = current->FirstHint(); |
| 2191 if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) { | 2161 if (hint != nullptr && (hint->IsRegister() || hint->IsDoubleRegister())) { |
| 2192 int register_index = hint->index(); | 2162 int register_index = hint->index(); |
| 2193 TraceAlloc( | 2163 TraceAlloc( |
| 2194 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n", | 2164 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n", |
| 2195 RegisterName(register_index), free_until_pos[register_index].Value(), | 2165 RegisterName(register_index), free_until_pos[register_index].Value(), |
| 2196 current->id(), current->End().Value()); | 2166 current->id(), current->End().Value()); |
| 2197 | 2167 |
| 2198 // The desired register is free until the end of the current live range. | 2168 // The desired register is free until the end of the current live range. |
| 2199 if (free_until_pos[register_index].Value() >= current->End().Value()) { | 2169 if (free_until_pos[register_index].Value() >= current->End().Value()) { |
| 2200 TraceAlloc("Assigning preferred reg %s to live range %d\n", | 2170 TraceAlloc("Assigning preferred reg %s to live range %d\n", |
| 2201 RegisterName(register_index), current->id()); | 2171 RegisterName(register_index), current->id()); |
| 2202 SetLiveRangeAssignedRegister(current, register_index); | 2172 SetLiveRangeAssignedRegister(current, register_index); |
| 2203 return true; | 2173 return true; |
| 2204 } | 2174 } |
| 2205 } | 2175 } |
| 2206 | 2176 |
| 2207 // Find the register which stays free for the longest time. | 2177 // Find the register which stays free for the longest time. |
| 2208 int reg = 0; | 2178 int reg = 0; |
| 2209 for (int i = 1; i < RegisterCount(); ++i) { | 2179 for (int i = 1; i < RegisterCount(); ++i) { |
| 2210 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) { | 2180 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) { |
| 2211 reg = i; | 2181 reg = i; |
| 2212 } | 2182 } |
| 2213 } | 2183 } |
| 2214 | 2184 |
| 2215 LifetimePosition pos = free_until_pos[reg]; | 2185 auto pos = free_until_pos[reg]; |
| 2216 | 2186 |
| 2217 if (pos.Value() <= current->Start().Value()) { | 2187 if (pos.Value() <= current->Start().Value()) { |
| 2218 // All registers are blocked. | 2188 // All registers are blocked. |
| 2219 return false; | 2189 return false; |
| 2220 } | 2190 } |
| 2221 | 2191 |
| 2222 if (pos.Value() < current->End().Value()) { | 2192 if (pos.Value() < current->End().Value()) { |
| 2223 // Register reg is available at the range start but becomes blocked before | 2193 // Register reg is available at the range start but becomes blocked before |
| 2224 // the range end. Split current at the position where it becomes blocked. | 2194 // the range end. Split current at the position where it becomes blocked. |
| 2225 LiveRange* tail = SplitRangeAt(current, pos); | 2195 auto tail = SplitRangeAt(current, pos); |
| 2226 if (!AllocationOk()) return false; | 2196 if (!AllocationOk()) return false; |
| 2227 AddToUnhandledSorted(tail); | 2197 AddToUnhandledSorted(tail); |
| 2228 } | 2198 } |
| 2229 | 2199 |
| 2230 | |
| 2231 // Register reg is available at the range start and is free until | 2200 // Register reg is available at the range start and is free until |
| 2232 // the range end. | 2201 // the range end. |
| 2233 DCHECK(pos.Value() >= current->End().Value()); | 2202 DCHECK(pos.Value() >= current->End().Value()); |
| 2234 TraceAlloc("Assigning free reg %s to live range %d\n", RegisterName(reg), | 2203 TraceAlloc("Assigning free reg %s to live range %d\n", RegisterName(reg), |
| 2235 current->id()); | 2204 current->id()); |
| 2236 SetLiveRangeAssignedRegister(current, reg); | 2205 SetLiveRangeAssignedRegister(current, reg); |
| 2237 | 2206 |
| 2238 return true; | 2207 return true; |
| 2239 } | 2208 } |
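The selection step of TryAllocateFreeReg reduces to: compute, per register, the position until which it stays free; pick the register with the maximum; fail if even that one is blocked at the range start; and split the range if the register is free only for a prefix. A sketch of just that choice over plain integer positions (not the real LifetimePosition API):

```cpp
#include <vector>

// Returns the chosen register, or -1 if every register is blocked at start.
// *needs_split is set when the register is free only for a prefix of the
// range, which is exactly the case where TryAllocateFreeReg splits current.
int PickFreeRegister(const std::vector<int>& free_until_pos, int range_start,
                     int range_end, bool* needs_split) {
  int reg = 0;
  for (size_t i = 1; i < free_until_pos.size(); ++i) {
    if (free_until_pos[i] > free_until_pos[reg]) reg = static_cast<int>(i);
  }
  int pos = free_until_pos[reg];
  if (pos <= range_start) return -1;  // all registers blocked
  *needs_split = pos < range_end;     // free only until pos: split there
  return reg;
}
```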
| 2240 | 2209 |
| 2241 | 2210 |
| 2242 void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { | 2211 void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { |
| 2243 UsePosition* register_use = current->NextRegisterPosition(current->Start()); | 2212 auto register_use = current->NextRegisterPosition(current->Start()); |
| 2244 if (register_use == NULL) { | 2213 if (register_use == nullptr) { |
| 2245 // There is no use in the current live range that requires a register. | 2214 // There is no use in the current live range that requires a register. |
| 2246 // We can just spill it. | 2215 // We can just spill it. |
| 2247 Spill(current); | 2216 Spill(current); |
| 2248 return; | 2217 return; |
| 2249 } | 2218 } |
| 2250 | 2219 |
| 2251 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2220 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2252 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2221 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2253 | 2222 |
| 2254 for (int i = 0; i < num_registers_; i++) { | 2223 for (int i = 0; i < num_registers_; i++) { |
| 2255 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); | 2224 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); |
| 2256 } | 2225 } |
| 2257 | 2226 |
| 2258 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 2227 for (auto range : active_live_ranges()) { |
| 2259 LiveRange* range = active_live_ranges_[i]; | |
| 2260 int cur_reg = range->assigned_register(); | 2228 int cur_reg = range->assigned_register(); |
| 2261 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { | 2229 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { |
| 2262 block_pos[cur_reg] = use_pos[cur_reg] = | 2230 block_pos[cur_reg] = use_pos[cur_reg] = |
| 2263 LifetimePosition::FromInstructionIndex(0); | 2231 LifetimePosition::FromInstructionIndex(0); |
| 2264 } else { | 2232 } else { |
| 2265 UsePosition* next_use = | 2233 auto next_use = |
| 2266 range->NextUsePositionRegisterIsBeneficial(current->Start()); | 2234 range->NextUsePositionRegisterIsBeneficial(current->Start()); |
| 2267 if (next_use == NULL) { | 2235 if (next_use == nullptr) { |
| 2268 use_pos[cur_reg] = range->End(); | 2236 use_pos[cur_reg] = range->End(); |
| 2269 } else { | 2237 } else { |
| 2270 use_pos[cur_reg] = next_use->pos(); | 2238 use_pos[cur_reg] = next_use->pos(); |
| 2271 } | 2239 } |
| 2272 } | 2240 } |
| 2273 } | 2241 } |
| 2274 | 2242 |
| 2275 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 2243 for (auto range : inactive_live_ranges()) { |
| 2276 LiveRange* range = inactive_live_ranges_.at(i); | |
| 2277 DCHECK(range->End().Value() > current->Start().Value()); | 2244 DCHECK(range->End().Value() > current->Start().Value()); |
| 2278 LifetimePosition next_intersection = range->FirstIntersection(current); | 2245 auto next_intersection = range->FirstIntersection(current); |
| 2279 if (!next_intersection.IsValid()) continue; | 2246 if (!next_intersection.IsValid()) continue; |
| 2280 int cur_reg = range->assigned_register(); | 2247 int cur_reg = range->assigned_register(); |
| 2281 if (range->IsFixed()) { | 2248 if (range->IsFixed()) { |
| 2282 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); | 2249 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); |
| 2283 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); | 2250 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); |
| 2284 } else { | 2251 } else { |
| 2285 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); | 2252 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); |
| 2286 } | 2253 } |
| 2287 } | 2254 } |
| 2288 | 2255 |
| 2289 int reg = 0; | 2256 int reg = 0; |
| 2290 for (int i = 1; i < RegisterCount(); ++i) { | 2257 for (int i = 1; i < RegisterCount(); ++i) { |
| 2291 if (use_pos[i].Value() > use_pos[reg].Value()) { | 2258 if (use_pos[i].Value() > use_pos[reg].Value()) { |
| 2292 reg = i; | 2259 reg = i; |
| 2293 } | 2260 } |
| 2294 } | 2261 } |
| 2295 | 2262 |
| 2296 LifetimePosition pos = use_pos[reg]; | 2263 auto pos = use_pos[reg]; |
| 2297 | 2264 |
| 2298 if (pos.Value() < register_use->pos().Value()) { | 2265 if (pos.Value() < register_use->pos().Value()) { |
| 2299 // All registers are blocked before the first use that requires a register. | 2266 // All registers are blocked before the first use that requires a register. |
| 2300 // Spill the starting part of the live range up to that use. | 2267 // Spill the starting part of the live range up to that use. |
| 2301 SpillBetween(current, current->Start(), register_use->pos()); | 2268 SpillBetween(current, current->Start(), register_use->pos()); |
| 2302 return; | 2269 return; |
| 2303 } | 2270 } |
| 2304 | 2271 |
| 2305 if (block_pos[reg].Value() < current->End().Value()) { | 2272 if (block_pos[reg].Value() < current->End().Value()) { |
| 2306 // Register becomes blocked before the current range end. Split before that | 2273 // Register becomes blocked before the current range end. Split before that |
| (...skipping 12 matching lines...) |
| 2319 | 2286 |
| 2320 // This register was not free. Thus we need to find and spill | 2287 // This register was not free. Thus we need to find and spill |
| 2321 // parts of active and inactive live regions that use the same register | 2288 // parts of active and inactive live regions that use the same register |
| 2322 // at the same lifetime positions as current. | 2289 // at the same lifetime positions as current. |
| 2323 SplitAndSpillIntersecting(current); | 2290 SplitAndSpillIntersecting(current); |
| 2324 } | 2291 } |
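AllocateBlockedReg's decision boils down to three outcomes once use_pos and block_pos are filled in: spill current if every register is needed again before its first register use, split current if a fixed range blocks the chosen register before current ends, or evict the intersecting ranges and take the register. A sketch of that three-way choice over integer positions (types and names are illustrative, not the V8 API):

```cpp
#include <vector>

enum class Outcome { kSpillCurrent, kSplitCurrent, kTakeRegister };

Outcome DecideBlocked(const std::vector<int>& use_pos,
                      const std::vector<int>& block_pos, int first_reg_use,
                      int current_end, int* out_reg) {
  // Prefer the register whose current holder is needed again latest.
  int reg = 0;
  for (size_t i = 1; i < use_pos.size(); ++i) {
    if (use_pos[i] > use_pos[reg]) reg = static_cast<int>(i);
  }
  *out_reg = reg;
  if (use_pos[reg] < first_reg_use) {
    // Every register is needed again before current's first register use:
    // cheapest to spill current up to that use.
    return Outcome::kSpillCurrent;
  }
  if (block_pos[reg] < current_end) {
    // A fixed range blocks reg before current ends: split current there.
    return Outcome::kSplitCurrent;
  }
  return Outcome::kTakeRegister;  // evict intersecting ranges, assign reg
}
```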
| 2325 | 2292 |
| 2326 | 2293 |
| 2327 static const InstructionBlock* GetContainingLoop( | 2294 static const InstructionBlock* GetContainingLoop( |
| 2328 const InstructionSequence* sequence, const InstructionBlock* block) { | 2295 const InstructionSequence* sequence, const InstructionBlock* block) { |
| 2329 BasicBlock::RpoNumber index = block->loop_header(); | 2296 auto index = block->loop_header(); |
| 2330 if (!index.IsValid()) return NULL; | 2297 if (!index.IsValid()) return nullptr; |
| 2331 return sequence->InstructionBlockAt(index); | 2298 return sequence->InstructionBlockAt(index); |
| 2332 } | 2299 } |
| 2333 | 2300 |
| 2334 | 2301 |
| 2335 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( | 2302 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( |
| 2336 LiveRange* range, LifetimePosition pos) { | 2303 LiveRange* range, LifetimePosition pos) { |
| 2337 const InstructionBlock* block = GetInstructionBlock(pos.InstructionStart()); | 2304 auto block = GetInstructionBlock(pos.InstructionStart()); |
| 2338 const InstructionBlock* loop_header = | 2305 auto loop_header = |
| 2339 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); | 2306 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); |
| 2340 | 2307 |
| 2341 if (loop_header == NULL) return pos; | 2308 if (loop_header == nullptr) return pos; |
| 2342 | 2309 |
| 2343 UsePosition* prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); | 2310 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); |
| 2344 | 2311 |
| 2345 while (loop_header != NULL) { | 2312 while (loop_header != nullptr) { |
| 2346 // We are going to spill the live range inside the loop. | 2313 // We are going to spill the live range inside the loop. |
| 2347 // If possible, try to move the spill position backwards to the loop | 2314 // If possible, try to move the spill position backwards to the loop |
| 2348 // header; this will reduce the number of memory moves on the back edge. | 2315 // header; this will reduce the number of memory moves on the back edge. |
| 2349 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex( | 2316 auto loop_start = LifetimePosition::FromInstructionIndex( |
| 2350 loop_header->first_instruction_index()); | 2317 loop_header->first_instruction_index()); |
| 2351 | 2318 |
| 2352 if (range->Covers(loop_start)) { | 2319 if (range->Covers(loop_start)) { |
| 2353 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) { | 2320 if (prev_use == nullptr || prev_use->pos().Value() < loop_start.Value()) { |
| 2354 // No register-beneficial use inside the loop before the position. | 2321 // No register-beneficial use inside the loop before the position. |
| 2355 pos = loop_start; | 2322 pos = loop_start; |
| 2356 } | 2323 } |
| 2357 } | 2324 } |
| 2358 | 2325 |
| 2359 // Try hoisting out to an outer loop. | 2326 // Try hoisting out to an outer loop. |
| 2360 loop_header = GetContainingLoop(code(), loop_header); | 2327 loop_header = GetContainingLoop(code(), loop_header); |
| 2361 } | 2328 } |
| 2362 | 2329 |
| 2363 return pos; | 2330 return pos; |
| 2364 } | 2331 } |
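The hoisting rule in FindOptimalSpillingPos walks the chain of enclosing loop headers outward and moves the spill to a header whenever the range covers the loop start and no register-beneficial use precedes it, so the store executes once instead of on every back edge. A sketch under simplified assumptions (integer positions; prev_use of -1 meaning "no earlier beneficial use"):

```cpp
struct Loop { int start; const Loop* parent; };
struct Range {
  int start, end;
  bool Covers(int p) const { return start <= p && p < end; }
};

int HoistSpill(const Range& range, int pos, int prev_use, const Loop* loop) {
  for (; loop != nullptr; loop = loop->parent) {
    // Spilling at the header instead of inside the loop removes the memory
    // move from every back-edge iteration.
    if (range.Covers(loop->start) && prev_use < loop->start) {
      pos = loop->start;
    }
  }
  return pos;
}
```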
| 2365 | 2332 |
| 2366 | 2333 |
| 2367 void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { | 2334 void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { |
| 2368 DCHECK(current->HasRegisterAssigned()); | 2335 DCHECK(current->HasRegisterAssigned()); |
| 2369 int reg = current->assigned_register(); | 2336 int reg = current->assigned_register(); |
| 2370 LifetimePosition split_pos = current->Start(); | 2337 auto split_pos = current->Start(); |
| 2371 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 2338 for (size_t i = 0; i < active_live_ranges().size(); ++i) { |
| 2372 LiveRange* range = active_live_ranges_[i]; | 2339 auto range = active_live_ranges()[i]; |
| 2373 if (range->assigned_register() == reg) { | 2340 if (range->assigned_register() == reg) { |
| 2374 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); | 2341 auto next_pos = range->NextRegisterPosition(current->Start()); |
| 2375 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos); | 2342 auto spill_pos = FindOptimalSpillingPos(range, split_pos); |
| 2376 if (next_pos == NULL) { | 2343 if (next_pos == nullptr) { |
| 2377 SpillAfter(range, spill_pos); | 2344 SpillAfter(range, spill_pos); |
| 2378 } else { | 2345 } else { |
| 2379 // When spilling between spill_pos and next_pos ensure that the range | 2346 // When spilling between spill_pos and next_pos ensure that the range |
| 2380 // remains spilled at least until the start of the current live range. | 2347 // remains spilled at least until the start of the current live range. |
| 2381 // This guarantees that we will not introduce new unhandled ranges that | 2348 // This guarantees that we will not introduce new unhandled ranges that |
| 2382 // start before the current range, as this would violate the allocation | 2349 // start before the current range, as this would violate the allocation |
| 2383 // invariant and lead to an inconsistent state of active and inactive | 2350 // invariant and lead to an inconsistent state of active and inactive |
| 2384 // live-ranges: ranges are allocated in order of their start positions, | 2351 // live-ranges: ranges are allocated in order of their start positions, |
| 2385 // ranges are retired from active/inactive when the start of the | 2352 // ranges are retired from active/inactive when the start of the |
| 2386 // current live-range is larger than their end. | 2353 // current live-range is larger than their end. |
| 2387 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos()); | 2354 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos()); |
| 2388 } | 2355 } |
| 2389 if (!AllocationOk()) return; | 2356 if (!AllocationOk()) return; |
| 2390 ActiveToHandled(range); | 2357 ActiveToHandled(range); |
| 2391 --i; | 2358 --i; |
| 2392 } | 2359 } |
| 2393 } | 2360 } |
| 2394 | 2361 |
| 2395 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 2362 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) { |
| 2396 LiveRange* range = inactive_live_ranges_[i]; | 2363 auto range = inactive_live_ranges()[i]; |
| 2397 DCHECK(range->End().Value() > current->Start().Value()); | 2364 DCHECK(range->End().Value() > current->Start().Value()); |
| 2398 if (range->assigned_register() == reg && !range->IsFixed()) { | 2365 if (range->assigned_register() == reg && !range->IsFixed()) { |
| 2399 LifetimePosition next_intersection = range->FirstIntersection(current); | 2366 LifetimePosition next_intersection = range->FirstIntersection(current); |
| 2400 if (next_intersection.IsValid()) { | 2367 if (next_intersection.IsValid()) { |
| 2401 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); | 2368 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); |
| 2402 if (next_pos == NULL) { | 2369 if (next_pos == nullptr) { |
| 2403 SpillAfter(range, split_pos); | 2370 SpillAfter(range, split_pos); |
| 2404 } else { | 2371 } else { |
| 2405 next_intersection = Min(next_intersection, next_pos->pos()); | 2372 next_intersection = Min(next_intersection, next_pos->pos()); |
| 2406 SpillBetween(range, split_pos, next_intersection); | 2373 SpillBetween(range, split_pos, next_intersection); |
| 2407 } | 2374 } |
| 2408 if (!AllocationOk()) return; | 2375 if (!AllocationOk()) return; |
| 2409 InactiveToHandled(range); | 2376 InactiveToHandled(range); |
| 2410 --i; | 2377 --i; |
| 2411 } | 2378 } |
| 2412 } | 2379 } |
| (...skipping 13 matching lines...) |
| 2426 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); | 2393 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); |
| 2427 | 2394 |
| 2428 if (pos.Value() <= range->Start().Value()) return range; | 2395 if (pos.Value() <= range->Start().Value()) return range; |
| 2429 | 2396 |
| 2430 // We can't properly connect live ranges if the split occurred at the end | 2397 // We can't properly connect live ranges if the split occurred at the end |
| 2431 // of a control instruction. | 2398 // of a control instruction. |
| 2432 DCHECK(pos.IsInstructionStart() || | 2399 DCHECK(pos.IsInstructionStart() || |
| 2433 !InstructionAt(pos.InstructionIndex())->IsControl()); | 2400 !InstructionAt(pos.InstructionIndex())->IsControl()); |
| 2434 | 2401 |
| 2435 int vreg = GetVirtualRegister(); | 2402 int vreg = GetVirtualRegister(); |
| 2436 if (!AllocationOk()) return NULL; | 2403 if (!AllocationOk()) return nullptr; |
| 2437 LiveRange* result = LiveRangeFor(vreg); | 2404 auto result = LiveRangeFor(vreg); |
| 2438 range->SplitAt(pos, result, local_zone()); | 2405 range->SplitAt(pos, result, local_zone()); |
| 2439 return result; | 2406 return result; |
| 2440 } | 2407 } |
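Conceptually, SplitRangeAt cuts a range into [start, pos) and [pos, end) and threads the second part onto the parent's chain of children, returning it so the caller can re-queue it as unhandled. A simplified sketch of that chaining (plain struct with unique_ptr links, no Zone allocation):

```cpp
#include <memory>

struct Range {
  int start, end, id;
  std::unique_ptr<Range> next;  // next part of a multi-part range
};

Range* SplitAt(Range* range, int pos, int new_id) {
  if (pos <= range->start) return range;  // nothing to split off
  auto result =
      std::make_unique<Range>(Range{pos, range->end, new_id, nullptr});
  range->end = pos;                 // first part now ends at the split point
  result->next = std::move(range->next);
  Range* raw = result.get();
  range->next = std::move(result);  // chain the new part after the first
  return raw;
}
```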
| 2441 | 2408 |
| 2442 | 2409 |
| 2443 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, | 2410 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, |
| 2444 LifetimePosition start, | 2411 LifetimePosition start, |
| 2445 LifetimePosition end) { | 2412 LifetimePosition end) { |
| 2446 DCHECK(!range->IsFixed()); | 2413 DCHECK(!range->IsFixed()); |
| 2447 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", | 2414 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", |
| 2448 range->id(), start.Value(), end.Value()); | 2415 range->id(), start.Value(), end.Value()); |
| 2449 | 2416 |
| 2450 LifetimePosition split_pos = FindOptimalSplitPos(start, end); | 2417 auto split_pos = FindOptimalSplitPos(start, end); |
| 2451 DCHECK(split_pos.Value() >= start.Value()); | 2418 DCHECK(split_pos.Value() >= start.Value()); |
| 2452 return SplitRangeAt(range, split_pos); | 2419 return SplitRangeAt(range, split_pos); |
| 2453 } | 2420 } |
| 2454 | 2421 |
| 2455 | 2422 |
| 2456 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, | 2423 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, |
| 2457 LifetimePosition end) { | 2424 LifetimePosition end) { |
| 2458 int start_instr = start.InstructionIndex(); | 2425 int start_instr = start.InstructionIndex(); |
| 2459 int end_instr = end.InstructionIndex(); | 2426 int end_instr = end.InstructionIndex(); |
| 2460 DCHECK(start_instr <= end_instr); | 2427 DCHECK(start_instr <= end_instr); |
| 2461 | 2428 |
| 2462 // We have no choice: start and end are within the same instruction. | 2429 // We have no choice: start and end are within the same instruction. |
| 2463 if (start_instr == end_instr) return end; | 2430 if (start_instr == end_instr) return end; |
| 2464 | 2431 |
| 2465 const InstructionBlock* start_block = GetInstructionBlock(start); | 2432 auto start_block = GetInstructionBlock(start); |
| 2466 const InstructionBlock* end_block = GetInstructionBlock(end); | 2433 auto end_block = GetInstructionBlock(end); |
| 2467 | 2434 |
| 2468 if (end_block == start_block) { | 2435 if (end_block == start_block) { |
| 2469 // The interval is split in the same basic block. Split at the latest | 2436 // The interval is split in the same basic block. Split at the latest |
| 2470 // possible position. | 2437 // possible position. |
| 2471 return end; | 2438 return end; |
| 2472 } | 2439 } |
| 2473 | 2440 |
| 2474 const InstructionBlock* block = end_block; | 2441 auto block = end_block; |
| 2475 // Find header of outermost loop. | 2442 // Find header of outermost loop. |
| 2476 // TODO(titzer): fix redundancy below. | 2443 // TODO(titzer): fix redundancy below. |
| 2477 while (GetContainingLoop(code(), block) != NULL && | 2444 while (GetContainingLoop(code(), block) != nullptr && |
| 2478 GetContainingLoop(code(), block)->rpo_number().ToInt() > | 2445 GetContainingLoop(code(), block)->rpo_number().ToInt() > |
| 2479 start_block->rpo_number().ToInt()) { | 2446 start_block->rpo_number().ToInt()) { |
| 2480 block = GetContainingLoop(code(), block); | 2447 block = GetContainingLoop(code(), block); |
| 2481 } | 2448 } |
| 2482 | 2449 |
| 2483 // We did not find any suitable outer loop. Split at the latest possible | 2450 // We did not find any suitable outer loop. Split at the latest possible |
| 2484 // position unless end_block is a loop header itself. | 2451 // position unless end_block is a loop header itself. |
| 2485 if (block == end_block && !end_block->IsLoopHeader()) return end; | 2452 if (block == end_block && !end_block->IsLoopHeader()) return end; |
| 2486 | 2453 |
| 2487 return LifetimePosition::FromInstructionIndex( | 2454 return LifetimePosition::FromInstructionIndex( |
| 2488 block->first_instruction_index()); | 2455 block->first_instruction_index()); |
| 2489 } | 2456 } |
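
The heuristic above tries to hoist the split out of any loop that encloses
end_block but not start_block, so spill and reload code is not executed on
every loop iteration. A compilable sketch of the same walk, with a hypothetical
Block struct in place of V8's InstructionBlock (assumption: rpo_number gives
the block's reverse post-order index, so a loop header that comes after
start_block in RPO cannot contain start_block):

    struct Block {
      int rpo_number;                // reverse post-order index
      int first_instruction_index;   // index of the block's first instruction
      bool is_loop_header;
      const Block* containing_loop;  // innermost enclosing loop header, or null
    };

    int FindOptimalSplitIndex(const Block* start_block, const Block* end_block,
                              int end_instr) {
      // Same block: no choice but to split at the latest position.
      if (start_block == end_block) return end_instr;
      // Climb to the outermost loop that still excludes start_block.
      const Block* block = end_block;
      while (block->containing_loop != nullptr &&
             block->containing_loop->rpo_number > start_block->rpo_number) {
        block = block->containing_loop;
      }
      // No such loop (and end_block is not itself a header): split late.
      if (block == end_block && !end_block->is_loop_header) return end_instr;
      // Otherwise split just before the loop is entered.
      return block->first_instruction_index;
    }
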
| 2490 | 2457 |
| 2491 | 2458 |
| 2492 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { | 2459 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { |
| 2493 LiveRange* second_part = SplitRangeAt(range, pos); | 2460 auto second_part = SplitRangeAt(range, pos); |
| 2494 if (!AllocationOk()) return; | 2461 if (!AllocationOk()) return; |
| 2495 Spill(second_part); | 2462 Spill(second_part); |
| 2496 } | 2463 } |
| 2497 | 2464 |
| 2498 | 2465 |
| 2499 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, | 2466 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start, |
| 2500 LifetimePosition end) { | 2467 LifetimePosition end) { |
| 2501 SpillBetweenUntil(range, start, start, end); | 2468 SpillBetweenUntil(range, start, start, end); |
| 2502 } | 2469 } |
| 2503 | 2470 |
| 2504 | 2471 |
| 2505 void RegisterAllocator::SpillBetweenUntil(LiveRange* range, | 2472 void RegisterAllocator::SpillBetweenUntil(LiveRange* range, |
| 2506 LifetimePosition start, | 2473 LifetimePosition start, |
| 2507 LifetimePosition until, | 2474 LifetimePosition until, |
| 2508 LifetimePosition end) { | 2475 LifetimePosition end) { |
| 2509 CHECK(start.Value() < end.Value()); | 2476 CHECK(start.Value() < end.Value()); |
| 2510 LiveRange* second_part = SplitRangeAt(range, start); | 2477 auto second_part = SplitRangeAt(range, start); |
| 2511 if (!AllocationOk()) return; | 2478 if (!AllocationOk()) return; |
| 2512 | 2479 |
| 2513 if (second_part->Start().Value() < end.Value()) { | 2480 if (second_part->Start().Value() < end.Value()) { |
| 2514 // The split result intersects with [start, end[. | 2481 // The split result intersects with [start, end[. |
| 2515 // Split it at a position inside ]start+1, end[, spill the middle part | 2482 // Split it at a position inside ]start+1, end[, spill the middle part |
| 2516 // and put the rest into unhandled. | 2483 // and put the rest into unhandled. |
| 2517 LiveRange* third_part = SplitBetween( | 2484 auto third_part = SplitBetween( |
| 2518 second_part, Max(second_part->Start().InstructionEnd(), until), | 2485 second_part, Max(second_part->Start().InstructionEnd(), until), |
| 2519 end.PrevInstruction().InstructionEnd()); | 2486 end.PrevInstruction().InstructionEnd()); |
| 2520 if (!AllocationOk()) return; | 2487 if (!AllocationOk()) return; |
| 2521 | 2488 |
| 2522 DCHECK(third_part != second_part); | 2489 DCHECK(third_part != second_part); |
| 2523 | 2490 |
| 2524 Spill(second_part); | 2491 Spill(second_part); |
| 2525 AddToUnhandledSorted(third_part); | 2492 AddToUnhandledSorted(third_part); |
| 2526 } else { | 2493 } else { |
| 2527 // The split result does not intersect with [start, end[. | 2494 // The split result does not intersect with [start, end[. |
| 2528 // Nothing to spill; just put it into unhandled as a whole. | 2495 // Nothing to spill; just put it into unhandled as a whole. |
| 2529 AddToUnhandledSorted(second_part); | 2496 AddToUnhandledSorted(second_part); |
| 2530 } | 2497 } |
| 2531 } | 2498 } |
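
SpillBetweenUntil therefore splits a range up to three ways: the head keeps its
register, the middle part is spilled, and the tail is requeued for another
allocation attempt. A simplified sketch over integer positions, with
hypothetical Range/SplitOutcome types, assuming (as the DCHECK above does) that
there is room for a split point strictly between start and end; the real code
also threads AllocationOk failure checks and places the second split with
FindOptimalSplitPos:

    #include <algorithm>
    #include <cassert>

    struct Range { int start, end; };  // half-open interval [start, end[

    struct SplitOutcome {
      Range spilled;   // middle part, lives on the stack
      Range requeued;  // part handed back to the unhandled queue
      bool did_spill;
    };

    SplitOutcome SpillBetweenUntil(Range range, int start, int until, int end) {
      assert(start < end);
      // First split: everything before `start` keeps its register.
      Range second_part{std::max(range.start, start), range.end};
      if (second_part.start < end) {
        // The tail intersects [start, end[: choose a split point after
        // `until` but before `end`, spill the middle, requeue the rest.
        int split_pos =
            std::min(std::max(second_part.start + 1, until), end - 1);
        assert(split_pos > second_part.start);  // third part != second part
        return {{second_part.start, split_pos},
                {split_pos, second_part.end},
                true};
      }
      // No intersection with [start, end[: nothing to spill, requeue whole.
      return {{0, 0}, second_part, false};
    }
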
| 2532 | 2499 |
| 2533 | 2500 |
| 2534 void RegisterAllocator::Spill(LiveRange* range) { | 2501 void RegisterAllocator::Spill(LiveRange* range) { |
| 2535 DCHECK(!range->IsSpilled()); | 2502 DCHECK(!range->IsSpilled()); |
| 2536 TraceAlloc("Spilling live range %d\n", range->id()); | 2503 TraceAlloc("Spilling live range %d\n", range->id()); |
| 2537 LiveRange* first = range->TopLevel(); | 2504 auto first = range->TopLevel(); |
| 2538 if (first->HasNoSpillType()) { | 2505 if (first->HasNoSpillType()) { |
| 2539 if (FLAG_turbo_reuse_spill_slots) { | 2506 if (FLAG_turbo_reuse_spill_slots) { |
| 2540 AssignSpillRangeToLiveRange(first); | 2507 AssignSpillRangeToLiveRange(first); |
| 2541 } else { | 2508 } else { |
| 2542 InstructionOperand* op = TryReuseSpillSlot(range); | 2509 auto op = TryReuseSpillSlot(range); |
| 2543 if (op == NULL) { | 2510 if (op == nullptr) { |
| 2544 // Allocate a new operand referring to the spill slot. | 2511 // Allocate a new operand referring to the spill slot. |
| 2545 RegisterKind kind = range->Kind(); | 2512 RegisterKind kind = range->Kind(); |
| 2546 int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS); | 2513 int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS); |
| 2547 auto op_kind = kind == DOUBLE_REGISTERS | 2514 auto op_kind = kind == DOUBLE_REGISTERS |
| 2548 ? InstructionOperand::DOUBLE_STACK_SLOT | 2515 ? InstructionOperand::DOUBLE_STACK_SLOT |
| 2549 : InstructionOperand::STACK_SLOT; | 2516 : InstructionOperand::STACK_SLOT; |
| 2550 op = new (code_zone()) InstructionOperand(op_kind, index); | 2517 op = new (code_zone()) InstructionOperand(op_kind, index); |
| 2551 } | 2518 } |
| 2552 first->SetSpillOperand(op); | 2519 first->SetSpillOperand(op); |
| 2553 } | 2520 } |
| 2554 } | 2521 } |
| 2555 range->MakeSpilled(); | 2522 range->MakeSpilled(); |
| 2556 } | 2523 } |
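
Spill assigns one stack slot per top-level range, so every piece of a split
chain spills to and reloads from the same location. A minimal sketch with
hypothetical Frame and LiveRange stand-ins (the FLAG_turbo_reuse_spill_slots
path, which defers slot assignment to shared spill ranges, is not modeled):

    #include <cstdio>

    enum RegisterKind { GENERAL_REGISTERS, DOUBLE_REGISTERS };

    struct Frame {
      int next_slot = 0;
      int AllocateSpillSlot(bool is_double) {
        // A real frame would size and align double slots differently; a
        // single counter is enough for the sketch.
        return next_slot++;
      }
    };

    struct LiveRange {
      int id;
      RegisterKind kind;
      LiveRange* top_level;  // first range of the split chain
      int spill_slot = -1;   // -1 means no slot assigned yet
      bool spilled = false;
    };

    void Spill(LiveRange* range, Frame* frame) {
      // The top-level range owns the slot; assign it on the first spill only.
      LiveRange* first = range->top_level;
      if (first->spill_slot < 0) {
        first->spill_slot =
            frame->AllocateSpillSlot(first->kind == DOUBLE_REGISTERS);
      }
      range->spilled = true;
      std::printf("Spilling live range %d to slot %d\n", range->id,
                  first->spill_slot);
    }
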
| 2557 | 2524 |
| 2558 | 2525 |
| 2559 int RegisterAllocator::RegisterCount() const { return num_registers_; } | 2526 int RegisterAllocator::RegisterCount() const { return num_registers_; } |
| 2560 | 2527 |
| 2561 | 2528 |
| 2562 #ifdef DEBUG | 2529 #ifdef DEBUG |
| 2563 | 2530 |
| 2564 | 2531 |
| 2565 void RegisterAllocator::Verify() const { | 2532 void RegisterAllocator::Verify() const { |
| 2566 for (auto current : live_ranges()) { | 2533 for (auto current : live_ranges()) { |
| 2567 if (current != NULL) current->Verify(); | 2534 if (current != nullptr) current->Verify(); |
| 2568 } | 2535 } |
| 2569 } | 2536 } |
| 2570 | 2537 |
| 2571 | 2538 |
| 2572 #endif | 2539 #endif |
| 2573 | 2540 |
| 2574 | 2541 |
| 2575 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range, | 2542 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range, |
| 2576 int reg) { | 2543 int reg) { |
| 2577 if (range->Kind() == DOUBLE_REGISTERS) { | 2544 if (range->Kind() == DOUBLE_REGISTERS) { |
| 2578 assigned_double_registers_->Add(reg); | 2545 assigned_double_registers_->Add(reg); |
| 2579 } else { | 2546 } else { |
| 2580 DCHECK(range->Kind() == GENERAL_REGISTERS); | 2547 DCHECK(range->Kind() == GENERAL_REGISTERS); |
| 2581 assigned_registers_->Add(reg); | 2548 assigned_registers_->Add(reg); |
| 2582 } | 2549 } |
| 2583 range->set_assigned_register(reg); | 2550 range->set_assigned_register(reg); |
| 2584 } | 2551 } |
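
The bookkeeping above records each assignment in a per-kind register set so
later phases (e.g. frame building) can tell which registers were used at all.
A sketch using std::bitset as a stand-in for V8's BitVector:

    #include <bitset>

    enum RegisterKind { GENERAL_REGISTERS, DOUBLE_REGISTERS };

    struct Assignments {
      std::bitset<32> general;  // general-purpose registers ever assigned
      std::bitset<32> fp;       // double registers ever assigned
    };

    struct LiveRange {
      RegisterKind kind;
      int assigned_register = -1;
    };

    void SetLiveRangeAssignedRegister(Assignments* sets, LiveRange* range,
                                      int reg) {
      // Record the register in the set matching the range's kind, then
      // remember the choice on the range itself.
      if (range->kind == DOUBLE_REGISTERS) {
        sets->fp.set(reg);
      } else {
        sets->general.set(reg);
      }
      range->assigned_register = reg;
    }
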
| 2585 | 2552 |
| 2586 } // namespace compiler | 2553 } // namespace compiler |
| 2587 } // namespace internal | 2554 } // namespace internal |
| 2588 } // namespace v8 | 2555 } // namespace v8 |