Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/compiler/linkage.h" | 6 #include "src/compiler/linkage.h" |
| 7 #include "src/compiler/register-allocator.h" | 7 #include "src/compiler/register-allocator.h" |
| 8 #include "src/string-stream.h" | 8 #include "src/string-stream.h" |
| 9 | 9 |
| 10 namespace v8 { | 10 namespace v8 { |
| (...skipping 212 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 223 } | 223 } |
| 224 if (pos.IsStart()) { | 224 if (pos.IsStart()) { |
| 225 os << 's'; | 225 os << 's'; |
| 226 } else { | 226 } else { |
| 227 os << 'e'; | 227 os << 'e'; |
| 228 } | 228 } |
| 229 return os; | 229 return os; |
| 230 } | 230 } |
| 231 | 231 |
| 232 | 232 |
| 233 struct LiveRange::SpillAtDefinitionList : ZoneObject { | |
| 234 SpillAtDefinitionList(int gap_index, InstructionOperand* operand, | |
| 235 SpillAtDefinitionList* next) | |
| 236 : gap_index(gap_index), operand(operand), next(next) {} | |
| 237 const int gap_index; | |
| 238 InstructionOperand* const operand; | |
| 239 SpillAtDefinitionList* const next; | |
| 240 }; | |
| 241 | |
| 242 | |
| 243 const float LiveRange::kInvalidWeight = -1; | 233 const float LiveRange::kInvalidWeight = -1; |
| 244 const float LiveRange::kMaxWeight = std::numeric_limits<float>::max(); | 234 const float LiveRange::kMaxWeight = std::numeric_limits<float>::max(); |
| 245 | 235 |
| 246 | 236 |
| 247 LiveRange::LiveRange(int id, MachineType machine_type) | 237 LiveRange::LiveRange(int relative_id, MachineType machine_type, |
| 248 : id_(id), | 238 TopLevelLiveRange* parent) |
| 249 spill_start_index_(kMaxInt), | 239 : relative_id_(relative_id), |
| 250 bits_(0), | 240 bits_(0), |
| 251 last_interval_(nullptr), | 241 last_interval_(nullptr), |
| 252 first_interval_(nullptr), | 242 first_interval_(nullptr), |
| 253 first_pos_(nullptr), | 243 first_pos_(nullptr), |
| 254 parent_(nullptr), | 244 parent_(parent), |
| 255 next_(nullptr), | 245 next_(nullptr), |
| 256 splintered_from_(nullptr), | |
| 257 spill_operand_(nullptr), | |
| 258 spills_at_definition_(nullptr), | |
| 259 current_interval_(nullptr), | 246 current_interval_(nullptr), |
| 260 last_processed_use_(nullptr), | 247 last_processed_use_(nullptr), |
| 261 current_hint_position_(nullptr), | 248 current_hint_position_(nullptr), |
| 262 size_(kInvalidSize), | 249 size_(kInvalidSize), |
| 263 weight_(kInvalidWeight), | 250 weight_(kInvalidWeight) { |
| 264 spilled_in_deferred_block_(false) { | |
| 265 DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type)); | 251 DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type)); |
| 266 bits_ = SpillTypeField::encode(SpillType::kNoSpillType) | | 252 bits_ = AssignedRegisterField::encode(kUnassignedRegister) | |
| 267 AssignedRegisterField::encode(kUnassignedRegister) | | |
| 268 MachineTypeField::encode(machine_type); | 253 MachineTypeField::encode(machine_type); |
| 269 } | 254 } |
| 270 | 255 |
| 271 | 256 |
| 272 void LiveRange::Verify() const { | 257 void LiveRange::Verify() const { |
| 273 // Walk the positions, verifying that each is in an interval. | 258 // Walk the positions, verifying that each is in an interval. |
| 274 auto interval = first_interval_; | 259 auto interval = first_interval_; |
| 275 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { | 260 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { |
| 276 CHECK(Start() <= pos->pos()); | 261 CHECK(Start() <= pos->pos()); |
| 277 CHECK(pos->pos() <= End()); | 262 CHECK(pos->pos() <= End()); |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 309 case kRepFloat32: | 294 case kRepFloat32: |
| 310 case kRepFloat64: | 295 case kRepFloat64: |
| 311 return DOUBLE_REGISTERS; | 296 return DOUBLE_REGISTERS; |
| 312 default: | 297 default: |
| 313 break; | 298 break; |
| 314 } | 299 } |
| 315 return GENERAL_REGISTERS; | 300 return GENERAL_REGISTERS; |
| 316 } | 301 } |
| 317 | 302 |
| 318 | 303 |
| 319 void LiveRange::SpillAtDefinition(Zone* zone, int gap_index, | |
| 320 InstructionOperand* operand) { | |
| 321 DCHECK(HasNoSpillType()); | |
| 322 spills_at_definition_ = new (zone) | |
| 323 SpillAtDefinitionList(gap_index, operand, spills_at_definition_); | |
| 324 } | |
| 325 | |
| 326 | |
| 327 bool LiveRange::TryCommitSpillInDeferredBlock( | |
| 328 InstructionSequence* code, const InstructionOperand& spill_operand) { | |
| 329 DCHECK(!IsChild()); | |
| 330 | |
| 331 if (!FLAG_turbo_preprocess_ranges || IsEmpty() || HasNoSpillType() || | |
| 332 spill_operand.IsConstant() || spill_operand.IsImmediate()) { | |
| 333 return false; | |
| 334 } | |
| 335 | |
| 336 int count = 0; | |
| 337 for (const LiveRange* child = this; child != nullptr; child = child->next()) { | |
| 338 int first_instr = child->Start().ToInstructionIndex(); | |
| 339 | |
| 340 // If the range starts at instruction end, the first instruction index is | |
| 341 // the next one. | |
| 342 if (!child->Start().IsGapPosition() && !child->Start().IsStart()) { | |
| 343 ++first_instr; | |
| 344 } | |
| 345 | |
| 346 // We only look at where the range starts. It doesn't matter where it ends: | |
| 347 // if it ends past this block, then either there is a phi there already, | |
| 348 // or ResolveControlFlow will adapt the last instruction gap of this block | |
| 349 // as if there were a phi. In either case, data flow will be correct. | |
| 350 const InstructionBlock* block = code->GetInstructionBlock(first_instr); | |
| 351 | |
| 352 // If we have slot uses in a subrange, bail out, because we need the value | |
| 353 // on the stack before that use. | |
| 354 bool has_slot_use = child->NextSlotPosition(child->Start()) != nullptr; | |
| 355 if (!block->IsDeferred()) { | |
| 356 if (child->spilled() || has_slot_use) { | |
| 357 TRACE( | |
| 358 "Live Range %d must be spilled at definition: found a " | |
| 359 "slot-requiring non-deferred child range %d.\n", | |
| 360 TopLevel()->id(), child->id()); | |
| 361 return false; | |
| 362 } | |
| 363 } else { | |
| 364 if (child->spilled() || has_slot_use) ++count; | |
| 365 } | |
| 366 } | |
| 367 if (count == 0) return false; | |
| 368 | |
| 369 spill_start_index_ = -1; | |
| 370 spilled_in_deferred_block_ = true; | |
| 371 | |
| 372 TRACE("Live Range %d will be spilled only in deferred blocks.\n", id()); | |
| 373 // If we have ranges that aren't spilled but require the operand on the stack, | |
| 374 // make sure we insert the spill. | |
| 375 for (const LiveRange* child = this; child != nullptr; child = child->next()) { | |
| 376 if (!child->spilled() && | |
| 377 child->NextSlotPosition(child->Start()) != nullptr) { | |
| 378 auto instr = code->InstructionAt(child->Start().ToInstructionIndex()); | |
| 379 // Insert spill at the end to let live range connections happen at START. | |
| 380 auto move = | |
| 381 instr->GetOrCreateParallelMove(Instruction::END, code->zone()); | |
| 382 InstructionOperand assigned = child->GetAssignedOperand(); | |
| 383 if (TopLevel()->has_slot_use()) { | |
| 384 bool found = false; | |
| 385 for (auto move_op : *move) { | |
| 386 if (move_op->IsEliminated()) continue; | |
| 387 if (move_op->source().Equals(assigned) && | |
| 388 move_op->destination().Equals(spill_operand)) { | |
| 389 found = true; | |
| 390 break; | |
| 391 } | |
| 392 } | |
| 393 if (found) continue; | |
| 394 } | |
| 395 | |
| 396 move->AddMove(assigned, spill_operand); | |
| 397 } | |
| 398 } | |
| 399 | |
| 400 return true; | |
| 401 } | |
| 402 | |
| 403 | |
| 404 void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence, | |
| 405 const InstructionOperand& op, | |
| 406 bool might_be_duplicated) { | |
| 407 DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr); | |
| 408 DCHECK(!IsChild()); | |
| 409 auto zone = sequence->zone(); | |
| 410 | |
| 411 for (auto to_spill = spills_at_definition_; to_spill != nullptr; | |
| 412 to_spill = to_spill->next) { | |
| 413 auto instr = sequence->InstructionAt(to_spill->gap_index); | |
| 414 auto move = instr->GetOrCreateParallelMove(Instruction::START, zone); | |
| 415 // Skip insertion if it's possible that the move exists already as a | |
| 416 // constraint move from a fixed output register to a slot. | |
| 417 if (might_be_duplicated) { | |
| 418 bool found = false; | |
| 419 for (auto move_op : *move) { | |
| 420 if (move_op->IsEliminated()) continue; | |
| 421 if (move_op->source().Equals(*to_spill->operand) && | |
| 422 move_op->destination().Equals(op)) { | |
| 423 found = true; | |
| 424 break; | |
| 425 } | |
| 426 } | |
| 427 if (found) continue; | |
| 428 } | |
| 429 move->AddMove(*to_spill->operand, op); | |
| 430 } | |
| 431 } | |
| 432 | |
| 433 | |
| 434 UsePosition* LiveRange::FirstHintPosition(int* register_index) const { | 304 UsePosition* LiveRange::FirstHintPosition(int* register_index) const { |
| 435 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { | 305 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { |
| 436 if (pos->HintRegister(register_index)) return pos; | 306 if (pos->HintRegister(register_index)) return pos; |
| 437 } | 307 } |
| 438 return nullptr; | 308 return nullptr; |
| 439 } | 309 } |
| 440 | 310 |
| 441 | 311 |
| 442 void LiveRange::SetSpillOperand(InstructionOperand* operand) { | |
| 443 DCHECK(HasNoSpillType()); | |
| 444 DCHECK(!operand->IsUnallocated() && !operand->IsImmediate()); | |
| 445 set_spill_type(SpillType::kSpillOperand); | |
| 446 spill_operand_ = operand; | |
| 447 } | |
| 448 | |
| 449 | |
| 450 void LiveRange::SetSpillRange(SpillRange* spill_range) { | |
| 451 DCHECK(!HasSpillOperand()); | |
| 452 DCHECK(spill_range); | |
| 453 DCHECK(!IsChild()); | |
| 454 spill_range_ = spill_range; | |
| 455 } | |
| 456 | |
| 457 | |
| 458 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const { | 312 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const { |
| 459 UsePosition* use_pos = last_processed_use_; | 313 UsePosition* use_pos = last_processed_use_; |
| 460 if (use_pos == nullptr || use_pos->pos() > start) { | 314 if (use_pos == nullptr || use_pos->pos() > start) { |
| 461 use_pos = first_pos(); | 315 use_pos = first_pos(); |
| 462 } | 316 } |
| 463 while (use_pos != nullptr && use_pos->pos() < start) { | 317 while (use_pos != nullptr && use_pos->pos() < start) { |
| 464 use_pos = use_pos->next(); | 318 use_pos = use_pos->next(); |
| 465 } | 319 } |
| 466 last_processed_use_ = use_pos; | 320 last_processed_use_ = use_pos; |
| 467 return use_pos; | 321 return use_pos; |
| (...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 532 DCHECK(!HasRegisterAssigned()); | 386 DCHECK(!HasRegisterAssigned()); |
| 533 if (TopLevel()->HasSpillOperand()) { | 387 if (TopLevel()->HasSpillOperand()) { |
| 534 auto op = TopLevel()->GetSpillOperand(); | 388 auto op = TopLevel()->GetSpillOperand(); |
| 535 DCHECK(!op->IsUnallocated()); | 389 DCHECK(!op->IsUnallocated()); |
| 536 return *op; | 390 return *op; |
| 537 } | 391 } |
| 538 return TopLevel()->GetSpillRangeOperand(); | 392 return TopLevel()->GetSpillRangeOperand(); |
| 539 } | 393 } |
| 540 | 394 |
| 541 | 395 |
| 542 AllocatedOperand LiveRange::GetSpillRangeOperand() const { | |
| 543 auto spill_range = GetSpillRange(); | |
| 544 int index = spill_range->assigned_slot(); | |
| 545 switch (kind()) { | |
| 546 case GENERAL_REGISTERS: | |
| 547 return StackSlotOperand(machine_type(), index); | |
| 548 case DOUBLE_REGISTERS: | |
| 549 return DoubleStackSlotOperand(machine_type(), index); | |
| 550 } | |
| 551 UNREACHABLE(); | |
| 552 return StackSlotOperand(kMachNone, 0); | |
| 553 } | |
| 554 | |
| 555 | |
| 556 UseInterval* LiveRange::FirstSearchIntervalForPosition( | 396 UseInterval* LiveRange::FirstSearchIntervalForPosition( |
| 557 LifetimePosition position) const { | 397 LifetimePosition position) const { |
| 558 if (current_interval_ == nullptr) return first_interval_; | 398 if (current_interval_ == nullptr) return first_interval_; |
| 559 if (current_interval_->start() > position) { | 399 if (current_interval_->start() > position) { |
| 560 current_interval_ = nullptr; | 400 current_interval_ = nullptr; |
| 561 return first_interval_; | 401 return first_interval_; |
| 562 } | 402 } |
| 563 return current_interval_; | 403 return current_interval_; |
| 564 } | 404 } |
| 565 | 405 |
| 566 | 406 |
| 567 void LiveRange::AdvanceLastProcessedMarker( | 407 void LiveRange::AdvanceLastProcessedMarker( |
| 568 UseInterval* to_start_of, LifetimePosition but_not_past) const { | 408 UseInterval* to_start_of, LifetimePosition but_not_past) const { |
| 569 if (to_start_of == nullptr) return; | 409 if (to_start_of == nullptr) return; |
| 570 if (to_start_of->start() > but_not_past) return; | 410 if (to_start_of->start() > but_not_past) return; |
| 571 auto start = current_interval_ == nullptr ? LifetimePosition::Invalid() | 411 auto start = current_interval_ == nullptr ? LifetimePosition::Invalid() |
| 572 : current_interval_->start(); | 412 : current_interval_->start(); |
| 573 if (to_start_of->start() > start) { | 413 if (to_start_of->start() > start) { |
| 574 current_interval_ = to_start_of; | 414 current_interval_ = to_start_of; |
| 575 } | 415 } |
| 576 } | 416 } |
| 577 | 417 |
| 578 | 418 |
| 579 void LiveRange::SplitAt(LifetimePosition position, LiveRange* result, | 419 LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) { |
| 580 Zone* zone) { | 420 int new_id = TopLevel()->GetNextChildId(); |
| 421 LiveRange* child = new (zone) LiveRange(new_id, machine_type(), TopLevel()); | |
| 422 DetachAt(position, child, zone); | |
| 423 | |
| 424 child->parent_ = TopLevel(); | |
| 425 child->next_ = next_; | |
| 426 next_ = child; | |
| 427 | |
| 428 return child; | |
| 429 } | |
| 430 | |
| 431 | |
| 432 void LiveRange::DetachAt(LifetimePosition position, LiveRange* result, | |
| 433 Zone* zone) { | |
| 581 DCHECK(Start() < position); | 434 DCHECK(Start() < position); |
| 582 DCHECK(result->IsEmpty()); | 435 DCHECK(result->IsEmpty()); |
| 583 // Find the last interval that ends before the position. If the | 436 // Find the last interval that ends before the position. If the |
| 584 // position is contained in one of the intervals in the chain, we | 437 // position is contained in one of the intervals in the chain, we |
| 585 // split that interval and use the first part. | 438 // split that interval and use the first part. |
| 586 auto current = FirstSearchIntervalForPosition(position); | 439 auto current = FirstSearchIntervalForPosition(position); |
| 587 | 440 |
| 588 // If the split position coincides with the beginning of a use interval | 441 // If the split position coincides with the beginning of a use interval |
| 589 // we need to split use positions in a special way. | 442 // we need to split use positions in a special way. |
| 590 bool split_at_start = false; | 443 bool split_at_start = false; |
| (...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 645 } else { | 498 } else { |
| 646 first_pos_ = nullptr; | 499 first_pos_ = nullptr; |
| 647 } | 500 } |
| 648 result->first_pos_ = use_after; | 501 result->first_pos_ = use_after; |
| 649 | 502 |
| 650 // Discard cached iteration state. It might be pointing | 503 // Discard cached iteration state. It might be pointing |
| 651 // to the use that no longer belongs to this live range. | 504 // to the use that no longer belongs to this live range. |
| 652 last_processed_use_ = nullptr; | 505 last_processed_use_ = nullptr; |
| 653 current_interval_ = nullptr; | 506 current_interval_ = nullptr; |
| 654 | 507 |
| 655 // Link the new live range in the chain before any of the other | |
| 656 // ranges linked from the range before the split. | |
| 657 result->parent_ = (parent_ == nullptr) ? this : parent_; | |
| 658 result->next_ = next_; | |
| 659 next_ = result; | |
| 660 | |
| 661 // Invalidate size and weight of this range. The child range has them | 508 // Invalidate size and weight of this range. The child range has them |
| 662 // invalid at construction. | 509 // invalid at construction. |
| 663 size_ = kInvalidSize; | 510 size_ = kInvalidSize; |
| 664 weight_ = kInvalidWeight; | 511 weight_ = kInvalidWeight; |
| 665 #ifdef DEBUG | 512 #ifdef DEBUG |
| 666 Verify(); | 513 Verify(); |
| 667 result->Verify(); | 514 result->Verify(); |
| 668 #endif | 515 #endif |
| 669 } | 516 } |
| 670 | 517 |
| 671 | 518 |
| 672 void LiveRange::Splinter(LifetimePosition start, LifetimePosition end, | 519 void LiveRange::AppendAsChild(TopLevelLiveRange* other) { |
| 673 LiveRange* result, Zone* zone) { | 520 next_ = other; |
| 521 | |
| 522 other->UpdateParentForAllChildren(TopLevel()); | |
| 523 TopLevel()->UpdateSpillRangePostMerge(other); | |
| 524 } | |
| 525 | |
| 526 | |
| 527 void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_parent) { | |
| 528 LiveRange* child = this; | |
| 529 for (; child != nullptr; child = child->next()) { | |
| 530 child->parent_ = new_parent; | |
| 531 } | |
| 532 } | |
| 533 | |
| 534 | |
| 535 void LiveRange::ConvertUsesToOperand(const InstructionOperand& op, | |
| 536 const InstructionOperand& spill_op) { | |
| 537 for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) { | |
| 538 DCHECK(Start() <= pos->pos() && pos->pos() <= End()); | |
| 539 if (!pos->HasOperand()) continue; | |
| 540 switch (pos->type()) { | |
| 541 case UsePositionType::kRequiresSlot: | |
| 542 DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot()); | |
| 543 InstructionOperand::ReplaceWith(pos->operand(), &spill_op); | |
| 544 break; | |
| 545 case UsePositionType::kRequiresRegister: | |
| 546 DCHECK(op.IsRegister() || op.IsDoubleRegister()); | |
| 547 // Fall through. | |
| 548 case UsePositionType::kAny: | |
| 549 InstructionOperand::ReplaceWith(pos->operand(), &op); | |
| 550 break; | |
| 551 } | |
| 552 } | |
| 553 } | |
| 554 | |
| 555 | |
| 556 // This implements an ordering on live ranges so that they are ordered by their | |
| 557 // start positions. This is needed for the correctness of the register | |
| 558 // allocation algorithm. If two live ranges start at the same offset then there | |
| 559 // is a tie breaker based on where the value is first used. This part of the | |
| 560 // ordering is merely a heuristic. | |
| 561 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const { | |
| 562 LifetimePosition start = Start(); | |
| 563 LifetimePosition other_start = other->Start(); | |
| 564 if (start == other_start) { | |
| 565 UsePosition* pos = first_pos(); | |
| 566 if (pos == nullptr) return false; | |
| 567 UsePosition* other_pos = other->first_pos(); | |
| 568 if (other_pos == nullptr) return true; | |
| 569 return pos->pos() < other_pos->pos(); | |
| 570 } | |
| 571 return start < other_start; | |
| 572 } | |
| 573 | |
| 574 | |
| 575 void LiveRange::SetUseHints(int register_index) { | |
| 576 for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) { | |
| 577 if (!pos->HasOperand()) continue; | |
| 578 switch (pos->type()) { | |
| 579 case UsePositionType::kRequiresSlot: | |
| 580 break; | |
| 581 case UsePositionType::kRequiresRegister: | |
| 582 case UsePositionType::kAny: | |
| 583 pos->set_assigned_register(register_index); | |
| 584 break; | |
| 585 } | |
| 586 } | |
| 587 } | |
| 588 | |
| 589 | |
| 590 bool LiveRange::CanCover(LifetimePosition position) const { | |
| 591 if (IsEmpty()) return false; | |
| 592 return Start() <= position && position < End(); | |
| 593 } | |
| 594 | |
| 595 | |
| 596 bool LiveRange::Covers(LifetimePosition position) const { | |
| 597 if (!CanCover(position)) return false; | |
| 598 auto start_search = FirstSearchIntervalForPosition(position); | |
| 599 for (auto interval = start_search; interval != nullptr; | |
| 600 interval = interval->next()) { | |
| 601 DCHECK(interval->next() == nullptr || | |
| 602 interval->next()->start() >= interval->start()); | |
| 603 AdvanceLastProcessedMarker(interval, position); | |
| 604 if (interval->Contains(position)) return true; | |
| 605 if (interval->start() > position) return false; | |
| 606 } | |
| 607 return false; | |
| 608 } | |
| 609 | |
| 610 | |
| 611 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const { | |
| 612 auto b = other->first_interval(); | |
| 613 if (b == nullptr) return LifetimePosition::Invalid(); | |
| 614 auto advance_last_processed_up_to = b->start(); | |
| 615 auto a = FirstSearchIntervalForPosition(b->start()); | |
| 616 while (a != nullptr && b != nullptr) { | |
| 617 if (a->start() > other->End()) break; | |
| 618 if (b->start() > End()) break; | |
| 619 auto cur_intersection = a->Intersect(b); | |
| 620 if (cur_intersection.IsValid()) { | |
| 621 return cur_intersection; | |
| 622 } | |
| 623 if (a->start() < b->start()) { | |
| 624 a = a->next(); | |
| 625 if (a == nullptr || a->start() > other->End()) break; | |
| 626 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); | |
| 627 } else { | |
| 628 b = b->next(); | |
| 629 } | |
| 630 } | |
| 631 return LifetimePosition::Invalid(); | |
| 632 } | |
| 633 | |
| 634 | |
| 635 unsigned LiveRange::GetSize() { | |
| 636 if (size_ == kInvalidSize) { | |
| 637 size_ = 0; | |
| 638 for (auto interval = first_interval(); interval != nullptr; | |
| 639 interval = interval->next()) { | |
| 640 size_ += (interval->end().value() - interval->start().value()); | |
| 641 } | |
| 642 } | |
| 643 | |
| 644 return static_cast<unsigned>(size_); | |
| 645 } | |
| 646 | |
| 647 | |
| 648 struct TopLevelLiveRange::SpillAtDefinitionList : ZoneObject { | |
| 649 SpillAtDefinitionList(int gap_index, InstructionOperand* operand, | |
| 650 SpillAtDefinitionList* next) | |
| 651 : gap_index(gap_index), operand(operand), next(next) {} | |
| 652 const int gap_index; | |
| 653 InstructionOperand* const operand; | |
| 654 SpillAtDefinitionList* const next; | |
| 655 }; | |
| 656 | |
| 657 | |
| 658 TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineType machine_type) | |
| 659 : LiveRange(0, machine_type, this), | |
| 660 vreg_(vreg), | |
| 661 last_child_id_(0), | |
| 662 splintered_from_(nullptr), | |
| 663 spill_operand_(nullptr), | |
| 664 spills_at_definition_(nullptr), | |
| 665 spilled_in_deferred_blocks_(false), | |
| 666 spill_start_index_(kMaxInt) { | |
| 667 bits_ |= SpillTypeField::encode(SpillType::kNoSpillType); | |
| 668 } | |
| 669 | |
| 670 | |
| 671 void TopLevelLiveRange::SpillAtDefinition(Zone* zone, int gap_index, | |
| 672 InstructionOperand* operand) { | |
| 673 DCHECK(HasNoSpillType()); | |
| 674 spills_at_definition_ = new (zone) | |
| 675 SpillAtDefinitionList(gap_index, operand, spills_at_definition_); | |
| 676 } | |
| 677 | |
| 678 | |
| 679 bool TopLevelLiveRange::TryCommitSpillInDeferredBlock( | |
| 680 InstructionSequence* code, const InstructionOperand& spill_operand) { | |
| 681 if (!FLAG_turbo_preprocess_ranges || IsEmpty() || HasNoSpillType() || | |
| 682 spill_operand.IsConstant() || spill_operand.IsImmediate()) { | |
| 683 return false; | |
| 684 } | |
| 685 | |
| 686 int count = 0; | |
| 687 for (const LiveRange* child = this; child != nullptr; child = child->next()) { | |
| 688 int first_instr = child->Start().ToInstructionIndex(); | |
| 689 | |
| 690 // If the range starts at instruction end, the first instruction index is | |
| 691 // the next one. | |
| 692 if (!child->Start().IsGapPosition() && !child->Start().IsStart()) { | |
| 693 ++first_instr; | |
| 694 } | |
| 695 | |
| 696 // We only look at where the range starts. It doesn't matter where it ends: | |
| 697 // if it ends past this block, then either there is a phi there already, | |
| 698 // or ResolveControlFlow will adapt the last instruction gap of this block | |
| 699 // as if there were a phi. In either case, data flow will be correct. | |
| 700 const InstructionBlock* block = code->GetInstructionBlock(first_instr); | |
| 701 | |
| 702 // If we have slot uses in a subrange, bail out, because we need the value | |
| 703 // on the stack before that use. | |
| 704 bool has_slot_use = child->NextSlotPosition(child->Start()) != nullptr; | |
| 705 if (!block->IsDeferred()) { | |
| 706 if (child->spilled() || has_slot_use) { | |
| 707 TRACE( | |
| 708 "Live Range %d must be spilled at definition: found a " | |
| 709 "slot-requiring non-deferred child range %d.\n", | |
| 710 TopLevel()->vreg(), child->relative_id()); | |
| 711 return false; | |
| 712 } | |
| 713 } else { | |
| 714 if (child->spilled() || has_slot_use) ++count; | |
| 715 } | |
| 716 } | |
| 717 if (count == 0) return false; | |
| 718 | |
| 719 spill_start_index_ = -1; | |
| 720 spilled_in_deferred_blocks_ = true; | |
| 721 | |
| 722 TRACE("Live Range %d will be spilled only in deferred blocks.\n", vreg()); | |
| 723 // If we have ranges that aren't spilled but require the operand on the stack, | |
| 724 // make sure we insert the spill. | |
| 725 for (const LiveRange* child = this; child != nullptr; child = child->next()) { | |
| 726 if (!child->spilled() && | |
| 727 child->NextSlotPosition(child->Start()) != nullptr) { | |
| 728 auto instr = code->InstructionAt(child->Start().ToInstructionIndex()); | |
| 729 // Insert spill at the end to let live range connections happen at START. | |
| 730 auto move = | |
| 731 instr->GetOrCreateParallelMove(Instruction::END, code->zone()); | |
| 732 InstructionOperand assigned = child->GetAssignedOperand(); | |
| 733 if (TopLevel()->has_slot_use()) { | |
| 734 bool found = false; | |
| 735 for (auto move_op : *move) { | |
| 736 if (move_op->IsEliminated()) continue; | |
| 737 if (move_op->source().Equals(assigned) && | |
| 738 move_op->destination().Equals(spill_operand)) { | |
| 739 found = true; | |
| 740 break; | |
| 741 } | |
| 742 } | |
| 743 if (found) continue; | |
| 744 } | |
| 745 | |
| 746 move->AddMove(assigned, spill_operand); | |
| 747 } | |
| 748 } | |
| 749 | |
| 750 return true; | |
| 751 } | |
| 752 | |
| 753 | |
| 754 void TopLevelLiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence, | |
| 755 const InstructionOperand& op, | |
| 756 bool might_be_duplicated) { | |
| 757 DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr); | |
| 758 auto zone = sequence->zone(); | |
| 759 | |
| 760 for (auto to_spill = spills_at_definition_; to_spill != nullptr; | |
| 761 to_spill = to_spill->next) { | |
| 762 auto instr = sequence->InstructionAt(to_spill->gap_index); | |
| 763 auto move = instr->GetOrCreateParallelMove(Instruction::START, zone); | |
| 764 // Skip insertion if it's possible that the move exists already as a | |
| 765 // constraint move from a fixed output register to a slot. | |
| 766 if (might_be_duplicated) { | |
| 767 bool found = false; | |
| 768 for (auto move_op : *move) { | |
| 769 if (move_op->IsEliminated()) continue; | |
| 770 if (move_op->source().Equals(*to_spill->operand) && | |
| 771 move_op->destination().Equals(op)) { | |
| 772 found = true; | |
| 773 break; | |
| 774 } | |
| 775 } | |
| 776 if (found) continue; | |
| 777 } | |
| 778 move->AddMove(*to_spill->operand, op); | |
| 779 } | |
| 780 } | |
| 781 | |
| 782 | |
| 783 void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) { | |
| 784 DCHECK(HasNoSpillType()); | |
| 785 DCHECK(!operand->IsUnallocated() && !operand->IsImmediate()); | |
| 786 set_spill_type(SpillType::kSpillOperand); | |
| 787 spill_operand_ = operand; | |
| 788 } | |
| 789 | |
| 790 | |
| 791 void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) { | |
| 792 DCHECK(!HasSpillOperand()); | |
| 793 DCHECK(spill_range); | |
| 794 spill_range_ = spill_range; | |
| 795 } | |
| 796 | |
| 797 | |
| 798 AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const { | |
| 799 auto spill_range = GetSpillRange(); | |
| 800 int index = spill_range->assigned_slot(); | |
| 801 switch (kind()) { | |
| 802 case GENERAL_REGISTERS: | |
| 803 return StackSlotOperand(machine_type(), index); | |
| 804 case DOUBLE_REGISTERS: | |
| 805 return DoubleStackSlotOperand(machine_type(), index); | |
| 806 } | |
| 807 UNREACHABLE(); | |
| 808 return StackSlotOperand(kMachNone, 0); | |
| 809 } | |
| 810 | |
| 811 | |
| 812 void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end, | |
| 813 TopLevelLiveRange* result, Zone* zone) { | |
| 674 DCHECK(start != Start() || end != End()); | 814 DCHECK(start != Start() || end != End()); |
| 675 DCHECK(start < end); | 815 DCHECK(start < end); |
| 676 | 816 |
| 677 result->set_spill_type(spill_type()); | 817 result->set_spill_type(spill_type()); |
| 678 | 818 |
| 679 if (start <= Start()) { | 819 if (start <= Start()) { |
| 680 DCHECK(end < End()); | 820 DCHECK(end < End()); |
| 681 SplitAt(end, result, zone); | 821 DetachAt(end, result, zone); |
| 682 next_ = nullptr; | 822 next_ = nullptr; |
| 683 } else if (end >= End()) { | 823 } else if (end >= End()) { |
| 684 DCHECK(start > Start()); | 824 DCHECK(start > Start()); |
| 685 SplitAt(start, result, zone); | 825 DetachAt(start, result, zone); |
| 686 next_ = nullptr; | 826 next_ = nullptr; |
| 687 } else { | 827 } else { |
| 688 DCHECK(start < End() && Start() < end); | 828 DCHECK(start < End() && Start() < end); |
| 689 | 829 |
| 690 const int kInvalidId = std::numeric_limits<int>::max(); | 830 const int kInvalidId = std::numeric_limits<int>::max(); |
| 691 | 831 |
| 692 SplitAt(start, result, zone); | 832 DetachAt(start, result, zone); |
| 693 | 833 |
| 694 LiveRange end_part(kInvalidId, this->machine_type()); | 834 LiveRange end_part(kInvalidId, this->machine_type(), nullptr); |
| 695 result->SplitAt(end, &end_part, zone); | 835 result->DetachAt(end, &end_part, zone); |
| 696 | 836 |
| 697 next_ = end_part.next_; | 837 next_ = end_part.next_; |
| 698 last_interval_->set_next(end_part.first_interval_); | 838 last_interval_->set_next(end_part.first_interval_); |
| 699 last_interval_ = end_part.last_interval_; | 839 last_interval_ = end_part.last_interval_; |
| 700 | 840 |
| 701 | 841 |
| 702 if (first_pos_ == nullptr) { | 842 if (first_pos_ == nullptr) { |
| 703 first_pos_ = end_part.first_pos_; | 843 first_pos_ = end_part.first_pos_; |
| 704 } else { | 844 } else { |
| 705 UsePosition* pos = first_pos_; | 845 UsePosition* pos = first_pos_; |
| 706 for (; pos->next() != nullptr; pos = pos->next()) { | 846 for (; pos->next() != nullptr; pos = pos->next()) { |
| 707 } | 847 } |
| 708 pos->set_next(end_part.first_pos_); | 848 pos->set_next(end_part.first_pos_); |
| 709 } | 849 } |
| 710 } | 850 } |
| 711 result->next_ = nullptr; | 851 result->next_ = nullptr; |
| 712 result->parent_ = nullptr; | 852 result->parent_ = result; |
| 713 | 853 |
| 714 result->SetSplinteredFrom(this); | 854 result->SetSplinteredFrom(this); |
| 715 } | 855 } |
| 716 | 856 |
| 717 | 857 |
| 718 void LiveRange::SetSplinteredFrom(LiveRange* splinter_parent) { | 858 void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) { |
| 719 // The splinter parent is always the original "Top". | 859 // The splinter parent is always the original "Top". |
| 720 DCHECK(splinter_parent->Start() < Start()); | 860 DCHECK(splinter_parent->Start() < Start()); |
| 721 DCHECK(!splinter_parent->IsChild()); | |
| 722 | 861 |
| 723 splintered_from_ = splinter_parent; | 862 splintered_from_ = splinter_parent; |
| 724 if (!HasSpillOperand()) { | 863 if (!HasSpillOperand()) { |
| 725 SetSpillRange(splinter_parent->spill_range_); | 864 SetSpillRange(splinter_parent->spill_range_); |
| 726 } | 865 } |
| 727 } | 866 } |
| 728 | 867 |
| 729 | 868 |
| 730 void LiveRange::AppendChild(LiveRange* other) { | 869 void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) { |
| 731 DCHECK(!other->IsChild()); | |
| 732 next_ = other; | |
| 733 | |
| 734 other->UpdateParentForAllChildren(TopLevel()); | |
| 735 TopLevel()->UpdateSpillRangePostMerge(other); | |
| 736 } | |
| 737 | |
| 738 | |
| 739 void LiveRange::UpdateSpillRangePostMerge(LiveRange* merged) { | |
| 740 DCHECK(!IsChild()); | |
| 741 DCHECK(merged->TopLevel() == this); | 870 DCHECK(merged->TopLevel() == this); |
| 742 | 871 |
| 743 if (HasNoSpillType() && merged->HasSpillRange()) { | 872 if (HasNoSpillType() && merged->HasSpillRange()) { |
| 744 set_spill_type(merged->spill_type()); | 873 set_spill_type(merged->spill_type()); |
| 745 DCHECK(GetSpillRange()->live_ranges().size() > 0); | 874 DCHECK(GetSpillRange()->live_ranges().size() > 0); |
| 746 merged->spill_range_ = nullptr; | 875 merged->spill_range_ = nullptr; |
| 747 merged->bits_ = SpillTypeField::update(merged->bits_, | 876 merged->bits_ = |
| 748 LiveRange::SpillType::kNoSpillType); | 877 SpillTypeField::update(merged->bits_, SpillType::kNoSpillType); |
| 749 } | 878 } |
| 750 } | 879 } |
| 751 | 880 |
| 752 | 881 |
| 753 void LiveRange::UpdateParentForAllChildren(LiveRange* new_parent) { | 882 LiveRange* TopLevelLiveRange::GetLastChild() { |
| 754 LiveRange* child = this; | |
| 755 for (; child != nullptr; child = child->next()) { | |
| 756 child->parent_ = new_parent; | |
| 757 } | |
| 758 } | |
| 759 | |
| 760 | |
| 761 LiveRange* LiveRange::GetLastChild() { | |
| 762 LiveRange* ret = this; | 883 LiveRange* ret = this; |
| 763 for (; ret->next() != nullptr; ret = ret->next()) { | 884 for (; ret->next() != nullptr; ret = ret->next()) { |
| 764 } | 885 } |
| 765 return ret; | 886 return ret; |
| 766 } | 887 } |
| 767 | 888 |
| 768 | 889 |
| 769 void LiveRange::Merge(LiveRange* other, RegisterAllocationData* data) { | 890 void TopLevelLiveRange::Merge(TopLevelLiveRange* other, |
| 770 DCHECK(!IsChild()); | 891 RegisterAllocationData* data) { |
| 771 DCHECK(!other->IsChild()); | |
| 772 DCHECK(Start() < other->Start()); | 892 DCHECK(Start() < other->Start()); |
| 773 | 893 |
| 894 data->live_ranges()[other->vreg()] = nullptr; | |
| 895 | |
| 774 LiveRange* last_other = other->GetLastChild(); | 896 LiveRange* last_other = other->GetLastChild(); |
| 775 LiveRange* last_me = GetLastChild(); | 897 LiveRange* last_me = GetLastChild(); |
| 776 | 898 |
| 777 // Simple case: we just append at the end. | 899 // Simple case: we just append at the end. |
| 778 if (last_me->End() <= other->Start()) return last_me->AppendChild(other); | 900 if (last_me->End() <= other->Start()) return last_me->AppendAsChild(other); |
| 779 | 901 |
| 780 DCHECK(last_me->End() > last_other->End()); | 902 DCHECK(last_me->End() > last_other->End()); |
| 781 | 903 |
| 782 // In the more general case, we need to find the ranges between which to | 904 // In the more general case, we need to find the ranges between which to |
| 783 // insert. | 905 // insert. |
| 784 LiveRange* insertion_point = this; | 906 LiveRange* insertion_point = this; |
| 785 for (; insertion_point->next() != nullptr && | 907 for (; insertion_point->next() != nullptr && |
| 786 insertion_point->next()->Start() <= other->Start(); | 908 insertion_point->next()->Start() <= other->Start(); |
| 787 insertion_point = insertion_point->next()) { | 909 insertion_point = insertion_point->next()) { |
| 788 } | 910 } |
| 789 | 911 |
| 790 // When we splintered the original range, we reconstituted the original range | 912 // When we splintered the original range, we reconstituted the original range |
| 791 // into one range without children, but with discontinuities. To merge the | 913 // into one range without children, but with discontinuities. To merge the |
| 792 // splinter back in, we need to split the range - or a child obtained after | 914 // splinter back in, we need to split the range - or a child obtained after |
| 793 // register allocation splitting. | 915 // register allocation splitting. |
| 794 LiveRange* after = insertion_point->next(); | 916 LiveRange* after = insertion_point->next(); |
| 795 if (insertion_point->End() > other->Start()) { | 917 if (insertion_point->End() > other->Start()) { |
| 796 LiveRange* new_after = data->NewChildRangeFor(insertion_point); | 918 LiveRange* new_after = |
| 797 insertion_point->SplitAt(other->Start(), new_after, | 919 insertion_point->SplitAt(other->Start(), data->allocation_zone()); |
| 798 data->allocation_zone()); | |
| 799 new_after->set_spilled(insertion_point->spilled()); | 920 new_after->set_spilled(insertion_point->spilled()); |
| 800 if (!new_after->spilled()) | 921 if (!new_after->spilled()) |
| 801 new_after->set_assigned_register(insertion_point->assigned_register()); | 922 new_after->set_assigned_register(insertion_point->assigned_register()); |
| 802 after = new_after; | 923 after = new_after; |
| 803 } | 924 } |
| 804 | 925 |
| 805 last_other->next_ = after; | 926 last_other->next_ = after; |
| 806 insertion_point->next_ = other; | 927 insertion_point->next_ = other; |
| 807 other->UpdateParentForAllChildren(TopLevel()); | 928 other->UpdateParentForAllChildren(TopLevel()); |
| 808 TopLevel()->UpdateSpillRangePostMerge(other); | 929 TopLevel()->UpdateSpillRangePostMerge(other); |
| 809 } | 930 } |
| 810 | 931 |
| 811 | 932 |
| 812 // This implements an ordering on live ranges so that they are ordered by their | 933 void TopLevelLiveRange::ShortenTo(LifetimePosition start) { |
| 813 // start positions. This is needed for the correctness of the register | 934 TRACE("Shorten live range %d to [%d\n", vreg(), start.value()); |
| 814 // allocation algorithm. If two live ranges start at the same offset then there | |
| 815 // is a tie breaker based on where the value is first used. This part of the | |
| 816 // ordering is merely a heuristic. | |
| 817 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const { | |
| 818 LifetimePosition start = Start(); | |
| 819 LifetimePosition other_start = other->Start(); | |
| 820 if (start == other_start) { | |
| 821 UsePosition* pos = first_pos(); | |
| 822 if (pos == nullptr) return false; | |
| 823 UsePosition* other_pos = other->first_pos(); | |
| 824 if (other_pos == nullptr) return true; | |
| 825 return pos->pos() < other_pos->pos(); | |
| 826 } | |
| 827 return start < other_start; | |
| 828 } | |
| 829 | |
| 830 | |
| 831 void LiveRange::ShortenTo(LifetimePosition start) { | |
| 832 TRACE("Shorten live range %d to [%d\n", id_, start.value()); | |
| 833 DCHECK(first_interval_ != nullptr); | 935 DCHECK(first_interval_ != nullptr); |
| 834 DCHECK(first_interval_->start() <= start); | 936 DCHECK(first_interval_->start() <= start); |
| 835 DCHECK(start < first_interval_->end()); | 937 DCHECK(start < first_interval_->end()); |
| 836 first_interval_->set_start(start); | 938 first_interval_->set_start(start); |
| 837 } | 939 } |
| 838 | 940 |
| 839 | 941 |
| 840 void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end, | 942 void TopLevelLiveRange::EnsureInterval(LifetimePosition start, |
| 841 Zone* zone) { | 943 LifetimePosition end, Zone* zone) { |
| 842 TRACE("Ensure live range %d in interval [%d %d[\n", id_, start.value(), | 944 TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(), |
| 843 end.value()); | 945 end.value()); |
| 844 auto new_end = end; | 946 auto new_end = end; |
| 845 while (first_interval_ != nullptr && first_interval_->start() <= end) { | 947 while (first_interval_ != nullptr && first_interval_->start() <= end) { |
| 846 if (first_interval_->end() > end) { | 948 if (first_interval_->end() > end) { |
| 847 new_end = first_interval_->end(); | 949 new_end = first_interval_->end(); |
| 848 } | 950 } |
| 849 first_interval_ = first_interval_->next(); | 951 first_interval_ = first_interval_->next(); |
| 850 } | 952 } |
| 851 | 953 |
| 852 auto new_interval = new (zone) UseInterval(start, new_end); | 954 auto new_interval = new (zone) UseInterval(start, new_end); |
| 853 new_interval->set_next(first_interval_); | 955 new_interval->set_next(first_interval_); |
| 854 first_interval_ = new_interval; | 956 first_interval_ = new_interval; |
| 855 if (new_interval->next() == nullptr) { | 957 if (new_interval->next() == nullptr) { |
| 856 last_interval_ = new_interval; | 958 last_interval_ = new_interval; |
| 857 } | 959 } |
| 858 } | 960 } |
| 859 | 961 |
| 860 | 962 |
| 861 void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end, | 963 void TopLevelLiveRange::AddUseInterval(LifetimePosition start, |
| 862 Zone* zone) { | 964 LifetimePosition end, Zone* zone) { |
| 863 TRACE("Add to live range %d interval [%d %d[\n", id_, start.value(), | 965 TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(), |
| 864 end.value()); | 966 end.value()); |
| 865 if (first_interval_ == nullptr) { | 967 if (first_interval_ == nullptr) { |
| 866 auto interval = new (zone) UseInterval(start, end); | 968 auto interval = new (zone) UseInterval(start, end); |
| 867 first_interval_ = interval; | 969 first_interval_ = interval; |
| 868 last_interval_ = interval; | 970 last_interval_ = interval; |
| 869 } else { | 971 } else { |
| 870 if (end == first_interval_->start()) { | 972 if (end == first_interval_->start()) { |
| 871 first_interval_->set_start(start); | 973 first_interval_->set_start(start); |
| 872 } else if (end < first_interval_->start()) { | 974 } else if (end < first_interval_->start()) { |
| 873 auto interval = new (zone) UseInterval(start, end); | 975 auto interval = new (zone) UseInterval(start, end); |
| 874 interval->set_next(first_interval_); | 976 interval->set_next(first_interval_); |
| 875 first_interval_ = interval; | 977 first_interval_ = interval; |
| 876 } else { | 978 } else { |
| 877 // Order of instruction's processing (see ProcessInstructions) guarantees | 979 // Order of instruction's processing (see ProcessInstructions) guarantees |
| 878 // that each new use interval either precedes or intersects with | 980 // that each new use interval either precedes or intersects with |
| 879 // last added interval. | 981 // last added interval. |
| 880 DCHECK(start < first_interval_->end()); | 982 DCHECK(start < first_interval_->end()); |
| 881 first_interval_->set_start(Min(start, first_interval_->start())); | 983 first_interval_->set_start(Min(start, first_interval_->start())); |
| 882 first_interval_->set_end(Max(end, first_interval_->end())); | 984 first_interval_->set_end(Max(end, first_interval_->end())); |
| 883 } | 985 } |
| 884 } | 986 } |
| 885 } | 987 } |
| 886 | 988 |
| 887 | 989 |
| 888 void LiveRange::AddUsePosition(UsePosition* use_pos) { | 990 void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) { |
| 889 auto pos = use_pos->pos(); | 991 auto pos = use_pos->pos(); |
| 890 TRACE("Add to live range %d use position %d\n", id_, pos.value()); | 992 TRACE("Add to live range %d use position %d\n", vreg(), pos.value()); |
| 891 UsePosition* prev_hint = nullptr; | 993 UsePosition* prev_hint = nullptr; |
| 892 UsePosition* prev = nullptr; | 994 UsePosition* prev = nullptr; |
| 893 auto current = first_pos_; | 995 auto current = first_pos_; |
| 894 while (current != nullptr && current->pos() < pos) { | 996 while (current != nullptr && current->pos() < pos) { |
| 895 prev_hint = current->HasHint() ? current : prev_hint; | 997 prev_hint = current->HasHint() ? current : prev_hint; |
| 896 prev = current; | 998 prev = current; |
| 897 current = current->next(); | 999 current = current->next(); |
| 898 } | 1000 } |
| 899 | 1001 |
| 900 if (prev == nullptr) { | 1002 if (prev == nullptr) { |
| 901 use_pos->set_next(first_pos_); | 1003 use_pos->set_next(first_pos_); |
| 902 first_pos_ = use_pos; | 1004 first_pos_ = use_pos; |
| 903 } else { | 1005 } else { |
| 904 use_pos->set_next(prev->next()); | 1006 use_pos->set_next(prev->next()); |
| 905 prev->set_next(use_pos); | 1007 prev->set_next(use_pos); |
| 906 } | 1008 } |
| 907 | 1009 |
| 908 if (prev_hint == nullptr && use_pos->HasHint()) { | 1010 if (prev_hint == nullptr && use_pos->HasHint()) { |
| 909 current_hint_position_ = use_pos; | 1011 current_hint_position_ = use_pos; |
| 910 } | 1012 } |
| 911 } | 1013 } |
| 912 | 1014 |
| 913 | 1015 |
| 914 void LiveRange::ConvertUsesToOperand(const InstructionOperand& op, | |
| 915 const InstructionOperand& spill_op) { | |
| 916 for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) { | |
| 917 DCHECK(Start() <= pos->pos() && pos->pos() <= End()); | |
| 918 if (!pos->HasOperand()) continue; | |
| 919 switch (pos->type()) { | |
| 920 case UsePositionType::kRequiresSlot: | |
| 921 DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot()); | |
| 922 InstructionOperand::ReplaceWith(pos->operand(), &spill_op); | |
| 923 break; | |
| 924 case UsePositionType::kRequiresRegister: | |
| 925 DCHECK(op.IsRegister() || op.IsDoubleRegister()); | |
| 926 // Fall through. | |
| 927 case UsePositionType::kAny: | |
| 928 InstructionOperand::ReplaceWith(pos->operand(), &op); | |
| 929 break; | |
| 930 } | |
| 931 } | |
| 932 } | |
| 933 | |
| 934 | |
| 935 void LiveRange::SetUseHints(int register_index) { | |
| 936 for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) { | |
| 937 if (!pos->HasOperand()) continue; | |
| 938 switch (pos->type()) { | |
| 939 case UsePositionType::kRequiresSlot: | |
| 940 break; | |
| 941 case UsePositionType::kRequiresRegister: | |
| 942 case UsePositionType::kAny: | |
| 943 pos->set_assigned_register(register_index); | |
| 944 break; | |
| 945 } | |
| 946 } | |
| 947 } | |
| 948 | |
| 949 | |
| 950 bool LiveRange::CanCover(LifetimePosition position) const { | |
| 951 if (IsEmpty()) return false; | |
| 952 return Start() <= position && position < End(); | |
| 953 } | |
| 954 | |
| 955 | |
| 956 bool LiveRange::Covers(LifetimePosition position) const { | |
| 957 if (!CanCover(position)) return false; | |
| 958 auto start_search = FirstSearchIntervalForPosition(position); | |
| 959 for (auto interval = start_search; interval != nullptr; | |
| 960 interval = interval->next()) { | |
| 961 DCHECK(interval->next() == nullptr || | |
| 962 interval->next()->start() >= interval->start()); | |
| 963 AdvanceLastProcessedMarker(interval, position); | |
| 964 if (interval->Contains(position)) return true; | |
| 965 if (interval->start() > position) return false; | |
| 966 } | |
| 967 return false; | |
| 968 } | |
| 969 | |
| 970 | |
| 971 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const { | |
| 972 auto b = other->first_interval(); | |
| 973 if (b == nullptr) return LifetimePosition::Invalid(); | |
| 974 auto advance_last_processed_up_to = b->start(); | |
| 975 auto a = FirstSearchIntervalForPosition(b->start()); | |
| 976 while (a != nullptr && b != nullptr) { | |
| 977 if (a->start() > other->End()) break; | |
| 978 if (b->start() > End()) break; | |
| 979 auto cur_intersection = a->Intersect(b); | |
| 980 if (cur_intersection.IsValid()) { | |
| 981 return cur_intersection; | |
| 982 } | |
| 983 if (a->start() < b->start()) { | |
| 984 a = a->next(); | |
| 985 if (a == nullptr || a->start() > other->End()) break; | |
| 986 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); | |
| 987 } else { | |
| 988 b = b->next(); | |
| 989 } | |
| 990 } | |
| 991 return LifetimePosition::Invalid(); | |
| 992 } | |
| 993 | |
| 994 | |
| 995 unsigned LiveRange::GetSize() { | |
| 996 if (size_ == kInvalidSize) { | |
| 997 size_ = 0; | |
| 998 for (auto interval = first_interval(); interval != nullptr; | |
| 999 interval = interval->next()) { | |
| 1000 size_ += (interval->end().value() - interval->start().value()); | |
| 1001 } | |
| 1002 } | |
| 1003 | |
| 1004 return static_cast<unsigned>(size_); | |
| 1005 } | |
| 1006 | |
| 1007 | |
| 1008 static bool AreUseIntervalsIntersecting(UseInterval* interval1, | 1016 static bool AreUseIntervalsIntersecting(UseInterval* interval1, |
| 1009 UseInterval* interval2) { | 1017 UseInterval* interval2) { |
| 1010 while (interval1 != nullptr && interval2 != nullptr) { | 1018 while (interval1 != nullptr && interval2 != nullptr) { |
| 1011 if (interval1->start() < interval2->start()) { | 1019 if (interval1->start() < interval2->start()) { |
| 1012 if (interval1->end() > interval2->start()) { | 1020 if (interval1->end() > interval2->start()) { |
| 1013 return true; | 1021 return true; |
| 1014 } | 1022 } |
| 1015 interval1 = interval1->next(); | 1023 interval1 = interval1->next(); |
| 1016 } else { | 1024 } else { |
| 1017 if (interval2->end() > interval1->start()) { | 1025 if (interval2->end() > interval1->start()) { |
| 1018 return true; | 1026 return true; |
| 1019 } | 1027 } |
| 1020 interval2 = interval2->next(); | 1028 interval2 = interval2->next(); |
| 1021 } | 1029 } |
| 1022 } | 1030 } |
| 1023 return false; | 1031 return false; |
| 1024 } | 1032 } |
| 1025 | 1033 |
| 1026 | 1034 |
| 1027 std::ostream& operator<<(std::ostream& os, | 1035 std::ostream& operator<<(std::ostream& os, |
| 1028 const PrintableLiveRange& printable_range) { | 1036 const PrintableLiveRange& printable_range) { |
| 1029 const LiveRange* range = printable_range.range_; | 1037 const LiveRange* range = printable_range.range_; |
| 1030 os << "Range: " << range->id() << " "; | 1038 os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id() |
| 1031 if (range->is_phi()) os << "phi "; | 1039 << " "; |
| 1032 if (range->is_non_loop_phi()) os << "nlphi "; | 1040 if (range->TopLevel()->is_phi()) os << "phi "; |
| 1041 if (range->TopLevel()->is_non_loop_phi()) os << "nlphi "; | |
| 1033 | 1042 |
| 1034 os << "{" << std::endl; | 1043 os << "{" << std::endl; |
| 1035 auto interval = range->first_interval(); | 1044 auto interval = range->first_interval(); |
| 1036 auto use_pos = range->first_pos(); | 1045 auto use_pos = range->first_pos(); |
| 1037 PrintableInstructionOperand pio; | 1046 PrintableInstructionOperand pio; |
| 1038 pio.register_configuration_ = printable_range.register_configuration_; | 1047 pio.register_configuration_ = printable_range.register_configuration_; |
| 1039 while (use_pos != nullptr) { | 1048 while (use_pos != nullptr) { |
| 1040 pio.op_ = *use_pos->operand(); | 1049 pio.op_ = *use_pos->operand(); |
| 1041 os << pio << use_pos->pos() << " "; | 1050 os << pio << use_pos->pos() << " "; |
| 1042 use_pos = use_pos->next(); | 1051 use_pos = use_pos->next(); |
| 1043 } | 1052 } |
| 1044 os << std::endl; | 1053 os << std::endl; |
| 1045 | 1054 |
| 1046 while (interval != nullptr) { | 1055 while (interval != nullptr) { |
| 1047 os << '[' << interval->start() << ", " << interval->end() << ')' | 1056 os << '[' << interval->start() << ", " << interval->end() << ')' |
| 1048 << std::endl; | 1057 << std::endl; |
| 1049 interval = interval->next(); | 1058 interval = interval->next(); |
| 1050 } | 1059 } |
| 1051 os << "}"; | 1060 os << "}"; |
| 1052 return os; | 1061 return os; |
| 1053 } | 1062 } |
| 1054 | 1063 |
| 1055 | 1064 |
| 1056 SpillRange::SpillRange(LiveRange* parent, Zone* zone) | 1065 SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone) |
| 1057 : live_ranges_(zone), | 1066 : live_ranges_(zone), |
| 1058 assigned_slot_(kUnassignedSlot), | 1067 assigned_slot_(kUnassignedSlot), |
| 1059 byte_width_(GetByteWidth(parent->machine_type())), | 1068 byte_width_(GetByteWidth(parent->machine_type())), |
| 1060 kind_(parent->kind()) { | 1069 kind_(parent->kind()) { |
| 1061 // Spill ranges are created for top level, non-splintered ranges. This is so | 1070 // Spill ranges are created for top level, non-splintered ranges. This is so |
| 1062 // that, when merging decisions are made, we consider the full extent of the | 1071 // that, when merging decisions are made, we consider the full extent of the |
| 1063 // virtual register, and avoid clobbering it. | 1072 // virtual register, and avoid clobbering it. |
| 1064 DCHECK(!parent->IsChild()); | |
| 1065 DCHECK(!parent->IsSplinter()); | 1073 DCHECK(!parent->IsSplinter()); |
| 1066 UseInterval* result = nullptr; | 1074 UseInterval* result = nullptr; |
| 1067 UseInterval* node = nullptr; | 1075 UseInterval* node = nullptr; |
| 1068 // Copy the intervals for all ranges. | 1076 // Copy the intervals for all ranges. |
| 1069 for (auto range = parent; range != nullptr; range = range->next()) { | 1077 for (LiveRange* range = parent; range != nullptr; range = range->next()) { |
| 1070 auto src = range->first_interval(); | 1078 auto src = range->first_interval(); |
| 1071 while (src != nullptr) { | 1079 while (src != nullptr) { |
| 1072 auto new_node = new (zone) UseInterval(src->start(), src->end()); | 1080 auto new_node = new (zone) UseInterval(src->start(), src->end()); |
| 1073 if (result == nullptr) { | 1081 if (result == nullptr) { |
| 1074 result = new_node; | 1082 result = new_node; |
| 1075 } else { | 1083 } else { |
| 1076 node->set_next(new_node); | 1084 node->set_next(new_node); |
| 1077 } | 1085 } |
| 1078 node = new_node; | 1086 node = new_node; |
| 1079 src = src->next(); | 1087 src = src->next(); |
| (...skipping 140 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1220 return moves->AddMove(from, to); | 1228 return moves->AddMove(from, to); |
| 1221 } | 1229 } |
| 1222 | 1230 |
| 1223 | 1231 |
| 1224 MachineType RegisterAllocationData::MachineTypeFor(int virtual_register) { | 1232 MachineType RegisterAllocationData::MachineTypeFor(int virtual_register) { |
| 1225 DCHECK_LT(virtual_register, code()->VirtualRegisterCount()); | 1233 DCHECK_LT(virtual_register, code()->VirtualRegisterCount()); |
| 1226 return code()->GetRepresentation(virtual_register); | 1234 return code()->GetRepresentation(virtual_register); |
| 1227 } | 1235 } |
| 1228 | 1236 |
| 1229 | 1237 |
| 1230 LiveRange* RegisterAllocationData::LiveRangeFor(int index) { | 1238 TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) { |
| 1231 if (index >= static_cast<int>(live_ranges().size())) { | 1239 if (index >= static_cast<int>(live_ranges().size())) { |
| 1232 live_ranges().resize(index + 1, nullptr); | 1240 live_ranges().resize(index + 1, nullptr); |
| 1233 } | 1241 } |
| 1234 auto result = live_ranges()[index]; | 1242 auto result = live_ranges()[index]; |
| 1235 if (result == nullptr) { | 1243 if (result == nullptr) { |
| 1236 result = NewLiveRange(index, MachineTypeFor(index)); | 1244 result = NewLiveRange(index, MachineTypeFor(index)); |
| 1237 live_ranges()[index] = result; | 1245 live_ranges()[index] = result; |
| 1238 } | 1246 } |
| 1239 return result; | 1247 return result; |
| 1240 } | 1248 } |
| 1241 | 1249 |
| 1242 | 1250 |
| 1243 LiveRange* RegisterAllocationData::NewLiveRange(int index, | 1251 TopLevelLiveRange* RegisterAllocationData::NewLiveRange( |
| 1244 MachineType machine_type) { | 1252 int index, MachineType machine_type) { |
| 1245 return new (allocation_zone()) LiveRange(index, machine_type); | 1253 return new (allocation_zone()) TopLevelLiveRange(index, machine_type); |
| 1246 } | 1254 } |
| 1247 | 1255 |
| 1248 | 1256 |
| 1249 LiveRange* RegisterAllocationData::NextLiveRange(MachineType machine_type) { | 1257 int RegisterAllocationData::GetNextLiveRangeId() { |
| 1250 int vreg = virtual_register_count_++; | 1258 int vreg = virtual_register_count_++; |
| 1251 if (vreg >= static_cast<int>(live_ranges().size())) { | 1259 if (vreg >= static_cast<int>(live_ranges().size())) { |
| 1252 live_ranges().resize(vreg + 1, nullptr); | 1260 live_ranges().resize(vreg + 1, nullptr); |
| 1253 } | 1261 } |
| 1254 LiveRange* ret = NewLiveRange(vreg, machine_type); | 1262 return vreg; |
| 1263 } | |
| 1264 | |
| 1265 | |
| 1266 TopLevelLiveRange* RegisterAllocationData::NextLiveRange( | |
| 1267 MachineType machine_type) { | |
| 1268 int vreg = GetNextLiveRangeId(); | |
| 1269 TopLevelLiveRange* ret = NewLiveRange(vreg, machine_type); | |
| 1255 return ret; | 1270 return ret; |
| 1256 } | 1271 } |
| 1257 | 1272 |
| 1258 | 1273 |
| 1259 LiveRange* RegisterAllocationData::NewChildRangeFor(LiveRange* range) { | |
| 1260 auto child = NextLiveRange(range->machine_type()); | |
| 1261 DCHECK_NULL(live_ranges()[child->id()]); | |
| 1262 live_ranges()[child->id()] = child; | |
| 1263 return child; | |
| 1264 } | |
| 1265 | |
| 1266 | |
| 1267 RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap( | 1274 RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap( |
| 1268 const InstructionBlock* block, PhiInstruction* phi) { | 1275 const InstructionBlock* block, PhiInstruction* phi) { |
| 1269 auto map_value = new (allocation_zone()) | 1276 auto map_value = new (allocation_zone()) |
| 1270 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone()); | 1277 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone()); |
| 1271 auto res = | 1278 auto res = |
| 1272 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value)); | 1279 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value)); |
| 1273 DCHECK(res.second); | 1280 DCHECK(res.second); |
| 1274 USE(res); | 1281 USE(res); |
| 1275 return map_value; | 1282 return map_value; |
| 1276 } | 1283 } |
| 1277 | 1284 |
| 1278 | 1285 |
| 1279 RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor( | 1286 RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor( |
| 1280 int virtual_register) { | 1287 int virtual_register) { |
| 1281 auto it = phi_map_.find(virtual_register); | 1288 auto it = phi_map_.find(virtual_register); |
| 1282 DCHECK(it != phi_map_.end()); | 1289 DCHECK(it != phi_map_.end()); |
| 1283 return it->second; | 1290 return it->second; |
| 1284 } | 1291 } |
| 1285 | 1292 |
| 1286 | 1293 |
| 1294 RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor( | |
| 1295 TopLevelLiveRange* top_range) { | |
| 1296 return GetPhiMapValueFor(top_range->vreg()); | |
| 1297 } | |
| 1298 | |
| 1299 | |
| 1287 bool RegisterAllocationData::ExistsUseWithoutDefinition() { | 1300 bool RegisterAllocationData::ExistsUseWithoutDefinition() { |
| 1288 bool found = false; | 1301 bool found = false; |
| 1289 BitVector::Iterator iterator(live_in_sets()[0]); | 1302 BitVector::Iterator iterator(live_in_sets()[0]); |
| 1290 while (!iterator.Done()) { | 1303 while (!iterator.Done()) { |
| 1291 found = true; | 1304 found = true; |
| 1292 int operand_index = iterator.Current(); | 1305 int operand_index = iterator.Current(); |
| 1293 PrintF("Register allocator error: live v%d reached first block.\n", | 1306 PrintF("Register allocator error: live v%d reached first block.\n", |
| 1294 operand_index); | 1307 operand_index); |
| 1295 LiveRange* range = LiveRangeFor(operand_index); | 1308 LiveRange* range = GetOrCreateLiveRangeFor(operand_index); |
| 1296 PrintF(" (first use is at %d)\n", range->first_pos()->pos().value()); | 1309 PrintF(" (first use is at %d)\n", range->first_pos()->pos().value()); |
| 1297 if (debug_name() == nullptr) { | 1310 if (debug_name() == nullptr) { |
| 1298 PrintF("\n"); | 1311 PrintF("\n"); |
| 1299 } else { | 1312 } else { |
| 1300 PrintF(" (function: %s)\n", debug_name()); | 1313 PrintF(" (function: %s)\n", debug_name()); |
| 1301 } | 1314 } |
| 1302 iterator.Advance(); | 1315 iterator.Advance(); |
| 1303 } | 1316 } |
| 1304 return found; | 1317 return found; |
| 1305 } | 1318 } |
| 1306 | 1319 |
| 1307 | 1320 |
| 1308 SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange( | 1321 SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange( |
| 1309 LiveRange* range) { | 1322 TopLevelLiveRange* range) { |
| 1310 DCHECK(!range->IsChild()); | |
| 1311 DCHECK(!range->HasSpillOperand()); | 1323 DCHECK(!range->HasSpillOperand()); |
| 1312 | 1324 |
| 1313 SpillRange* spill_range = range->GetAllocatedSpillRange(); | 1325 SpillRange* spill_range = range->GetAllocatedSpillRange(); |
| 1314 if (spill_range == nullptr) { | 1326 if (spill_range == nullptr) { |
| 1315 DCHECK(!range->IsSplinter()); | 1327 DCHECK(!range->IsSplinter()); |
| 1316 spill_range = new (allocation_zone()) SpillRange(range, allocation_zone()); | 1328 spill_range = new (allocation_zone()) SpillRange(range, allocation_zone()); |
| 1317 } | 1329 } |
| 1318 range->set_spill_type(LiveRange::SpillType::kSpillRange); | 1330 range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange); |
| 1319 | 1331 |
| 1320 spill_ranges().insert(spill_range); | 1332 spill_ranges().insert(spill_range); |
| 1321 return spill_range; | 1333 return spill_range; |
| 1322 } | 1334 } |
| 1323 | 1335 |
| 1324 | 1336 |
| 1325 SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange( | 1337 SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange( |
| 1326 LiveRange* range) { | 1338 TopLevelLiveRange* range) { |
| 1327 DCHECK(!range->HasSpillOperand()); | 1339 DCHECK(!range->HasSpillOperand()); |
| 1328 DCHECK(!range->IsChild()); | |
| 1329 DCHECK(!range->IsSplinter()); | 1340 DCHECK(!range->IsSplinter()); |
| 1330 auto spill_range = | 1341 auto spill_range = |
| 1331 new (allocation_zone()) SpillRange(range, allocation_zone()); | 1342 new (allocation_zone()) SpillRange(range, allocation_zone()); |
| 1332 return spill_range; | 1343 return spill_range; |
| 1333 } | 1344 } |
| 1334 | 1345 |
| 1335 | 1346 |
| 1336 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) { | 1347 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) { |
| 1337 if (kind == DOUBLE_REGISTERS) { | 1348 if (kind == DOUBLE_REGISTERS) { |
| 1338 assigned_double_registers_->Add(index); | 1349 assigned_double_registers_->Add(index); |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1397 wrapper.op_ = move->destination(); | 1408 wrapper.op_ = move->destination(); |
| 1398 os << wrapper << " = "; | 1409 os << wrapper << " = "; |
| 1399 wrapper.op_ = move->source(); | 1410 wrapper.op_ = move->source(); |
| 1400 os << wrapper << std::endl; | 1411 os << wrapper << std::endl; |
| 1401 } | 1412 } |
| 1402 | 1413 |
| 1403 | 1414 |
| 1404 void RegisterAllocationData::Print(const SpillRange* spill_range) { | 1415 void RegisterAllocationData::Print(const SpillRange* spill_range) { |
| 1405 OFStream os(stdout); | 1416 OFStream os(stdout); |
| 1406 os << "{" << std::endl; | 1417 os << "{" << std::endl; |
| 1407 for (LiveRange* range : spill_range->live_ranges()) { | 1418 for (TopLevelLiveRange* range : spill_range->live_ranges()) { |
| 1408 os << range->id() << " "; | 1419 os << range->vreg() << " "; |
| 1409 } | 1420 } |
| 1410 os << std::endl; | 1421 os << std::endl; |
| 1411 | 1422 |
| 1412 for (UseInterval* interval = spill_range->interval(); interval != nullptr; | 1423 for (UseInterval* interval = spill_range->interval(); interval != nullptr; |
| 1413 interval = interval->next()) { | 1424 interval = interval->next()) { |
| 1414 os << '[' << interval->start() << ", " << interval->end() << ')' | 1425 os << '[' << interval->start() << ", " << interval->end() << ')' |
| 1415 << std::endl; | 1426 << std::endl; |
| 1416 } | 1427 } |
| 1417 os << "}" << std::endl; | 1428 os << "}" << std::endl; |
| 1418 } | 1429 } |
| (...skipping 25 matching lines...) Expand all Loading... | |
| 1444 } else if (operand->HasFixedDoubleRegisterPolicy()) { | 1455 } else if (operand->HasFixedDoubleRegisterPolicy()) { |
| 1445 DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register); | 1456 DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register); |
| 1446 allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER, | 1457 allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER, |
| 1447 machine_type, operand->fixed_register_index()); | 1458 machine_type, operand->fixed_register_index()); |
| 1448 } else { | 1459 } else { |
| 1449 UNREACHABLE(); | 1460 UNREACHABLE(); |
| 1450 } | 1461 } |
| 1451 InstructionOperand::ReplaceWith(operand, &allocated); | 1462 InstructionOperand::ReplaceWith(operand, &allocated); |
| 1452 if (is_tagged) { | 1463 if (is_tagged) { |
| 1453 TRACE("Fixed reg is tagged at %d\n", pos); | 1464 TRACE("Fixed reg is tagged at %d\n", pos); |
| 1454 auto instr = InstructionAt(pos); | 1465 auto instr = code()->InstructionAt(pos); |
| 1455 if (instr->HasReferenceMap()) { | 1466 if (instr->HasReferenceMap()) { |
| 1456 instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand)); | 1467 instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand)); |
| 1457 } | 1468 } |
| 1458 } | 1469 } |
| 1459 return operand; | 1470 return operand; |
| 1460 } | 1471 } |
| 1461 | 1472 |
| 1462 | 1473 |
| 1463 void ConstraintBuilder::MeetRegisterConstraints() { | 1474 void ConstraintBuilder::MeetRegisterConstraints() { |
| 1464 for (auto block : code()->instruction_blocks()) { | 1475 for (auto block : code()->instruction_blocks()) { |
| (...skipping 11 matching lines...) Expand all Loading... | |
| 1476 if (i != end) MeetConstraintsAfter(i); | 1487 if (i != end) MeetConstraintsAfter(i); |
| 1477 } | 1488 } |
| 1478 // Meet register constraints for the instruction in the end. | 1489 // Meet register constraints for the instruction in the end. |
| 1479 MeetRegisterConstraintsForLastInstructionInBlock(block); | 1490 MeetRegisterConstraintsForLastInstructionInBlock(block); |
| 1480 } | 1491 } |
| 1481 | 1492 |
| 1482 | 1493 |
| 1483 void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock( | 1494 void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock( |
| 1484 const InstructionBlock* block) { | 1495 const InstructionBlock* block) { |
| 1485 int end = block->last_instruction_index(); | 1496 int end = block->last_instruction_index(); |
| 1486 auto last_instruction = InstructionAt(end); | 1497 auto last_instruction = code()->InstructionAt(end); |
| 1487 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { | 1498 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { |
| 1488 auto output_operand = last_instruction->OutputAt(i); | 1499 auto output_operand = last_instruction->OutputAt(i); |
| 1489 DCHECK(!output_operand->IsConstant()); | 1500 DCHECK(!output_operand->IsConstant()); |
| 1490 auto output = UnallocatedOperand::cast(output_operand); | 1501 auto output = UnallocatedOperand::cast(output_operand); |
| 1491 int output_vreg = output->virtual_register(); | 1502 int output_vreg = output->virtual_register(); |
| 1492 auto range = LiveRangeFor(output_vreg); | 1503 auto range = data()->GetOrCreateLiveRangeFor(output_vreg); |
| 1493 bool assigned = false; | 1504 bool assigned = false; |
| 1494 if (output->HasFixedPolicy()) { | 1505 if (output->HasFixedPolicy()) { |
| 1495 AllocateFixed(output, -1, false); | 1506 AllocateFixed(output, -1, false); |
| 1496 // This value is produced on the stack, we never need to spill it. | 1507 // This value is produced on the stack, we never need to spill it. |
| 1497 if (output->IsStackSlot()) { | 1508 if (output->IsStackSlot()) { |
| 1498 DCHECK(StackSlotOperand::cast(output)->index() < | 1509 DCHECK(StackSlotOperand::cast(output)->index() < |
| 1499 data()->frame()->GetSpillSlotCount()); | 1510 data()->frame()->GetSpillSlotCount()); |
| 1500 range->SetSpillOperand(StackSlotOperand::cast(output)); | 1511 range->SetSpillOperand(StackSlotOperand::cast(output)); |
| 1501 range->SetSpillStartIndex(end); | 1512 range->SetSpillStartIndex(end); |
| 1502 assigned = true; | 1513 assigned = true; |
| (...skipping 17 matching lines...) Expand all Loading... | |
| 1520 int gap_index = successor->first_instruction_index(); | 1531 int gap_index = successor->first_instruction_index(); |
| 1521 range->SpillAtDefinition(allocation_zone(), gap_index, output); | 1532 range->SpillAtDefinition(allocation_zone(), gap_index, output); |
| 1522 range->SetSpillStartIndex(gap_index); | 1533 range->SetSpillStartIndex(gap_index); |
| 1523 } | 1534 } |
| 1524 } | 1535 } |
| 1525 } | 1536 } |
| 1526 } | 1537 } |
| 1527 | 1538 |
| 1528 | 1539 |
| 1529 void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { | 1540 void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { |
| 1530 auto first = InstructionAt(instr_index); | 1541 auto first = code()->InstructionAt(instr_index); |
| 1531 // Handle fixed temporaries. | 1542 // Handle fixed temporaries. |
| 1532 for (size_t i = 0; i < first->TempCount(); i++) { | 1543 for (size_t i = 0; i < first->TempCount(); i++) { |
| 1533 auto temp = UnallocatedOperand::cast(first->TempAt(i)); | 1544 auto temp = UnallocatedOperand::cast(first->TempAt(i)); |
| 1534 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false); | 1545 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false); |
| 1535 } | 1546 } |
| 1536 // Handle constant/fixed output operands. | 1547 // Handle constant/fixed output operands. |
| 1537 for (size_t i = 0; i < first->OutputCount(); i++) { | 1548 for (size_t i = 0; i < first->OutputCount(); i++) { |
| 1538 InstructionOperand* output = first->OutputAt(i); | 1549 InstructionOperand* output = first->OutputAt(i); |
| 1539 if (output->IsConstant()) { | 1550 if (output->IsConstant()) { |
| 1540 int output_vreg = ConstantOperand::cast(output)->virtual_register(); | 1551 int output_vreg = ConstantOperand::cast(output)->virtual_register(); |
| 1541 auto range = LiveRangeFor(output_vreg); | 1552 auto range = data()->GetOrCreateLiveRangeFor(output_vreg); |
| 1542 range->SetSpillStartIndex(instr_index + 1); | 1553 range->SetSpillStartIndex(instr_index + 1); |
| 1543 range->SetSpillOperand(output); | 1554 range->SetSpillOperand(output); |
| 1544 continue; | 1555 continue; |
| 1545 } | 1556 } |
| 1546 auto first_output = UnallocatedOperand::cast(output); | 1557 auto first_output = UnallocatedOperand::cast(output); |
| 1547 auto range = LiveRangeFor(first_output->virtual_register()); | 1558 auto range = |
| 1559 data()->GetOrCreateLiveRangeFor(first_output->virtual_register()); | |
| 1548 bool assigned = false; | 1560 bool assigned = false; |
| 1549 if (first_output->HasFixedPolicy()) { | 1561 if (first_output->HasFixedPolicy()) { |
| 1550 int output_vreg = first_output->virtual_register(); | 1562 int output_vreg = first_output->virtual_register(); |
| 1551 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); | 1563 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); |
| 1552 bool is_tagged = IsReference(output_vreg); | 1564 bool is_tagged = code()->IsReference(output_vreg); |
| 1553 AllocateFixed(first_output, instr_index, is_tagged); | 1565 AllocateFixed(first_output, instr_index, is_tagged); |
| 1554 | 1566 |
| 1555 // This value is produced on the stack, we never need to spill it. | 1567 // This value is produced on the stack, we never need to spill it. |
| 1556 if (first_output->IsStackSlot()) { | 1568 if (first_output->IsStackSlot()) { |
| 1557 DCHECK(StackSlotOperand::cast(first_output)->index() < | 1569 DCHECK(StackSlotOperand::cast(first_output)->index() < |
| 1558 data()->frame()->GetTotalFrameSlotCount()); | 1570 data()->frame()->GetTotalFrameSlotCount()); |
| 1559 range->SetSpillOperand(StackSlotOperand::cast(first_output)); | 1571 range->SetSpillOperand(StackSlotOperand::cast(first_output)); |
| 1560 range->SetSpillStartIndex(instr_index + 1); | 1572 range->SetSpillStartIndex(instr_index + 1); |
| 1561 assigned = true; | 1573 assigned = true; |
| 1562 } | 1574 } |
| 1563 data()->AddGapMove(instr_index + 1, Instruction::START, *first_output, | 1575 data()->AddGapMove(instr_index + 1, Instruction::START, *first_output, |
| 1564 output_copy); | 1576 output_copy); |
| 1565 } | 1577 } |
| 1566 // Make sure we add a gap move for spilling (if we have not done | 1578 // Make sure we add a gap move for spilling (if we have not done |
| 1567 // so already). | 1579 // so already). |
| 1568 if (!assigned) { | 1580 if (!assigned) { |
| 1569 range->SpillAtDefinition(allocation_zone(), instr_index + 1, | 1581 range->SpillAtDefinition(allocation_zone(), instr_index + 1, |
| 1570 first_output); | 1582 first_output); |
| 1571 range->SetSpillStartIndex(instr_index + 1); | 1583 range->SetSpillStartIndex(instr_index + 1); |
| 1572 } | 1584 } |
| 1573 } | 1585 } |
| 1574 } | 1586 } |
| 1575 | 1587 |
| 1576 | 1588 |
| 1577 void ConstraintBuilder::MeetConstraintsBefore(int instr_index) { | 1589 void ConstraintBuilder::MeetConstraintsBefore(int instr_index) { |
| 1578 auto second = InstructionAt(instr_index); | 1590 auto second = code()->InstructionAt(instr_index); |
| 1579 // Handle fixed input operands of second instruction. | 1591 // Handle fixed input operands of second instruction. |
| 1580 for (size_t i = 0; i < second->InputCount(); i++) { | 1592 for (size_t i = 0; i < second->InputCount(); i++) { |
| 1581 auto input = second->InputAt(i); | 1593 auto input = second->InputAt(i); |
| 1582 if (input->IsImmediate()) continue; // Ignore immediates. | 1594 if (input->IsImmediate()) continue; // Ignore immediates. |
| 1583 auto cur_input = UnallocatedOperand::cast(input); | 1595 auto cur_input = UnallocatedOperand::cast(input); |
| 1584 if (cur_input->HasFixedPolicy()) { | 1596 if (cur_input->HasFixedPolicy()) { |
| 1585 int input_vreg = cur_input->virtual_register(); | 1597 int input_vreg = cur_input->virtual_register(); |
| 1586 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); | 1598 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); |
| 1587 bool is_tagged = IsReference(input_vreg); | 1599 bool is_tagged = code()->IsReference(input_vreg); |
| 1588 AllocateFixed(cur_input, instr_index, is_tagged); | 1600 AllocateFixed(cur_input, instr_index, is_tagged); |
| 1589 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input); | 1601 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input); |
| 1590 } | 1602 } |
| 1591 } | 1603 } |
| 1592 // Handle "output same as input" for second instruction. | 1604 // Handle "output same as input" for second instruction. |
| 1593 for (size_t i = 0; i < second->OutputCount(); i++) { | 1605 for (size_t i = 0; i < second->OutputCount(); i++) { |
| 1594 auto output = second->OutputAt(i); | 1606 auto output = second->OutputAt(i); |
| 1595 if (!output->IsUnallocated()) continue; | 1607 if (!output->IsUnallocated()) continue; |
| 1596 auto second_output = UnallocatedOperand::cast(output); | 1608 auto second_output = UnallocatedOperand::cast(output); |
| 1597 if (!second_output->HasSameAsInputPolicy()) continue; | 1609 if (!second_output->HasSameAsInputPolicy()) continue; |
| 1598 DCHECK(i == 0); // Only valid for first output. | 1610 DCHECK(i == 0); // Only valid for first output. |
| 1599 UnallocatedOperand* cur_input = | 1611 UnallocatedOperand* cur_input = |
| 1600 UnallocatedOperand::cast(second->InputAt(0)); | 1612 UnallocatedOperand::cast(second->InputAt(0)); |
| 1601 int output_vreg = second_output->virtual_register(); | 1613 int output_vreg = second_output->virtual_register(); |
| 1602 int input_vreg = cur_input->virtual_register(); | 1614 int input_vreg = cur_input->virtual_register(); |
| 1603 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); | 1615 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); |
| 1604 cur_input->set_virtual_register(second_output->virtual_register()); | 1616 cur_input->set_virtual_register(second_output->virtual_register()); |
| 1605 auto gap_move = data()->AddGapMove(instr_index, Instruction::END, | 1617 auto gap_move = data()->AddGapMove(instr_index, Instruction::END, |
| 1606 input_copy, *cur_input); | 1618 input_copy, *cur_input); |
| 1607 if (IsReference(input_vreg) && !IsReference(output_vreg)) { | 1619 if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) { |
| 1608 if (second->HasReferenceMap()) { | 1620 if (second->HasReferenceMap()) { |
| 1609 RegisterAllocationData::DelayedReference delayed_reference = { | 1621 RegisterAllocationData::DelayedReference delayed_reference = { |
| 1610 second->reference_map(), &gap_move->source()}; | 1622 second->reference_map(), &gap_move->source()}; |
| 1611 data()->delayed_references().push_back(delayed_reference); | 1623 data()->delayed_references().push_back(delayed_reference); |
| 1612 } | 1624 } |
| 1613 } else if (!IsReference(input_vreg) && IsReference(output_vreg)) { | 1625 } else if (!code()->IsReference(input_vreg) && |
| 1626 code()->IsReference(output_vreg)) { | |
| 1614 // The input is assumed to immediately have a tagged representation, | 1627 // The input is assumed to immediately have a tagged representation, |
| 1615 // before the pointer map can be used. I.e. the pointer map at the | 1628 // before the pointer map can be used. I.e. the pointer map at the |
| 1616 // instruction will include the output operand (whose value at the | 1629 // instruction will include the output operand (whose value at the |
| 1617 // beginning of the instruction is equal to the input operand). If | 1630 // beginning of the instruction is equal to the input operand). If |
| 1618 // this is not desired, then the pointer map at this instruction needs | 1631 // this is not desired, then the pointer map at this instruction needs |
| 1619 // to be adjusted manually. | 1632 // to be adjusted manually. |
| 1620 } | 1633 } |
| 1621 } | 1634 } |
| 1622 } | 1635 } |
| 1623 | 1636 |
| (...skipping 12 matching lines...) Expand all Loading... | |
| 1636 auto map_value = data()->InitializePhiMap(block, phi); | 1649 auto map_value = data()->InitializePhiMap(block, phi); |
| 1637 auto& output = phi->output(); | 1650 auto& output = phi->output(); |
| 1638 // Map the destination operands, so the commitment phase can find them. | 1651 // Map the destination operands, so the commitment phase can find them. |
| 1639 for (size_t i = 0; i < phi->operands().size(); ++i) { | 1652 for (size_t i = 0; i < phi->operands().size(); ++i) { |
| 1640 InstructionBlock* cur_block = | 1653 InstructionBlock* cur_block = |
| 1641 code()->InstructionBlockAt(block->predecessors()[i]); | 1654 code()->InstructionBlockAt(block->predecessors()[i]); |
| 1642 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]); | 1655 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]); |
| 1643 auto move = data()->AddGapMove(cur_block->last_instruction_index(), | 1656 auto move = data()->AddGapMove(cur_block->last_instruction_index(), |
| 1644 Instruction::END, input, output); | 1657 Instruction::END, input, output); |
| 1645 map_value->AddOperand(&move->destination()); | 1658 map_value->AddOperand(&move->destination()); |
| 1646 DCHECK(!InstructionAt(cur_block->last_instruction_index()) | 1659 DCHECK(!code() |
| 1660 ->InstructionAt(cur_block->last_instruction_index()) | |
| 1647 ->HasReferenceMap()); | 1661 ->HasReferenceMap()); |
| 1648 } | 1662 } |
| 1649 auto live_range = LiveRangeFor(phi_vreg); | 1663 auto live_range = data()->GetOrCreateLiveRangeFor(phi_vreg); |
| 1650 int gap_index = block->first_instruction_index(); | 1664 int gap_index = block->first_instruction_index(); |
| 1651 live_range->SpillAtDefinition(allocation_zone(), gap_index, &output); | 1665 live_range->SpillAtDefinition(allocation_zone(), gap_index, &output); |
| 1652 live_range->SetSpillStartIndex(gap_index); | 1666 live_range->SetSpillStartIndex(gap_index); |
| 1653 // We use the phi-ness of some nodes in some later heuristics. | 1667 // We use the phi-ness of some nodes in some later heuristics. |
| 1654 live_range->set_is_phi(true); | 1668 live_range->set_is_phi(true); |
| 1655 live_range->set_is_non_loop_phi(!block->IsLoopHeader()); | 1669 live_range->set_is_non_loop_phi(!block->IsLoopHeader()); |
| 1656 } | 1670 } |
| 1657 } | 1671 } |
| 1658 | 1672 |
| 1659 | 1673 |
| (...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1695 BitVector* live_out) { | 1709 BitVector* live_out) { |
| 1696 // Add an interval that includes the entire block to the live range for | 1710 // Add an interval that includes the entire block to the live range for |
| 1697 // each live_out value. | 1711 // each live_out value. |
| 1698 auto start = LifetimePosition::GapFromInstructionIndex( | 1712 auto start = LifetimePosition::GapFromInstructionIndex( |
| 1699 block->first_instruction_index()); | 1713 block->first_instruction_index()); |
| 1700 auto end = LifetimePosition::InstructionFromInstructionIndex( | 1714 auto end = LifetimePosition::InstructionFromInstructionIndex( |
| 1701 block->last_instruction_index()).NextStart(); | 1715 block->last_instruction_index()).NextStart(); |
| 1702 BitVector::Iterator iterator(live_out); | 1716 BitVector::Iterator iterator(live_out); |
| 1703 while (!iterator.Done()) { | 1717 while (!iterator.Done()) { |
| 1704 int operand_index = iterator.Current(); | 1718 int operand_index = iterator.Current(); |
| 1705 auto range = LiveRangeFor(operand_index); | 1719 auto range = data()->GetOrCreateLiveRangeFor(operand_index); |
| 1706 range->AddUseInterval(start, end, allocation_zone()); | 1720 range->AddUseInterval(start, end, allocation_zone()); |
| 1707 iterator.Advance(); | 1721 iterator.Advance(); |
| 1708 } | 1722 } |
| 1709 } | 1723 } |
| 1710 | 1724 |
| 1711 | 1725 |
| 1712 int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) { | 1726 int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) { |
| 1713 return -index - 1 - config()->num_general_registers(); | 1727 return -index - 1 - config()->num_general_registers(); |
| 1714 } | 1728 } |
| 1715 | 1729 |
| 1716 | 1730 |
| 1717 LiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) { | 1731 TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) { |
| 1718 DCHECK(index < config()->num_general_registers()); | 1732 DCHECK(index < config()->num_general_registers()); |
| 1719 auto result = data()->fixed_live_ranges()[index]; | 1733 auto result = data()->fixed_live_ranges()[index]; |
| 1720 if (result == nullptr) { | 1734 if (result == nullptr) { |
| 1721 result = data()->NewLiveRange(FixedLiveRangeID(index), | 1735 result = data()->NewLiveRange(FixedLiveRangeID(index), |
| 1722 InstructionSequence::DefaultRepresentation()); | 1736 InstructionSequence::DefaultRepresentation()); |
| 1723 DCHECK(result->IsFixed()); | 1737 DCHECK(result->IsFixed()); |
| 1724 result->set_assigned_register(index); | 1738 result->set_assigned_register(index); |
| 1725 data()->MarkAllocated(GENERAL_REGISTERS, index); | 1739 data()->MarkAllocated(GENERAL_REGISTERS, index); |
| 1726 data()->fixed_live_ranges()[index] = result; | 1740 data()->fixed_live_ranges()[index] = result; |
| 1727 } | 1741 } |
| 1728 return result; | 1742 return result; |
| 1729 } | 1743 } |
| 1730 | 1744 |
| 1731 | 1745 |
| 1732 LiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) { | 1746 TopLevelLiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) { |
| 1733 DCHECK(index < config()->num_aliased_double_registers()); | 1747 DCHECK(index < config()->num_aliased_double_registers()); |
| 1734 auto result = data()->fixed_double_live_ranges()[index]; | 1748 auto result = data()->fixed_double_live_ranges()[index]; |
| 1735 if (result == nullptr) { | 1749 if (result == nullptr) { |
| 1736 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index), kRepFloat64); | 1750 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index), kRepFloat64); |
| 1737 DCHECK(result->IsFixed()); | 1751 DCHECK(result->IsFixed()); |
| 1738 result->set_assigned_register(index); | 1752 result->set_assigned_register(index); |
| 1739 data()->MarkAllocated(DOUBLE_REGISTERS, index); | 1753 data()->MarkAllocated(DOUBLE_REGISTERS, index); |
| 1740 data()->fixed_double_live_ranges()[index] = result; | 1754 data()->fixed_double_live_ranges()[index] = result; |
| 1741 } | 1755 } |
| 1742 return result; | 1756 return result; |
| 1743 } | 1757 } |
| 1744 | 1758 |
| 1745 | 1759 |
| 1746 LiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) { | 1760 TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) { |
| 1747 if (operand->IsUnallocated()) { | 1761 if (operand->IsUnallocated()) { |
| 1748 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); | 1762 return data()->GetOrCreateLiveRangeFor( |
| 1763 UnallocatedOperand::cast(operand)->virtual_register()); | |
| 1749 } else if (operand->IsConstant()) { | 1764 } else if (operand->IsConstant()) { |
| 1750 return LiveRangeFor(ConstantOperand::cast(operand)->virtual_register()); | 1765 return data()->GetOrCreateLiveRangeFor( |
| 1766 ConstantOperand::cast(operand)->virtual_register()); | |
| 1751 } else if (operand->IsRegister()) { | 1767 } else if (operand->IsRegister()) { |
| 1752 return FixedLiveRangeFor(RegisterOperand::cast(operand)->index()); | 1768 return FixedLiveRangeFor(RegisterOperand::cast(operand)->index()); |
| 1753 } else if (operand->IsDoubleRegister()) { | 1769 } else if (operand->IsDoubleRegister()) { |
| 1754 return FixedDoubleLiveRangeFor( | 1770 return FixedDoubleLiveRangeFor( |
| 1755 DoubleRegisterOperand::cast(operand)->index()); | 1771 DoubleRegisterOperand::cast(operand)->index()); |
| 1756 } else { | 1772 } else { |
| 1757 return nullptr; | 1773 return nullptr; |
| 1758 } | 1774 } |
| 1759 } | 1775 } |
| 1760 | 1776 |
| (...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1870 use_pos = curr_position; | 1886 use_pos = curr_position; |
| 1871 } else { | 1887 } else { |
| 1872 use_pos = curr_position.End(); | 1888 use_pos = curr_position.End(); |
| 1873 } | 1889 } |
| 1874 | 1890 |
| 1875 if (input->IsUnallocated()) { | 1891 if (input->IsUnallocated()) { |
| 1876 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input); | 1892 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input); |
| 1877 int vreg = unalloc->virtual_register(); | 1893 int vreg = unalloc->virtual_register(); |
| 1878 live->Add(vreg); | 1894 live->Add(vreg); |
| 1879 if (unalloc->HasSlotPolicy()) { | 1895 if (unalloc->HasSlotPolicy()) { |
| 1880 LiveRangeFor(vreg)->set_has_slot_use(true); | 1896 data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true); |
| 1881 } | 1897 } |
| 1882 } | 1898 } |
| 1883 Use(block_start_position, use_pos, input); | 1899 Use(block_start_position, use_pos, input); |
| 1884 } | 1900 } |
| 1885 | 1901 |
| 1886 for (size_t i = 0; i < instr->TempCount(); i++) { | 1902 for (size_t i = 0; i < instr->TempCount(); i++) { |
| 1887 auto temp = instr->TempAt(i); | 1903 auto temp = instr->TempAt(i); |
| 1888 // Unsupported. | 1904 // Unsupported. |
| 1889 DCHECK_IMPLIES(temp->IsUnallocated(), | 1905 DCHECK_IMPLIES(temp->IsUnallocated(), |
| 1890 !UnallocatedOperand::cast(temp)->HasSlotPolicy()); | 1906 !UnallocatedOperand::cast(temp)->HasSlotPolicy()); |
| (...skipping 25 matching lines...) Expand all Loading... | |
| 1916 } | 1932 } |
| 1917 for (auto cur : *move) { | 1933 for (auto cur : *move) { |
| 1918 auto& from = cur->source(); | 1934 auto& from = cur->source(); |
| 1919 auto& to = cur->destination(); | 1935 auto& to = cur->destination(); |
| 1920 void* hint = &to; | 1936 void* hint = &to; |
| 1921 UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to); | 1937 UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to); |
| 1922 UsePosition* to_use = nullptr; | 1938 UsePosition* to_use = nullptr; |
| 1923 int phi_vreg = -1; | 1939 int phi_vreg = -1; |
| 1924 if (to.IsUnallocated()) { | 1940 if (to.IsUnallocated()) { |
| 1925 int to_vreg = UnallocatedOperand::cast(to).virtual_register(); | 1941 int to_vreg = UnallocatedOperand::cast(to).virtual_register(); |
| 1926 auto to_range = LiveRangeFor(to_vreg); | 1942 auto to_range = data()->GetOrCreateLiveRangeFor(to_vreg); |
| 1927 if (to_range->is_phi()) { | 1943 if (to_range->is_phi()) { |
| 1928 phi_vreg = to_vreg; | 1944 phi_vreg = to_vreg; |
| 1929 if (to_range->is_non_loop_phi()) { | 1945 if (to_range->is_non_loop_phi()) { |
| 1930 hint = to_range->current_hint_position(); | 1946 hint = to_range->current_hint_position(); |
| 1931 hint_type = hint == nullptr ? UsePositionHintType::kNone | 1947 hint_type = hint == nullptr ? UsePositionHintType::kNone |
| 1932 : UsePositionHintType::kUsePos; | 1948 : UsePositionHintType::kUsePos; |
| 1933 } else { | 1949 } else { |
| 1934 hint_type = UsePositionHintType::kPhi; | 1950 hint_type = UsePositionHintType::kPhi; |
| 1935 hint = data()->GetPhiMapValueFor(to_vreg); | 1951 hint = data()->GetPhiMapValueFor(to_vreg); |
| 1936 } | 1952 } |
| (...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2001 DCHECK(block->IsLoopHeader()); | 2017 DCHECK(block->IsLoopHeader()); |
| 2002 // Add a live range stretching from the first loop instruction to the last | 2018 // Add a live range stretching from the first loop instruction to the last |
| 2003 // for each value live on entry to the header. | 2019 // for each value live on entry to the header. |
| 2004 BitVector::Iterator iterator(live); | 2020 BitVector::Iterator iterator(live); |
| 2005 auto start = LifetimePosition::GapFromInstructionIndex( | 2021 auto start = LifetimePosition::GapFromInstructionIndex( |
| 2006 block->first_instruction_index()); | 2022 block->first_instruction_index()); |
| 2007 auto end = LifetimePosition::GapFromInstructionIndex( | 2023 auto end = LifetimePosition::GapFromInstructionIndex( |
| 2008 code()->LastLoopInstructionIndex(block)).NextFullStart(); | 2024 code()->LastLoopInstructionIndex(block)).NextFullStart(); |
| 2009 while (!iterator.Done()) { | 2025 while (!iterator.Done()) { |
| 2010 int operand_index = iterator.Current(); | 2026 int operand_index = iterator.Current(); |
| 2011 auto range = LiveRangeFor(operand_index); | 2027 auto range = data()->GetOrCreateLiveRangeFor(operand_index); |
|
Benedikt Meurer
2015/08/26 04:31:20
Nit: please deautofy this.
Mircea Trofin
2015/08/26 05:00:13
Done.
| |
| 2012 range->EnsureInterval(start, end, allocation_zone()); | 2028 range->EnsureInterval(start, end, allocation_zone()); |
| 2013 iterator.Advance(); | 2029 iterator.Advance(); |
| 2014 } | 2030 } |
| 2015 // Insert all values into the live in sets of all blocks in the loop. | 2031 // Insert all values into the live in sets of all blocks in the loop. |
| 2016 for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt(); | 2032 for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt(); |
| 2017 ++i) { | 2033 ++i) { |
| 2018 live_in_sets()[i]->Union(*live); | 2034 live_in_sets()[i]->Union(*live); |
| 2019 } | 2035 } |
| 2020 } | 2036 } |
| 2021 | 2037 |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2083 if (it == phi_hints_.end()) return; | 2099 if (it == phi_hints_.end()) return; |
| 2084 DCHECK(!it->second->IsResolved()); | 2100 DCHECK(!it->second->IsResolved()); |
| 2085 it->second->ResolveHint(use_pos); | 2101 it->second->ResolveHint(use_pos); |
| 2086 } | 2102 } |
| 2087 | 2103 |
| 2088 | 2104 |
| 2089 void LiveRangeBuilder::Verify() const { | 2105 void LiveRangeBuilder::Verify() const { |
| 2090 for (auto& hint : phi_hints_) { | 2106 for (auto& hint : phi_hints_) { |
| 2091 CHECK(hint.second->IsResolved()); | 2107 CHECK(hint.second->IsResolved()); |
| 2092 } | 2108 } |
| 2093 for (auto current : data()->live_ranges()) { | 2109 for (LiveRange* current : data()->live_ranges()) { |
| 2094 if (current != nullptr) current->Verify(); | 2110 if (current != nullptr) current->Verify(); |
| 2095 } | 2111 } |
| 2096 } | 2112 } |
| 2097 | 2113 |
| 2098 | 2114 |
| 2099 RegisterAllocator::RegisterAllocator(RegisterAllocationData* data, | 2115 RegisterAllocator::RegisterAllocator(RegisterAllocationData* data, |
| 2100 RegisterKind kind) | 2116 RegisterKind kind) |
| 2101 : data_(data), | 2117 : data_(data), |
| 2102 mode_(kind), | 2118 mode_(kind), |
| 2103 num_registers_(GetRegisterCount(data->config(), kind)) {} | 2119 num_registers_(GetRegisterCount(data->config(), kind)) {} |
| 2104 | 2120 |
| 2105 | 2121 |
| 2106 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, | 2122 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, |
| 2107 LifetimePosition pos) { | 2123 LifetimePosition pos) { |
| 2108 DCHECK(!range->IsFixed()); | 2124 DCHECK(!range->TopLevel()->IsFixed()); |
| 2109 TRACE("Splitting live range %d at %d\n", range->id(), pos.value()); | 2125 TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(), |
| 2126 range->relative_id(), pos.value()); | |
| 2110 | 2127 |
| 2111 if (pos <= range->Start()) return range; | 2128 if (pos <= range->Start()) return range; |
| 2112 | 2129 |
| 2113 // We can't properly connect liveranges if splitting occurred at the end | 2130 // We can't properly connect liveranges if splitting occurred at the end |
| 2114 // a block. | 2131 // a block. |
| 2115 DCHECK(pos.IsStart() || pos.IsGapPosition() || | 2132 DCHECK(pos.IsStart() || pos.IsGapPosition() || |
| 2116 (GetInstructionBlock(code(), pos)->last_instruction_index() != | 2133 (GetInstructionBlock(code(), pos)->last_instruction_index() != |
| 2117 pos.ToInstructionIndex())); | 2134 pos.ToInstructionIndex())); |
| 2118 | 2135 |
| 2119 auto result = data()->NewChildRangeFor(range); | 2136 LiveRange* result = range->SplitAt(pos, allocation_zone()); |
| 2120 range->SplitAt(pos, result, allocation_zone()); | |
| 2121 return result; | 2137 return result; |
| 2122 } | 2138 } |
| 2123 | 2139 |
| 2124 | 2140 |
| 2125 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, | 2141 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, |
| 2126 LifetimePosition start, | 2142 LifetimePosition start, |
| 2127 LifetimePosition end) { | 2143 LifetimePosition end) { |
| 2128 DCHECK(!range->IsFixed()); | 2144 DCHECK(!range->TopLevel()->IsFixed()); |
| 2129 TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), | 2145 TRACE("Splitting live range %d:%d in position between [%d, %d]\n", |
| 2130 start.value(), end.value()); | 2146 range->TopLevel()->vreg(), range->relative_id(), start.value(), |
| 2147 end.value()); | |
| 2131 | 2148 |
| 2132 auto split_pos = FindOptimalSplitPos(start, end); | 2149 auto split_pos = FindOptimalSplitPos(start, end); |
| 2133 DCHECK(split_pos >= start); | 2150 DCHECK(split_pos >= start); |
| 2134 return SplitRangeAt(range, split_pos); | 2151 return SplitRangeAt(range, split_pos); |
| 2135 } | 2152 } |
| 2136 | 2153 |
| 2137 | 2154 |
| 2138 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, | 2155 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, |
| 2139 LifetimePosition end) { | 2156 LifetimePosition end) { |
| 2140 int start_instr = start.ToInstructionIndex(); | 2157 int start_instr = start.ToInstructionIndex(); |
| (...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2198 // Try hoisting out to an outer loop. | 2215 // Try hoisting out to an outer loop. |
| 2199 loop_header = GetContainingLoop(code(), loop_header); | 2216 loop_header = GetContainingLoop(code(), loop_header); |
| 2200 } | 2217 } |
| 2201 | 2218 |
| 2202 return pos; | 2219 return pos; |
| 2203 } | 2220 } |
| 2204 | 2221 |
| 2205 | 2222 |
| 2206 void RegisterAllocator::Spill(LiveRange* range) { | 2223 void RegisterAllocator::Spill(LiveRange* range) { |
| 2207 DCHECK(!range->spilled()); | 2224 DCHECK(!range->spilled()); |
| 2208 TRACE("Spilling live range %d\n", range->id()); | 2225 TopLevelLiveRange* first = range->TopLevel(); |
| 2209 auto first = range->TopLevel(); | 2226 TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id()); |
| 2227 | |
| 2210 if (first->HasNoSpillType()) { | 2228 if (first->HasNoSpillType()) { |
| 2211 data()->AssignSpillRangeToLiveRange(first); | 2229 data()->AssignSpillRangeToLiveRange(first); |
| 2212 } | 2230 } |
| 2213 range->Spill(); | 2231 range->Spill(); |
| 2214 } | 2232 } |
| 2215 | 2233 |
| 2216 | 2234 |
| 2217 const ZoneVector<LiveRange*>& RegisterAllocator::GetFixedRegisters() const { | 2235 const ZoneVector<TopLevelLiveRange*>& RegisterAllocator::GetFixedRegisters() |
| 2236 const { | |
| 2218 return mode() == DOUBLE_REGISTERS ? data()->fixed_double_live_ranges() | 2237 return mode() == DOUBLE_REGISTERS ? data()->fixed_double_live_ranges() |
| 2219 : data()->fixed_live_ranges(); | 2238 : data()->fixed_live_ranges(); |
| 2220 } | 2239 } |
| 2221 | 2240 |
| 2222 | 2241 |
| 2223 const char* RegisterAllocator::RegisterName(int allocation_index) const { | 2242 const char* RegisterAllocator::RegisterName(int allocation_index) const { |
| 2224 if (mode() == GENERAL_REGISTERS) { | 2243 if (mode() == GENERAL_REGISTERS) { |
| 2225 return data()->config()->general_register_name(allocation_index); | 2244 return data()->config()->general_register_name(allocation_index); |
| 2226 } else { | 2245 } else { |
| 2227 return data()->config()->double_register_name(allocation_index); | 2246 return data()->config()->double_register_name(allocation_index); |
| (...skipping 16 matching lines...) Expand all Loading... | |
| 2244 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= | 2263 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= |
| 2245 this->data()->config()->num_general_registers()); | 2264 this->data()->config()->num_general_registers()); |
| 2246 } | 2265 } |
| 2247 | 2266 |
| 2248 | 2267 |
| 2249 void LinearScanAllocator::AllocateRegisters() { | 2268 void LinearScanAllocator::AllocateRegisters() { |
| 2250 DCHECK(unhandled_live_ranges().empty()); | 2269 DCHECK(unhandled_live_ranges().empty()); |
| 2251 DCHECK(active_live_ranges().empty()); | 2270 DCHECK(active_live_ranges().empty()); |
| 2252 DCHECK(inactive_live_ranges().empty()); | 2271 DCHECK(inactive_live_ranges().empty()); |
| 2253 | 2272 |
| 2254 for (auto range : data()->live_ranges()) { | 2273 for (LiveRange* range : data()->live_ranges()) { |
| 2255 if (range == nullptr) continue; | 2274 if (range == nullptr) continue; |
| 2256 if (range->kind() == mode()) { | 2275 if (range->kind() == mode()) { |
| 2257 AddToUnhandledUnsorted(range); | 2276 AddToUnhandledUnsorted(range); |
| 2258 } | 2277 } |
| 2259 } | 2278 } |
| 2260 SortUnhandled(); | 2279 SortUnhandled(); |
| 2261 DCHECK(UnhandledIsSorted()); | 2280 DCHECK(UnhandledIsSorted()); |
| 2262 | 2281 |
| 2263 auto& fixed_ranges = GetFixedRegisters(); | 2282 auto& fixed_ranges = GetFixedRegisters(); |
| 2264 for (auto current : fixed_ranges) { | 2283 for (auto current : fixed_ranges) { |
| 2265 if (current != nullptr) { | 2284 if (current != nullptr) { |
| 2266 DCHECK_EQ(mode(), current->kind()); | 2285 DCHECK_EQ(mode(), current->kind()); |
| 2267 AddToInactive(current); | 2286 AddToInactive(current); |
| 2268 } | 2287 } |
| 2269 } | 2288 } |
| 2270 | 2289 |
| 2271 while (!unhandled_live_ranges().empty()) { | 2290 while (!unhandled_live_ranges().empty()) { |
| 2272 DCHECK(UnhandledIsSorted()); | 2291 DCHECK(UnhandledIsSorted()); |
| 2273 auto current = unhandled_live_ranges().back(); | 2292 auto current = unhandled_live_ranges().back(); |
| 2274 unhandled_live_ranges().pop_back(); | 2293 unhandled_live_ranges().pop_back(); |
| 2275 DCHECK(UnhandledIsSorted()); | 2294 DCHECK(UnhandledIsSorted()); |
| 2276 auto position = current->Start(); | 2295 auto position = current->Start(); |
| 2277 #ifdef DEBUG | 2296 #ifdef DEBUG |
| 2278 allocation_finger_ = position; | 2297 allocation_finger_ = position; |
| 2279 #endif | 2298 #endif |
| 2280 TRACE("Processing interval %d start=%d\n", current->id(), position.value()); | 2299 TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(), |
| 2300 current->relative_id(), position.value()); | |
| 2281 | 2301 |
| 2282 if (!current->HasNoSpillType()) { | 2302 if (current->IsTopLevel() && !current->TopLevel()->HasNoSpillType()) { |
| 2283 TRACE("Live range %d already has a spill operand\n", current->id()); | 2303 TRACE("Live range %d:%d already has a spill operand\n", |
| 2304 current->TopLevel()->vreg(), current->relative_id()); | |
| 2284 auto next_pos = position; | 2305 auto next_pos = position; |
| 2285 if (next_pos.IsGapPosition()) { | 2306 if (next_pos.IsGapPosition()) { |
| 2286 next_pos = next_pos.NextStart(); | 2307 next_pos = next_pos.NextStart(); |
| 2287 } | 2308 } |
| 2288 auto pos = current->NextUsePositionRegisterIsBeneficial(next_pos); | 2309 auto pos = current->NextUsePositionRegisterIsBeneficial(next_pos); |
| 2289 // If the range already has a spill operand and it doesn't need a | 2310 // If the range already has a spill operand and it doesn't need a |
| 2290 // register immediately, split it and spill the first part of the range. | 2311 // register immediately, split it and spill the first part of the range. |
| 2291 if (pos == nullptr) { | 2312 if (pos == nullptr) { |
| 2292 Spill(current); | 2313 Spill(current); |
| 2293 continue; | 2314 continue; |
| 2294 } else if (pos->pos() > current->Start().NextStart()) { | 2315 } else if (pos->pos() > current->Start().NextStart()) { |
| 2295 // Do not spill live range eagerly if use position that can benefit from | 2316 // Do not spill live range eagerly if use position that can benefit from |
| 2296 // the register is too close to the start of live range. | 2317 // the register is too close to the start of live range. |
| 2297 SpillBetween(current, current->Start(), pos->pos()); | 2318 SpillBetween(current, current->Start(), pos->pos()); |
| 2298 DCHECK(UnhandledIsSorted()); | 2319 DCHECK(UnhandledIsSorted()); |
| 2299 continue; | 2320 continue; |
| 2300 } | 2321 } |
| 2301 } | 2322 } |
| 2302 | 2323 |
| 2303 if (TryReuseSpillForPhi(current)) continue; | 2324 if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel())) |
| 2325 continue; | |
| 2304 | 2326 |
| 2305 for (size_t i = 0; i < active_live_ranges().size(); ++i) { | 2327 for (size_t i = 0; i < active_live_ranges().size(); ++i) { |
| 2306 auto cur_active = active_live_ranges()[i]; | 2328 auto cur_active = active_live_ranges()[i]; |
| 2307 if (cur_active->End() <= position) { | 2329 if (cur_active->End() <= position) { |
| 2308 ActiveToHandled(cur_active); | 2330 ActiveToHandled(cur_active); |
| 2309 --i; // The live range was removed from the list of active live ranges. | 2331 --i; // The live range was removed from the list of active live ranges. |
| 2310 } else if (!cur_active->Covers(position)) { | 2332 } else if (!cur_active->Covers(position)) { |
| 2311 ActiveToInactive(cur_active); | 2333 ActiveToInactive(cur_active); |
| 2312 --i; // The live range was removed from the list of active live ranges. | 2334 --i; // The live range was removed from the list of active live ranges. |
| 2313 } | 2335 } |
| (...skipping 19 matching lines...) Expand all Loading... | |
| 2333 } | 2355 } |
| 2334 } | 2356 } |
| 2335 } | 2357 } |
| 2336 | 2358 |
| 2337 | 2359 |
| 2338 void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range, | 2360 void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range, |
| 2339 int reg) { | 2361 int reg) { |
| 2340 data()->MarkAllocated(range->kind(), reg); | 2362 data()->MarkAllocated(range->kind(), reg); |
| 2341 range->set_assigned_register(reg); | 2363 range->set_assigned_register(reg); |
| 2342 range->SetUseHints(reg); | 2364 range->SetUseHints(reg); |
| 2343 if (range->is_phi()) { | 2365 if (range->IsTopLevel() && range->TopLevel()->is_phi()) { |
| 2344 data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg); | 2366 data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg); |
| 2345 } | 2367 } |
| 2346 } | 2368 } |
| 2347 | 2369 |
| 2348 | 2370 |
| 2349 void LinearScanAllocator::AddToActive(LiveRange* range) { | 2371 void LinearScanAllocator::AddToActive(LiveRange* range) { |
| 2350 TRACE("Add live range %d to active\n", range->id()); | 2372 TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(), |
| 2373 range->relative_id()); | |
| 2351 active_live_ranges().push_back(range); | 2374 active_live_ranges().push_back(range); |
| 2352 } | 2375 } |
| 2353 | 2376 |
| 2354 | 2377 |
| 2355 void LinearScanAllocator::AddToInactive(LiveRange* range) { | 2378 void LinearScanAllocator::AddToInactive(LiveRange* range) { |
| 2356 TRACE("Add live range %d to inactive\n", range->id()); | 2379 TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(), |
| 2380 range->relative_id()); | |
| 2357 inactive_live_ranges().push_back(range); | 2381 inactive_live_ranges().push_back(range); |
| 2358 } | 2382 } |
| 2359 | 2383 |
| 2360 | 2384 |
| 2361 void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) { | 2385 void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) { |
| 2362 if (range == nullptr || range->IsEmpty()) return; | 2386 if (range == nullptr || range->IsEmpty()) return; |
| 2363 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); | 2387 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); |
| 2364 DCHECK(allocation_finger_ <= range->Start()); | 2388 DCHECK(allocation_finger_ <= range->Start()); |
| 2365 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; | 2389 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; |
| 2366 --i) { | 2390 --i) { |
| 2367 auto cur_range = unhandled_live_ranges().at(i); | 2391 auto cur_range = unhandled_live_ranges().at(i); |
| 2368 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; | 2392 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; |
| 2369 TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); | 2393 TRACE("Add live range %d:%d to unhandled at %d\n", |
| 2394 range->TopLevel()->vreg(), range->relative_id(), i + 1); | |
| 2370 auto it = unhandled_live_ranges().begin() + (i + 1); | 2395 auto it = unhandled_live_ranges().begin() + (i + 1); |
| 2371 unhandled_live_ranges().insert(it, range); | 2396 unhandled_live_ranges().insert(it, range); |
| 2372 DCHECK(UnhandledIsSorted()); | 2397 DCHECK(UnhandledIsSorted()); |
| 2373 return; | 2398 return; |
| 2374 } | 2399 } |
| 2375 TRACE("Add live range %d to unhandled at start\n", range->id()); | 2400 TRACE("Add live range %d:%d to unhandled at start\n", |
| 2401 range->TopLevel()->vreg(), range->relative_id()); | |
| 2376 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); | 2402 unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); |
| 2377 DCHECK(UnhandledIsSorted()); | 2403 DCHECK(UnhandledIsSorted()); |
| 2378 } | 2404 } |
| 2379 | 2405 |
| 2380 | 2406 |
| 2381 void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) { | 2407 void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) { |
| 2382 if (range == nullptr || range->IsEmpty()) return; | 2408 if (range == nullptr || range->IsEmpty()) return; |
| 2383 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); | 2409 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); |
| 2384 TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); | 2410 TRACE("Add live range %d:%d to unhandled unsorted at end\n", |
| 2411 range->TopLevel()->vreg(), range->relative_id()); | |
| 2385 unhandled_live_ranges().push_back(range); | 2412 unhandled_live_ranges().push_back(range); |
| 2386 } | 2413 } |
| 2387 | 2414 |
| 2388 | 2415 |
| 2389 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { | 2416 static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { |
| 2390 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); | 2417 DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); |
| 2391 if (a->ShouldBeAllocatedBefore(b)) return false; | 2418 if (a->ShouldBeAllocatedBefore(b)) return false; |
| 2392 if (b->ShouldBeAllocatedBefore(a)) return true; | 2419 if (b->ShouldBeAllocatedBefore(a)) return true; |
| 2393 return a->id() < b->id(); | 2420 return a->TopLevel()->vreg() < b->TopLevel()->vreg(); |
| 2394 } | 2421 } |
| 2395 | 2422 |
| 2396 | 2423 |
| 2397 // Sort the unhandled live ranges so that the ranges to be processed first are | 2424 // Sort the unhandled live ranges so that the ranges to be processed first are |
| 2398 // at the end of the array list. This is convenient for the register allocation | 2425 // at the end of the array list. This is convenient for the register allocation |
| 2399 // algorithm because it is efficient to remove elements from the end. | 2426 // algorithm because it is efficient to remove elements from the end. |
| 2400 void LinearScanAllocator::SortUnhandled() { | 2427 void LinearScanAllocator::SortUnhandled() { |
| 2401 TRACE("Sort unhandled\n"); | 2428 TRACE("Sort unhandled\n"); |
| 2402 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), | 2429 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), |
| 2403 &UnhandledSortHelper); | 2430 &UnhandledSortHelper); |
| 2404 } | 2431 } |
| 2405 | 2432 |
| 2406 | 2433 |
| 2407 bool LinearScanAllocator::UnhandledIsSorted() { | 2434 bool LinearScanAllocator::UnhandledIsSorted() { |
| 2408 size_t len = unhandled_live_ranges().size(); | 2435 size_t len = unhandled_live_ranges().size(); |
| 2409 for (size_t i = 1; i < len; i++) { | 2436 for (size_t i = 1; i < len; i++) { |
| 2410 auto a = unhandled_live_ranges().at(i - 1); | 2437 auto a = unhandled_live_ranges().at(i - 1); |
| 2411 auto b = unhandled_live_ranges().at(i); | 2438 auto b = unhandled_live_ranges().at(i); |
| 2412 if (a->Start() < b->Start()) return false; | 2439 if (a->Start() < b->Start()) return false; |
| 2413 } | 2440 } |
| 2414 return true; | 2441 return true; |
| 2415 } | 2442 } |
| 2416 | 2443 |
| 2417 | 2444 |
| 2418 void LinearScanAllocator::ActiveToHandled(LiveRange* range) { | 2445 void LinearScanAllocator::ActiveToHandled(LiveRange* range) { |
| 2419 RemoveElement(&active_live_ranges(), range); | 2446 RemoveElement(&active_live_ranges(), range); |
| 2420 TRACE("Moving live range %d from active to handled\n", range->id()); | 2447 TRACE("Moving live range %d:%d from active to handled\n", |
| 2448 range->TopLevel()->vreg(), range->relative_id()); | |
| 2421 } | 2449 } |
| 2422 | 2450 |
| 2423 | 2451 |
| 2424 void LinearScanAllocator::ActiveToInactive(LiveRange* range) { | 2452 void LinearScanAllocator::ActiveToInactive(LiveRange* range) { |
| 2425 RemoveElement(&active_live_ranges(), range); | 2453 RemoveElement(&active_live_ranges(), range); |
| 2426 inactive_live_ranges().push_back(range); | 2454 inactive_live_ranges().push_back(range); |
| 2427 TRACE("Moving live range %d from active to inactive\n", range->id()); | 2455 TRACE("Moving live range %d:%d from active to inactive\n", |
| 2456 range->TopLevel()->vreg(), range->relative_id()); | |
| 2428 } | 2457 } |
| 2429 | 2458 |
| 2430 | 2459 |
| 2431 void LinearScanAllocator::InactiveToHandled(LiveRange* range) { | 2460 void LinearScanAllocator::InactiveToHandled(LiveRange* range) { |
| 2432 RemoveElement(&inactive_live_ranges(), range); | 2461 RemoveElement(&inactive_live_ranges(), range); |
| 2433 TRACE("Moving live range %d from inactive to handled\n", range->id()); | 2462 TRACE("Moving live range %d:%d from inactive to handled\n", |
| 2463 range->TopLevel()->vreg(), range->relative_id()); | |
| 2434 } | 2464 } |
| 2435 | 2465 |
| 2436 | 2466 |
| 2437 void LinearScanAllocator::InactiveToActive(LiveRange* range) { | 2467 void LinearScanAllocator::InactiveToActive(LiveRange* range) { |
| 2438 RemoveElement(&inactive_live_ranges(), range); | 2468 RemoveElement(&inactive_live_ranges(), range); |
| 2439 active_live_ranges().push_back(range); | 2469 active_live_ranges().push_back(range); |
| 2440 TRACE("Moving live range %d from inactive to active\n", range->id()); | 2470 TRACE("Moving live range %d:%d from inactive to active\n", |
| 2471 range->TopLevel()->vreg(), range->relative_id()); | |
| 2441 } | 2472 } |
| 2442 | 2473 |
| 2443 | 2474 |
| 2444 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) { | 2475 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) { |
| 2445 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2476 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2446 | 2477 |
| 2447 for (int i = 0; i < num_registers(); i++) { | 2478 for (int i = 0; i < num_registers(); i++) { |
| 2448 free_until_pos[i] = LifetimePosition::MaxPosition(); | 2479 free_until_pos[i] = LifetimePosition::MaxPosition(); |
| 2449 } | 2480 } |
| 2450 | 2481 |
| 2451 for (auto cur_active : active_live_ranges()) { | 2482 for (auto cur_active : active_live_ranges()) { |
| 2452 free_until_pos[cur_active->assigned_register()] = | 2483 free_until_pos[cur_active->assigned_register()] = |
| 2453 LifetimePosition::GapFromInstructionIndex(0); | 2484 LifetimePosition::GapFromInstructionIndex(0); |
| 2454 } | 2485 } |
| 2455 | 2486 |
| 2456 for (auto cur_inactive : inactive_live_ranges()) { | 2487 for (auto cur_inactive : inactive_live_ranges()) { |
| 2457 DCHECK(cur_inactive->End() > current->Start()); | 2488 DCHECK(cur_inactive->End() > current->Start()); |
| 2458 auto next_intersection = cur_inactive->FirstIntersection(current); | 2489 auto next_intersection = cur_inactive->FirstIntersection(current); |
| 2459 if (!next_intersection.IsValid()) continue; | 2490 if (!next_intersection.IsValid()) continue; |
| 2460 int cur_reg = cur_inactive->assigned_register(); | 2491 int cur_reg = cur_inactive->assigned_register(); |
| 2461 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); | 2492 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); |
| 2462 } | 2493 } |
| 2463 | 2494 |
| 2464 int hint_register; | 2495 int hint_register; |
| 2465 if (current->FirstHintPosition(&hint_register) != nullptr) { | 2496 if (current->FirstHintPosition(&hint_register) != nullptr) { |
| 2466 TRACE("Found reg hint %s (free until [%d) for live range %d (end %d[).\n", | 2497 TRACE( |
| 2467 RegisterName(hint_register), free_until_pos[hint_register].value(), | 2498 "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n", |
| 2468 current->id(), current->End().value()); | 2499 RegisterName(hint_register), free_until_pos[hint_register].value(), |
| 2500 current->TopLevel()->vreg(), current->relative_id(), | |
| 2501 current->End().value()); | |
| 2469 | 2502 |
| 2470 // The desired register is free until the end of the current live range. | 2503 // The desired register is free until the end of the current live range. |
| 2471 if (free_until_pos[hint_register] >= current->End()) { | 2504 if (free_until_pos[hint_register] >= current->End()) { |
| 2472 TRACE("Assigning preferred reg %s to live range %d\n", | 2505 TRACE("Assigning preferred reg %s to live range %d:%d\n", |
| 2473 RegisterName(hint_register), current->id()); | 2506 RegisterName(hint_register), current->TopLevel()->vreg(), |
| 2507 current->relative_id()); | |
| 2474 SetLiveRangeAssignedRegister(current, hint_register); | 2508 SetLiveRangeAssignedRegister(current, hint_register); |
| 2475 return true; | 2509 return true; |
| 2476 } | 2510 } |
| 2477 } | 2511 } |
| 2478 | 2512 |
| 2479 // Find the register which stays free for the longest time. | 2513 // Find the register which stays free for the longest time. |
| 2480 int reg = 0; | 2514 int reg = 0; |
| 2481 for (int i = 1; i < num_registers(); ++i) { | 2515 for (int i = 1; i < num_registers(); ++i) { |
| 2482 if (free_until_pos[i] > free_until_pos[reg]) { | 2516 if (free_until_pos[i] > free_until_pos[reg]) { |
| 2483 reg = i; | 2517 reg = i; |
| (...skipping 10 matching lines...) Expand all Loading... | |
| 2494 if (pos < current->End()) { | 2528 if (pos < current->End()) { |
| 2495 // Register reg is available at the range start but becomes blocked before | 2529 // Register reg is available at the range start but becomes blocked before |
| 2496 // the range end. Split current at position where it becomes blocked. | 2530 // the range end. Split current at position where it becomes blocked. |
| 2497 auto tail = SplitRangeAt(current, pos); | 2531 auto tail = SplitRangeAt(current, pos); |
| 2498 AddToUnhandledSorted(tail); | 2532 AddToUnhandledSorted(tail); |
| 2499 } | 2533 } |
| 2500 | 2534 |
| 2501 // Register reg is available at the range start and is free until | 2535 // Register reg is available at the range start and is free until |
| 2502 // the range end. | 2536 // the range end. |
| 2503 DCHECK(pos >= current->End()); | 2537 DCHECK(pos >= current->End()); |
| 2504 TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), | 2538 TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg), |
| 2505 current->id()); | 2539 current->TopLevel()->vreg(), current->relative_id()); |
| 2506 SetLiveRangeAssignedRegister(current, reg); | 2540 SetLiveRangeAssignedRegister(current, reg); |
| 2507 | 2541 |
| 2508 return true; | 2542 return true; |
| 2509 } | 2543 } |
| 2510 | 2544 |
| 2511 | 2545 |
| 2512 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { | 2546 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { |
| 2513 auto register_use = current->NextRegisterPosition(current->Start()); | 2547 auto register_use = current->NextRegisterPosition(current->Start()); |
| 2514 if (register_use == nullptr) { | 2548 if (register_use == nullptr) { |
| 2515 // There is no use in the current live range that requires a register. | 2549 // There is no use in the current live range that requires a register. |
| 2516 // We can just spill it. | 2550 // We can just spill it. |
| 2517 Spill(current); | 2551 Spill(current); |
| 2518 return; | 2552 return; |
| 2519 } | 2553 } |
| 2520 | 2554 |
| 2521 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2555 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2522 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; | 2556 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; |
| 2523 | 2557 |
| 2524 for (int i = 0; i < num_registers(); i++) { | 2558 for (int i = 0; i < num_registers(); i++) { |
| 2525 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); | 2559 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); |
| 2526 } | 2560 } |
| 2527 | 2561 |
| 2528 for (auto range : active_live_ranges()) { | 2562 for (auto range : active_live_ranges()) { |
| 2529 int cur_reg = range->assigned_register(); | 2563 int cur_reg = range->assigned_register(); |
| 2530 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { | 2564 if (range->TopLevel()->IsFixed() || |
| 2565 !range->CanBeSpilled(current->Start())) { | |
| 2531 block_pos[cur_reg] = use_pos[cur_reg] = | 2566 block_pos[cur_reg] = use_pos[cur_reg] = |
| 2532 LifetimePosition::GapFromInstructionIndex(0); | 2567 LifetimePosition::GapFromInstructionIndex(0); |
| 2533 } else { | 2568 } else { |
| 2534 auto next_use = | 2569 auto next_use = |
| 2535 range->NextUsePositionRegisterIsBeneficial(current->Start()); | 2570 range->NextUsePositionRegisterIsBeneficial(current->Start()); |
| 2536 if (next_use == nullptr) { | 2571 if (next_use == nullptr) { |
| 2537 use_pos[cur_reg] = range->End(); | 2572 use_pos[cur_reg] = range->End(); |
| 2538 } else { | 2573 } else { |
| 2539 use_pos[cur_reg] = next_use->pos(); | 2574 use_pos[cur_reg] = next_use->pos(); |
| 2540 } | 2575 } |
| 2541 } | 2576 } |
| 2542 } | 2577 } |
| 2543 | 2578 |
| 2544 for (auto range : inactive_live_ranges()) { | 2579 for (auto range : inactive_live_ranges()) { |
| 2545 DCHECK(range->End() > current->Start()); | 2580 DCHECK(range->End() > current->Start()); |
| 2546 auto next_intersection = range->FirstIntersection(current); | 2581 auto next_intersection = range->FirstIntersection(current); |
| 2547 if (!next_intersection.IsValid()) continue; | 2582 if (!next_intersection.IsValid()) continue; |
| 2548 int cur_reg = range->assigned_register(); | 2583 int cur_reg = range->assigned_register(); |
| 2549 if (range->IsFixed()) { | 2584 if (range->TopLevel()->IsFixed()) { |
| 2550 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); | 2585 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); |
| 2551 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); | 2586 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); |
| 2552 } else { | 2587 } else { |
| 2553 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); | 2588 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); |
| 2554 } | 2589 } |
| 2555 } | 2590 } |
| 2556 | 2591 |
| 2557 int reg = 0; | 2592 int reg = 0; |
| 2558 for (int i = 1; i < num_registers(); ++i) { | 2593 for (int i = 1; i < num_registers(); ++i) { |
| 2559 if (use_pos[i] > use_pos[reg]) { | 2594 if (use_pos[i] > use_pos[reg]) { |
| (...skipping 13 matching lines...) Expand all Loading... | |
| 2573 if (block_pos[reg] < current->End()) { | 2608 if (block_pos[reg] < current->End()) { |
| 2574 // Register becomes blocked before the current range end. Split before that | 2609 // Register becomes blocked before the current range end. Split before that |
| 2575 // position. | 2610 // position. |
| 2576 LiveRange* tail = | 2611 LiveRange* tail = |
| 2577 SplitBetween(current, current->Start(), block_pos[reg].Start()); | 2612 SplitBetween(current, current->Start(), block_pos[reg].Start()); |
| 2578 AddToUnhandledSorted(tail); | 2613 AddToUnhandledSorted(tail); |
| 2579 } | 2614 } |
| 2580 | 2615 |
| 2581 // Register reg is not blocked for the whole range. | 2616 // Register reg is not blocked for the whole range. |
| 2582 DCHECK(block_pos[reg] >= current->End()); | 2617 DCHECK(block_pos[reg] >= current->End()); |
| 2583 TRACE("Assigning blocked reg %s to live range %d\n", RegisterName(reg), | 2618 TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg), |
| 2584 current->id()); | 2619 current->TopLevel()->vreg(), current->relative_id()); |
| 2585 SetLiveRangeAssignedRegister(current, reg); | 2620 SetLiveRangeAssignedRegister(current, reg); |
| 2586 | 2621 |
| 2587 // This register was not free. Thus we need to find and spill | 2622 // This register was not free. Thus we need to find and spill |
| 2588 // parts of active and inactive live regions that use the same register | 2623 // parts of active and inactive live regions that use the same register |
| 2589 // at the same lifetime positions as current. | 2624 // at the same lifetime positions as current. |
| 2590 SplitAndSpillIntersecting(current); | 2625 SplitAndSpillIntersecting(current); |
| 2591 } | 2626 } |
| 2592 | 2627 |
| 2593 | 2628 |
| 2594 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) { | 2629 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) { |
| (...skipping 19 matching lines...) Expand all Loading... | |
| 2614 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos()); | 2649 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos()); |
| 2615 } | 2650 } |
| 2616 ActiveToHandled(range); | 2651 ActiveToHandled(range); |
| 2617 --i; | 2652 --i; |
| 2618 } | 2653 } |
| 2619 } | 2654 } |
| 2620 | 2655 |
| 2621 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) { | 2656 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) { |
| 2622 auto range = inactive_live_ranges()[i]; | 2657 auto range = inactive_live_ranges()[i]; |
| 2623 DCHECK(range->End() > current->Start()); | 2658 DCHECK(range->End() > current->Start()); |
| 2624 if (range->assigned_register() == reg && !range->IsFixed()) { | 2659 if (range->assigned_register() == reg && !range->TopLevel()->IsFixed()) { |
| 2625 LifetimePosition next_intersection = range->FirstIntersection(current); | 2660 LifetimePosition next_intersection = range->FirstIntersection(current); |
| 2626 if (next_intersection.IsValid()) { | 2661 if (next_intersection.IsValid()) { |
| 2627 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); | 2662 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); |
| 2628 if (next_pos == nullptr) { | 2663 if (next_pos == nullptr) { |
| 2629 SpillAfter(range, split_pos); | 2664 SpillAfter(range, split_pos); |
| 2630 } else { | 2665 } else { |
| 2631 next_intersection = Min(next_intersection, next_pos->pos()); | 2666 next_intersection = Min(next_intersection, next_pos->pos()); |
| 2632 SpillBetween(range, split_pos, next_intersection); | 2667 SpillBetween(range, split_pos, next_intersection); |
| 2633 } | 2668 } |
| 2634 InactiveToHandled(range); | 2669 InactiveToHandled(range); |
| 2635 --i; | 2670 --i; |
| 2636 } | 2671 } |
| 2637 } | 2672 } |
| 2638 } | 2673 } |
| 2639 } | 2674 } |
| 2640 | 2675 |
| 2641 | 2676 |
| 2642 bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) { | 2677 bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) { |
| 2643 if (range->IsChild() || !range->is_phi()) return false; | 2678 if (!range->is_phi()) return false; |
| 2679 | |
| 2644 DCHECK(!range->HasSpillOperand()); | 2680 DCHECK(!range->HasSpillOperand()); |
| 2645 auto phi_map_value = data()->GetPhiMapValueFor(range->id()); | 2681 auto phi_map_value = data()->GetPhiMapValueFor(range); |
| 2646 auto phi = phi_map_value->phi(); | 2682 auto phi = phi_map_value->phi(); |
| 2647 auto block = phi_map_value->block(); | 2683 auto block = phi_map_value->block(); |
| 2648 // Count the number of spilled operands. | 2684 // Count the number of spilled operands. |
| 2649 size_t spilled_count = 0; | 2685 size_t spilled_count = 0; |
| 2650 LiveRange* first_op = nullptr; | 2686 LiveRange* first_op = nullptr; |
| 2651 for (size_t i = 0; i < phi->operands().size(); i++) { | 2687 for (size_t i = 0; i < phi->operands().size(); i++) { |
| 2652 int op = phi->operands()[i]; | 2688 int op = phi->operands()[i]; |
| 2653 LiveRange* op_range = LiveRangeFor(op); | 2689 LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op); |
| 2654 if (!op_range->HasSpillRange()) continue; | 2690 if (!op_range->TopLevel()->HasSpillRange()) continue; |
| 2655 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); | 2691 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); |
| 2656 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( | 2692 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( |
| 2657 pred->last_instruction_index()); | 2693 pred->last_instruction_index()); |
| 2658 while (op_range != nullptr && !op_range->CanCover(pred_end)) { | 2694 while (op_range != nullptr && !op_range->CanCover(pred_end)) { |
| 2659 op_range = op_range->next(); | 2695 op_range = op_range->next(); |
| 2660 } | 2696 } |
| 2661 if (op_range != nullptr && op_range->spilled()) { | 2697 if (op_range != nullptr && op_range->spilled()) { |
| 2662 spilled_count++; | 2698 spilled_count++; |
| 2663 if (first_op == nullptr) { | 2699 if (first_op == nullptr) { |
| 2664 first_op = op_range->TopLevel(); | 2700 first_op = op_range->TopLevel(); |
| 2665 } | 2701 } |
| 2666 } | 2702 } |
| 2667 } | 2703 } |
| 2668 | 2704 |
| 2669 // Only continue if more than half of the operands are spilled. | 2705 // Only continue if more than half of the operands are spilled. |
| 2670 if (spilled_count * 2 <= phi->operands().size()) { | 2706 if (spilled_count * 2 <= phi->operands().size()) { |
| 2671 return false; | 2707 return false; |
| 2672 } | 2708 } |
| 2673 | 2709 |
| 2674 // Try to merge the spilled operands and count the number of merged spilled | 2710 // Try to merge the spilled operands and count the number of merged spilled |
| 2675 // operands. | 2711 // operands. |
| 2676 DCHECK(first_op != nullptr); | 2712 DCHECK(first_op != nullptr); |
| 2677 auto first_op_spill = first_op->GetSpillRange(); | 2713 auto first_op_spill = first_op->TopLevel()->GetSpillRange(); |
| 2678 size_t num_merged = 1; | 2714 size_t num_merged = 1; |
| 2679 for (size_t i = 1; i < phi->operands().size(); i++) { | 2715 for (size_t i = 1; i < phi->operands().size(); i++) { |
| 2680 int op = phi->operands()[i]; | 2716 int op = phi->operands()[i]; |
| 2681 auto op_range = LiveRangeFor(op); | 2717 auto op_range = data()->GetOrCreateLiveRangeFor(op); |
| 2682 if (!op_range->HasSpillRange()) continue; | 2718 if (!op_range->HasSpillRange()) continue; |
| 2683 auto op_spill = op_range->GetSpillRange(); | 2719 auto op_spill = op_range->GetSpillRange(); |
| 2684 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) { | 2720 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) { |
| 2685 num_merged++; | 2721 num_merged++; |
| 2686 } | 2722 } |
| 2687 } | 2723 } |
| 2688 | 2724 |
| 2689 // Only continue if enough operands could be merged to the | 2725 // Only continue if enough operands could be merged to the |
| 2690 // same spill slot. | 2726 // same spill slot. |
| 2691 if (num_merged * 2 <= phi->operands().size() || | 2727 if (num_merged * 2 <= phi->operands().size() || |
| (...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2764 } | 2800 } |
| 2765 } | 2801 } |
| 2766 | 2802 |
| 2767 | 2803 |
| 2768 SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data) | 2804 SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data) |
| 2769 : data_(data) {} | 2805 : data_(data) {} |
| 2770 | 2806 |
| 2771 | 2807 |
| 2772 void SpillSlotLocator::LocateSpillSlots() { | 2808 void SpillSlotLocator::LocateSpillSlots() { |
| 2773 auto code = data()->code(); | 2809 auto code = data()->code(); |
| 2774 for (auto range : data()->live_ranges()) { | 2810 for (TopLevelLiveRange* range : data()->live_ranges()) { |
| 2775 if (range == nullptr || range->IsEmpty() || range->IsChild()) continue; | 2811 if (range == nullptr || range->IsEmpty()) continue; |
| 2776 // We care only about ranges which spill in the frame. | 2812 // We care only about ranges which spill in the frame. |
| 2777 if (!range->HasSpillRange()) continue; | 2813 if (!range->HasSpillRange()) continue; |
| 2778 auto spills = range->spills_at_definition(); | 2814 auto spills = range->spills_at_definition(); |
| 2779 DCHECK_NOT_NULL(spills); | 2815 DCHECK_NOT_NULL(spills); |
| 2780 for (; spills != nullptr; spills = spills->next) { | 2816 for (; spills != nullptr; spills = spills->next) { |
| 2781 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame(); | 2817 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame(); |
| 2782 } | 2818 } |
| 2783 } | 2819 } |
| 2784 } | 2820 } |
| 2785 | 2821 |
| (...skipping 21 matching lines...) Expand all Loading... | |
| 2807 if (range->IsEmpty()) continue; | 2843 if (range->IsEmpty()) continue; |
| 2808 // Allocate a new operand referring to the spill slot. | 2844 // Allocate a new operand referring to the spill slot. |
| 2809 int byte_width = range->ByteWidth(); | 2845 int byte_width = range->ByteWidth(); |
| 2810 int index = data()->frame()->AllocateSpillSlot(byte_width); | 2846 int index = data()->frame()->AllocateSpillSlot(byte_width); |
| 2811 range->set_assigned_slot(index); | 2847 range->set_assigned_slot(index); |
| 2812 } | 2848 } |
| 2813 } | 2849 } |
| 2814 | 2850 |
| 2815 | 2851 |
| 2816 void OperandAssigner::CommitAssignment() { | 2852 void OperandAssigner::CommitAssignment() { |
| 2817 for (auto range : data()->live_ranges()) { | 2853 for (TopLevelLiveRange* top_range : data()->live_ranges()) { |
| 2818 if (range == nullptr || range->IsEmpty()) continue; | 2854 if (top_range == nullptr || top_range->IsEmpty()) continue; |
| 2819 InstructionOperand spill_operand; | 2855 InstructionOperand spill_operand; |
| 2820 if (range->TopLevel()->HasSpillOperand()) { | 2856 if (top_range->HasSpillOperand()) { |
| 2821 spill_operand = *range->TopLevel()->GetSpillOperand(); | 2857 spill_operand = *top_range->TopLevel()->GetSpillOperand(); |
| 2822 } else if (range->TopLevel()->HasSpillRange()) { | 2858 } else if (top_range->TopLevel()->HasSpillRange()) { |
| 2823 spill_operand = range->TopLevel()->GetSpillRangeOperand(); | 2859 spill_operand = top_range->TopLevel()->GetSpillRangeOperand(); |
| 2824 } | 2860 } |
| 2825 auto assigned = range->GetAssignedOperand(); | 2861 if (top_range->is_phi()) { |
| 2826 range->ConvertUsesToOperand(assigned, spill_operand); | 2862 data()->GetPhiMapValueFor(top_range)->CommitAssignment( |
| 2827 if (range->is_phi()) { | 2863 top_range->GetAssignedOperand()); |
| 2828 data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned); | |
| 2829 } | 2864 } |
| 2830 if (!range->IsChild() && !spill_operand.IsInvalid()) { | 2865 for (LiveRange* range = top_range; range != nullptr; |
| 2866 range = range->next()) { | |
| 2867 auto assigned = range->GetAssignedOperand(); | |
| 2868 range->ConvertUsesToOperand(assigned, spill_operand); | |
| 2869 } | |
| 2870 | |
| 2871 if (!spill_operand.IsInvalid()) { | |
| 2831 // If this top level range has a child spilled in a deferred block, we use | 2872 // If this top level range has a child spilled in a deferred block, we use |
| 2832 // the range and control flow connection mechanism instead of spilling at | 2873 // the range and control flow connection mechanism instead of spilling at |
| 2833 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow | 2874 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow |
| 2834 // phases. Normally, when we spill at definition, we do not insert a | 2875 // phases. Normally, when we spill at definition, we do not insert a |
| 2835 // connecting move when a successor child range is spilled - because the | 2876 // connecting move when a successor child range is spilled - because the |
| 2836 // spilled range picks up its value from the slot which was assigned at | 2877 // spilled range picks up its value from the slot which was assigned at |
| 2837 // definition. For ranges that are determined to spill only in deferred | 2878 // definition. For ranges that are determined to spill only in deferred |
| 2838 // blocks, we let ConnectLiveRanges and ResolveControlFlow insert such | 2879 // blocks, we let ConnectLiveRanges and ResolveControlFlow insert such |
| 2839 // moves between ranges. Because of how the ranges are split around | 2880 // moves between ranges. Because of how the ranges are split around |
| 2840 // deferred blocks, this amounts to spilling and filling inside such | 2881 // deferred blocks, this amounts to spilling and filling inside such |
| 2841 // blocks. | 2882 // blocks. |
| 2842 if (!range->TryCommitSpillInDeferredBlock(data()->code(), | 2883 if (!top_range->TryCommitSpillInDeferredBlock(data()->code(), |
| 2843 spill_operand)) { | 2884 spill_operand)) { |
| 2844 // Spill at definition if the range isn't spilled only in deferred | 2885 // Spill at definition if the range isn't spilled only in deferred |
| 2845 // blocks. | 2886 // blocks. |
| 2846 range->CommitSpillsAtDefinition( | 2887 top_range->CommitSpillsAtDefinition( |
| 2847 data()->code(), spill_operand, | 2888 data()->code(), spill_operand, |
| 2848 range->has_slot_use() || range->spilled()); | 2889 top_range->has_slot_use() || top_range->spilled()); |
| 2849 } | 2890 } |
| 2850 } | 2891 } |
| 2851 } | 2892 } |
| 2852 } | 2893 } |
| 2853 | 2894 |
| 2854 | 2895 |
| 2855 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data) | 2896 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data) |
| 2856 : data_(data) {} | 2897 : data_(data) {} |
| 2857 | 2898 |
| 2858 | 2899 |
| (...skipping 12 matching lines...) Expand all Loading... | |
| 2871 // Map all delayed references. | 2912 // Map all delayed references. |
| 2872 for (auto& delayed_reference : data()->delayed_references()) { | 2913 for (auto& delayed_reference : data()->delayed_references()) { |
| 2873 delayed_reference.map->RecordReference( | 2914 delayed_reference.map->RecordReference( |
| 2874 AllocatedOperand::cast(*delayed_reference.operand)); | 2915 AllocatedOperand::cast(*delayed_reference.operand)); |
| 2875 } | 2916 } |
| 2876 // Iterate over all safe point positions and record a pointer | 2917 // Iterate over all safe point positions and record a pointer |
| 2877 // for all spilled live ranges at this point. | 2918 // for all spilled live ranges at this point. |
| 2878 int last_range_start = 0; | 2919 int last_range_start = 0; |
| 2879 auto reference_maps = data()->code()->reference_maps(); | 2920 auto reference_maps = data()->code()->reference_maps(); |
| 2880 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); | 2921 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); |
| 2881 for (LiveRange* range : data()->live_ranges()) { | 2922 for (TopLevelLiveRange* range : data()->live_ranges()) { |
| 2882 if (range == nullptr) continue; | 2923 if (range == nullptr) continue; |
| 2883 // Iterate over the first parts of multi-part live ranges. | |
| 2884 if (range->IsChild()) continue; | |
| 2885 // Skip non-reference values. | 2924 // Skip non-reference values. |
| 2886 if (!data()->IsReference(range->id())) continue; | 2925 if (!data()->IsReference(range)) continue; |
| 2887 // Skip empty live ranges. | 2926 // Skip empty live ranges. |
| 2888 if (range->IsEmpty()) continue; | 2927 if (range->IsEmpty()) continue; |
| 2889 | 2928 |
| 2890 // Find the extent of the range and its children. | 2929 // Find the extent of the range and its children. |
| 2891 int start = range->Start().ToInstructionIndex(); | 2930 int start = range->Start().ToInstructionIndex(); |
| 2892 int end = 0; | 2931 int end = 0; |
| 2893 for (auto cur = range; cur != nullptr; cur = cur->next()) { | 2932 for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) { |
| 2894 auto this_end = cur->End(); | 2933 auto this_end = cur->End(); |
| 2895 if (this_end.ToInstructionIndex() > end) | 2934 if (this_end.ToInstructionIndex() > end) |
| 2896 end = this_end.ToInstructionIndex(); | 2935 end = this_end.ToInstructionIndex(); |
| 2897 DCHECK(cur->Start().ToInstructionIndex() >= start); | 2936 DCHECK(cur->Start().ToInstructionIndex() >= start); |
| 2898 } | 2937 } |
| 2899 | 2938 |
| 2900 // Most of the ranges are in order, but not all. Keep an eye on when they | 2939 // Most of the ranges are in order, but not all. Keep an eye on when they |
| 2901 // step backwards and reset the first_it so we don't miss any safe points. | 2940 // step backwards and reset the first_it so we don't miss any safe points. |
| 2902 if (start < last_range_start) first_it = reference_maps->begin(); | 2941 if (start < last_range_start) first_it = reference_maps->begin(); |
| 2903 last_range_start = start; | 2942 last_range_start = start; |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 2928 auto map = *it; | 2967 auto map = *it; |
| 2929 int safe_point = map->instruction_position(); | 2968 int safe_point = map->instruction_position(); |
| 2930 | 2969 |
| 2931 // The safe points are sorted so we can stop searching here. | 2970 // The safe points are sorted so we can stop searching here. |
| 2932 if (safe_point - 1 > end) break; | 2971 if (safe_point - 1 > end) break; |
| 2933 | 2972 |
| 2934 // Advance to the next active range that covers the current | 2973 // Advance to the next active range that covers the current |
| 2935 // safe point position. | 2974 // safe point position. |
| 2936 auto safe_point_pos = | 2975 auto safe_point_pos = |
| 2937 LifetimePosition::InstructionFromInstructionIndex(safe_point); | 2976 LifetimePosition::InstructionFromInstructionIndex(safe_point); |
| 2938 auto cur = range; | 2977 LiveRange* cur = range; |
| 2939 while (cur != nullptr && !cur->Covers(safe_point_pos)) { | 2978 while (cur != nullptr && !cur->Covers(safe_point_pos)) { |
| 2940 cur = cur->next(); | 2979 cur = cur->next(); |
| 2941 } | 2980 } |
| 2942 if (cur == nullptr) continue; | 2981 if (cur == nullptr) continue; |
| 2943 | 2982 |
| 2944 // Check if the live range is spilled and the safe point is after | 2983 // Check if the live range is spilled and the safe point is after |
| 2945 // the spill position. | 2984 // the spill position. |
| 2946 int spill_index = range->IsSpilledOnlyInDeferredBlocks() | 2985 int spill_index = range->IsSpilledOnlyInDeferredBlocks() |
| 2947 ? cur->Start().ToInstructionIndex() | 2986 ? cur->Start().ToInstructionIndex() |
| 2948 : range->spill_start_index(); | 2987 : range->spill_start_index(); |
| 2949 | 2988 |
| 2950 if (!spill_operand.IsInvalid() && safe_point >= spill_index) { | 2989 if (!spill_operand.IsInvalid() && safe_point >= spill_index) { |
| 2951 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n", | 2990 TRACE("Pointer for range %d (spilled at %d) at safe point %d\n", |
| 2952 range->id(), spill_index, safe_point); | 2991 range->vreg(), spill_index, safe_point); |
| 2953 map->RecordReference(AllocatedOperand::cast(spill_operand)); | 2992 map->RecordReference(AllocatedOperand::cast(spill_operand)); |
| 2954 } | 2993 } |
| 2955 | 2994 |
| 2956 if (!cur->spilled()) { | 2995 if (!cur->spilled()) { |
| 2957 TRACE( | 2996 TRACE( |
| 2958 "Pointer in register for range %d (start at %d) " | 2997 "Pointer in register for range %d:%d (start at %d) " |
| 2959 "at safe point %d\n", | 2998 "at safe point %d\n", |
| 2960 cur->id(), cur->Start().value(), safe_point); | 2999 range->vreg(), cur->relative_id(), cur->Start().value(), |
| 3000 safe_point); | |
| 2961 auto operand = cur->GetAssignedOperand(); | 3001 auto operand = cur->GetAssignedOperand(); |
| 2962 DCHECK(!operand.IsStackSlot()); | 3002 DCHECK(!operand.IsStackSlot()); |
| 2963 DCHECK_EQ(kRepTagged, AllocatedOperand::cast(operand).machine_type()); | 3003 DCHECK_EQ(kRepTagged, AllocatedOperand::cast(operand).machine_type()); |
| 2964 map->RecordReference(AllocatedOperand::cast(operand)); | 3004 map->RecordReference(AllocatedOperand::cast(operand)); |
| 2965 } | 3005 } |
| 2966 } | 3006 } |
| 2967 } | 3007 } |
| 2968 } | 3008 } |
| 2969 | 3009 |
| 2970 | 3010 |
| (...skipping 206 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3177 ->HasReferenceMap()); | 3217 ->HasReferenceMap()); |
| 3178 gap_index = pred->last_instruction_index(); | 3218 gap_index = pred->last_instruction_index(); |
| 3179 position = Instruction::END; | 3219 position = Instruction::END; |
| 3180 } | 3220 } |
| 3181 data()->AddGapMove(gap_index, position, pred_op, cur_op); | 3221 data()->AddGapMove(gap_index, position, pred_op, cur_op); |
| 3182 } | 3222 } |
| 3183 | 3223 |
| 3184 | 3224 |
| 3185 void LiveRangeConnector::ConnectRanges(Zone* local_zone) { | 3225 void LiveRangeConnector::ConnectRanges(Zone* local_zone) { |
| 3186 DelayedInsertionMap delayed_insertion_map(local_zone); | 3226 DelayedInsertionMap delayed_insertion_map(local_zone); |
| 3187 for (auto first_range : data()->live_ranges()) { | 3227 for (TopLevelLiveRange* top_range : data()->live_ranges()) { |
| 3188 if (first_range == nullptr || first_range->IsChild()) continue; | 3228 if (top_range == nullptr) continue; |
| 3189 bool connect_spilled = first_range->IsSpilledOnlyInDeferredBlocks(); | 3229 bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks(); |
| 3190 for (auto second_range = first_range->next(); second_range != nullptr; | 3230 LiveRange* first_range = top_range; |
| 3231 for (LiveRange *second_range = first_range->next(); second_range != nullptr; | |
| 3191 first_range = second_range, second_range = second_range->next()) { | 3232 first_range = second_range, second_range = second_range->next()) { |
| 3192 auto pos = second_range->Start(); | 3233 auto pos = second_range->Start(); |
| 3193 // Add gap move if the two live ranges touch and there is no block | 3234 // Add gap move if the two live ranges touch and there is no block |
| 3194 // boundary. | 3235 // boundary. |
| 3195 if (!connect_spilled && second_range->spilled()) continue; | 3236 if (!connect_spilled && second_range->spilled()) continue; |
| 3196 if (first_range->End() != pos) continue; | 3237 if (first_range->End() != pos) continue; |
| 3197 if (data()->IsBlockBoundary(pos) && | 3238 if (data()->IsBlockBoundary(pos) && |
| 3198 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) { | 3239 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) { |
| 3199 continue; | 3240 continue; |
| 3200 } | 3241 } |
| (...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3252 auto eliminate = moves->PrepareInsertAfter(move); | 3293 auto eliminate = moves->PrepareInsertAfter(move); |
| 3253 to_insert.push_back(move); | 3294 to_insert.push_back(move); |
| 3254 if (eliminate != nullptr) to_eliminate.push_back(eliminate); | 3295 if (eliminate != nullptr) to_eliminate.push_back(eliminate); |
| 3255 } | 3296 } |
| 3256 } | 3297 } |
| 3257 | 3298 |
| 3258 | 3299 |
| 3259 } // namespace compiler | 3300 } // namespace compiler |
| 3260 } // namespace internal | 3301 } // namespace internal |
| 3261 } // namespace v8 | 3302 } // namespace v8 |
| OLD | NEW |