Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_REGISTER_ALLOCATOR_H_ | 5 #ifndef V8_REGISTER_ALLOCATOR_H_ |
| 6 #define V8_REGISTER_ALLOCATOR_H_ | 6 #define V8_REGISTER_ALLOCATOR_H_ |
| 7 | 7 |
| 8 #include "src/compiler/instruction.h" | 8 #include "src/compiler/instruction.h" |
| 9 #include "src/ostreams.h" | 9 #include "src/ostreams.h" |
| 10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
| (...skipping 233 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 244 UsePositionHintType hint_type() const { | 244 UsePositionHintType hint_type() const { |
| 245 return HintTypeField::decode(flags_); | 245 return HintTypeField::decode(flags_); |
| 246 } | 246 } |
| 247 bool HasHint() const; | 247 bool HasHint() const; |
| 248 bool HintRegister(int* register_index) const; | 248 bool HintRegister(int* register_index) const; |
| 249 void ResolveHint(UsePosition* use_pos); | 249 void ResolveHint(UsePosition* use_pos); |
| 250 bool IsResolved() const { | 250 bool IsResolved() const { |
| 251 return hint_type() != UsePositionHintType::kUnresolved; | 251 return hint_type() != UsePositionHintType::kUnresolved; |
| 252 } | 252 } |
| 253 static UsePositionHintType HintTypeForOperand(const InstructionOperand& op); | 253 static UsePositionHintType HintTypeForOperand(const InstructionOperand& op); |
| 254 void dump_hint(std::ostream& os, const RegisterConfiguration* config); | |
| 254 | 255 |
| 255 private: | 256 private: |
| 256 typedef BitField<UsePositionType, 0, 2> TypeField; | 257 typedef BitField<UsePositionType, 0, 2> TypeField; |
| 257 typedef BitField<UsePositionHintType, 2, 3> HintTypeField; | 258 typedef BitField<UsePositionHintType, 2, 3> HintTypeField; |
| 258 typedef BitField<bool, 5, 1> RegisterBeneficialField; | 259 typedef BitField<bool, 5, 1> RegisterBeneficialField; |
| 259 typedef BitField<int32_t, 6, 6> AssignedRegisterField; | 260 typedef BitField<int32_t, 6, 6> AssignedRegisterField; |
| 260 | 261 |
| 261 InstructionOperand* const operand_; | 262 InstructionOperand* const operand_; |
| 262 void* hint_; | 263 void* hint_; |
| 263 UsePosition* next_; | 264 UsePosition* next_; |
| 264 LifetimePosition const pos_; | 265 LifetimePosition const pos_; |
| 265 uint32_t flags_; | 266 uint32_t flags_; |
| 266 | 267 |
| 267 DISALLOW_COPY_AND_ASSIGN(UsePosition); | 268 DISALLOW_COPY_AND_ASSIGN(UsePosition); |
| 268 }; | 269 }; |
| 269 | 270 |
| 270 | 271 |
| 271 class SpillRange; | 272 class SpillRange; |
| 273 class LiveRangeGroup; | |
| 272 | 274 |
| 273 | 275 |
| 274 // Representation of SSA values' live ranges as a collection of (continuous) | 276 // Representation of SSA values' live ranges as a collection of (continuous) |
| 275 // intervals over the instruction ordering. | 277 // intervals over the instruction ordering. |
| 276 class LiveRange final : public ZoneObject { | 278 class LiveRange final : public ZoneObject { |
| 277 public: | 279 public: |
| 278 explicit LiveRange(int id, MachineType machine_type); | 280 explicit LiveRange(int id, MachineType machine_type); |
| 279 | 281 |
| 280 UseInterval* first_interval() const { return first_interval_; } | 282 UseInterval* first_interval() const { return first_interval_; } |
| 281 UsePosition* first_pos() const { return first_pos_; } | 283 UsePosition* first_pos() const { return first_pos_; } |
| (...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 334 // range and which follows both start and last processed use position | 336 // range and which follows both start and last processed use position |
| 335 UsePosition* NextUsePositionRegisterIsBeneficial( | 337 UsePosition* NextUsePositionRegisterIsBeneficial( |
| 336 LifetimePosition start) const; | 338 LifetimePosition start) const; |
| 337 | 339 |
| 338 // Returns use position for which register is beneficial in this live | 340 // Returns use position for which register is beneficial in this live |
| 339 // range and which precedes start. | 341 // range and which precedes start. |
| 340 UsePosition* PreviousUsePositionRegisterIsBeneficial( | 342 UsePosition* PreviousUsePositionRegisterIsBeneficial( |
| 341 LifetimePosition start) const; | 343 LifetimePosition start) const; |
| 342 | 344 |
| 343 // Can this live range be spilled at this position. | 345 // Can this live range be spilled at this position. |
| 344 bool CanBeSpilled(LifetimePosition pos) const; | 346 bool CanBeSpilled(LifetimePosition pos) const { return CanBeSplit(pos); } |
| 347 | |
| 348 // Can this live range be split at this position. | |
| 349 bool CanBeSplit(LifetimePosition pos) const; | |
| 345 | 350 |
| 346 // Split this live range at the given position which must follow the start of | 351 // Split this live range at the given position which must follow the start of |
| 347 // the range. | 352 // the range. |
| 348 // All uses following the given position will be moved from this | 353 // All uses following the given position will be moved from this |
| 349 // live range to the result live range. | 354 // live range to the result live range. |
| 350 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); | 355 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); |
| 351 | 356 |
| 352 // Returns nullptr when no register is hinted, otherwise sets register_index. | 357 // Returns nullptr when no register is hinted, otherwise sets register_index. |
| 353 UsePosition* FirstHintPosition(int* register_index) const; | 358 UsePosition* FirstHintPosition(int* register_index) const; |
| 354 UsePosition* FirstHintPosition() const { | 359 UsePosition* FirstHintPosition() const { |
| (...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 421 const InstructionOperand& spill_op); | 426 const InstructionOperand& spill_op); |
| 422 void SetUseHints(int register_index); | 427 void SetUseHints(int register_index); |
| 423 void UnsetUseHints() { SetUseHints(kUnassignedRegister); } | 428 void UnsetUseHints() { SetUseHints(kUnassignedRegister); } |
| 424 | 429 |
| 425 struct SpillAtDefinitionList; | 430 struct SpillAtDefinitionList; |
| 426 | 431 |
| 427 SpillAtDefinitionList* spills_at_definition() const { | 432 SpillAtDefinitionList* spills_at_definition() const { |
| 428 return spills_at_definition_; | 433 return spills_at_definition_; |
| 429 } | 434 } |
| 430 | 435 |
| 436 LiveRangeGroup* group() { return group_; } | |
| 437 void SetGroup(LiveRangeGroup* grp) { group_ = grp; } | |
| 438 | |
| 439 float weight() const { | |
| 440 DCHECK(weight_ >= 0.0); | |
| 441 return weight_; | |
| 442 } | |
| 443 | |
| 444 unsigned size() { | |
| 445 if (size_ < 0) RecalculateSize(); | |
| 446 DCHECK(size_ > 0); | |
| 447 return static_cast<unsigned>(size_); | |
| 448 } | |
| 449 | |
| 450 void InvalidateWeightAndSize() { | |
| 451 weight_ = -1.0; | |
| 452 size_ = -1; | |
| 453 } | |
| 454 void RecalculateWeight(InstructionSequence* code); | |
| 455 LifetimePosition GetFirstSplittablePosition(); | |
| 456 | |
| 431 private: | 457 private: |
| 458 void RecalculateSize(); | |
| 432 void set_spill_type(SpillType value) { | 459 void set_spill_type(SpillType value) { |
| 433 bits_ = SpillTypeField::update(bits_, value); | 460 bits_ = SpillTypeField::update(bits_, value); |
| 434 } | 461 } |
| 435 | 462 |
| 436 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } | 463 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } |
| 437 | 464 |
| 438 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; | 465 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; |
| 439 void AdvanceLastProcessedMarker(UseInterval* to_start_of, | 466 void AdvanceLastProcessedMarker(UseInterval* to_start_of, |
| 440 LifetimePosition but_not_past) const; | 467 LifetimePosition but_not_past) const; |
| 441 | 468 |
| 442 typedef BitField<bool, 0, 1> SpilledField; | 469 typedef BitField<bool, 0, 1> SpilledField; |
| 443 typedef BitField<bool, 1, 1> HasSlotUseField; | 470 typedef BitField<bool, 1, 1> HasSlotUseField; |
| 444 typedef BitField<bool, 2, 1> IsPhiField; | 471 typedef BitField<bool, 2, 1> IsPhiField; |
| 445 typedef BitField<bool, 3, 1> IsNonLoopPhiField; | 472 typedef BitField<bool, 3, 1> IsNonLoopPhiField; |
| 446 typedef BitField<SpillType, 4, 2> SpillTypeField; | 473 typedef BitField<SpillType, 4, 2> SpillTypeField; |
| 447 typedef BitField<int32_t, 6, 6> AssignedRegisterField; | 474 typedef BitField<int32_t, 6, 6> AssignedRegisterField; |
| 448 typedef BitField<MachineType, 12, 15> MachineTypeField; | 475 typedef BitField<MachineType, 12, 15> MachineTypeField; |
| 449 | 476 |
| 450 int id_; | 477 int id_; |
| 451 int spill_start_index_; | 478 int spill_start_index_; |
| 452 uint32_t bits_; | 479 uint32_t bits_; |
| 453 UseInterval* last_interval_; | 480 UseInterval* last_interval_; |
| 454 UseInterval* first_interval_; | 481 UseInterval* first_interval_; |
| 455 UsePosition* first_pos_; | 482 UsePosition* first_pos_; |
| 456 LiveRange* parent_; | 483 LiveRange* parent_; |
| 457 LiveRange* next_; | 484 LiveRange* next_; |
| 485 float weight_; | |
|
Jarin
2015/06/03 15:06:16
Could you please split the fields into groups by type?
Mircea Trofin
2015/06/04 03:38:39
Actually, the greedy introduces 3 fields only; the rest were already present.
| |
| 486 int size_; | |
| 458 union { | 487 union { |
| 459 // Correct value determined by spill_type() | 488 // Correct value determined by spill_type() |
| 460 InstructionOperand* spill_operand_; | 489 InstructionOperand* spill_operand_; |
| 461 SpillRange* spill_range_; | 490 SpillRange* spill_range_; |
| 462 }; | 491 }; |
| 463 SpillAtDefinitionList* spills_at_definition_; | 492 SpillAtDefinitionList* spills_at_definition_; |
| 464 // This is used as a cache, it doesn't affect correctness. | 493 // This is used as a cache, it doesn't affect correctness. |
| 465 mutable UseInterval* current_interval_; | 494 mutable UseInterval* current_interval_; |
| 466 // This is used as a cache, it doesn't affect correctness. | 495 // This is used as a cache, it doesn't affect correctness. |
| 467 mutable UsePosition* last_processed_use_; | 496 mutable UsePosition* last_processed_use_; |
| 468 // This is used as a cache, it's invalid outside of BuildLiveRanges. | 497 // This is used as a cache, it's invalid outside of BuildLiveRanges. |
| 469 mutable UsePosition* current_hint_position_; | 498 mutable UsePosition* current_hint_position_; |
| 470 | 499 |
| 500 LiveRangeGroup* group_; | |
| 471 DISALLOW_COPY_AND_ASSIGN(LiveRange); | 501 DISALLOW_COPY_AND_ASSIGN(LiveRange); |
| 472 }; | 502 }; |
| 473 | 503 |
| 474 | 504 |
| 505 class LiveRangeGroup final : public ZoneObject { | |
| 506 public: | |
| 507 explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {} | |
| 508 ZoneSet<LiveRange*>& ranges() { return ranges_; } | |
| 509 | |
| 510 private: | |
| 511 ZoneSet<LiveRange*> ranges_; | |
| 512 | |
| 513 DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup); | |
| 514 }; | |
| 515 | |
| 475 struct PrintableLiveRange { | 516 struct PrintableLiveRange { |
| 476 const RegisterConfiguration* register_configuration_; | 517 const RegisterConfiguration* register_configuration_; |
| 477 const LiveRange* range_; | 518 const LiveRange* range_; |
| 478 }; | 519 }; |
| 479 | 520 |
| 480 | 521 |
| 481 std::ostream& operator<<(std::ostream& os, | 522 std::ostream& operator<<(std::ostream& os, |
| 482 const PrintableLiveRange& printable_range); | 523 const PrintableLiveRange& printable_range); |
| 483 | 524 |
| 484 | 525 |
| 485 class SpillRange final : public ZoneObject { | 526 class SpillRange final : public ZoneObject { |
| 486 public: | 527 public: |
| 487 static const int kUnassignedSlot = -1; | 528 static const int kUnassignedSlot = -1; |
| 488 SpillRange(LiveRange* range, Zone* zone); | 529 SpillRange(LiveRange* range, Zone* zone); |
| 489 | 530 |
| 490 UseInterval* interval() const { return use_interval_; } | 531 UseInterval* interval() const { return use_interval_; } |
| 491 // Currently, only 4 or 8 byte slots are supported. | 532 // Currently, only 4 or 8 byte slots are supported. |
| 492 int ByteWidth() const; | 533 int ByteWidth() const; |
| 493 bool IsEmpty() const { return live_ranges_.empty(); } | 534 bool IsEmpty() const { return live_ranges_.empty(); } |
| 494 bool TryMerge(SpillRange* other); | 535 bool TryMerge(SpillRange* other); |
| 495 | 536 |
| 496 void set_assigned_slot(int index) { | 537 void set_assigned_slot(int index) { |
| 497 DCHECK_EQ(kUnassignedSlot, assigned_slot_); | 538 DCHECK_EQ(kUnassignedSlot, assigned_slot_); |
| 498 assigned_slot_ = index; | 539 assigned_slot_ = index; |
| 499 } | 540 } |
| 541 bool IsSlotAssigned() { return assigned_slot_ != kUnassignedSlot; } | |
| 500 int assigned_slot() { | 542 int assigned_slot() { |
| 501 DCHECK_NE(kUnassignedSlot, assigned_slot_); | 543 DCHECK_NE(kUnassignedSlot, assigned_slot_); |
| 502 return assigned_slot_; | 544 return assigned_slot_; |
| 503 } | 545 } |
| 504 | 546 |
| 505 private: | 547 private: |
| 506 LifetimePosition End() const { return end_position_; } | 548 LifetimePosition End() const { return end_position_; } |
| 507 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } | 549 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } |
| 508 bool IsIntersectingWith(SpillRange* other) const; | 550 bool IsIntersectingWith(SpillRange* other) const; |
| 509 // Merge intervals, making sure the use intervals are sorted | 551 // Merge intervals, making sure the use intervals are sorted |
| 510 void MergeDisjointIntervals(UseInterval* other); | 552 void MergeDisjointIntervals(UseInterval* other); |
| 511 | 553 |
| 512 ZoneVector<LiveRange*> live_ranges_; | 554 ZoneVector<LiveRange*> live_ranges_; |
| 513 UseInterval* use_interval_; | 555 UseInterval* use_interval_; |
| 514 LifetimePosition end_position_; | 556 LifetimePosition end_position_; |
| 515 int assigned_slot_; | 557 int assigned_slot_; |
| 516 | 558 |
| 517 DISALLOW_COPY_AND_ASSIGN(SpillRange); | 559 DISALLOW_COPY_AND_ASSIGN(SpillRange); |
| 518 }; | 560 }; |
| 519 | 561 |
| 520 | 562 |
| 563 std::ostream& operator<<(std::ostream& os, SpillRange* range); | |
| 564 | |
| 565 | |
| 521 class RegisterAllocationData final : public ZoneObject { | 566 class RegisterAllocationData final : public ZoneObject { |
| 522 public: | 567 public: |
| 523 class PhiMapValue : public ZoneObject { | 568 class PhiMapValue : public ZoneObject { |
| 524 public: | 569 public: |
| 525 PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone); | 570 PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone); |
| 526 | 571 |
| 527 const PhiInstruction* phi() const { return phi_; } | 572 const PhiInstruction* phi() const { return phi_; } |
| 528 const InstructionBlock* block() const { return block_; } | 573 const InstructionBlock* block() const { return block_; } |
| 529 | 574 |
| 530 // For hinting. | 575 // For hinting. |
| (...skipping 190 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 721 InstructionOperand* operand) { | 766 InstructionOperand* operand) { |
| 722 Use(block_start, position, operand, nullptr, UsePositionHintType::kNone); | 767 Use(block_start, position, operand, nullptr, UsePositionHintType::kNone); |
| 723 } | 768 } |
| 724 | 769 |
| 725 RegisterAllocationData* const data_; | 770 RegisterAllocationData* const data_; |
| 726 ZoneMap<InstructionOperand*, UsePosition*> phi_hints_; | 771 ZoneMap<InstructionOperand*, UsePosition*> phi_hints_; |
| 727 | 772 |
| 728 DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder); | 773 DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder); |
| 729 }; | 774 }; |
| 730 | 775 |
| 776 struct AllocatorStats { | |
| 777 unsigned spills; | |
| 778 unsigned wins; | |
| 779 unsigned losses_no_eviction; | |
| 780 unsigned losses_after_eviction; | |
| 781 unsigned good_split_attempts; | |
| 782 unsigned good_split_successes; | |
| 783 unsigned good_split_smart_above; | |
| 784 unsigned good_split_smart_below; | |
| 785 unsigned good_split_above; | |
| 786 unsigned good_split_below; | |
| 787 unsigned groups_attempted; | |
| 788 unsigned groups_succeeded; | |
| 789 unsigned groups_allocated; | |
| 790 void reset() { *this = {0}; } | |
| 791 }; | |
| 792 | |
| 793 | |
| 794 std::ostream& operator<<(std::ostream& os, const AllocatorStats& stats); | |
| 795 | |
| 731 | 796 |
| 732 class RegisterAllocator : public ZoneObject { | 797 class RegisterAllocator : public ZoneObject { |
| 733 public: | 798 public: |
| 734 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); | 799 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); |
| 735 | 800 |
| 736 protected: | 801 protected: |
| 802 AllocatorStats stats_; | |
| 737 RegisterAllocationData* data() const { return data_; } | 803 RegisterAllocationData* data() const { return data_; } |
| 738 InstructionSequence* code() const { return data()->code(); } | 804 InstructionSequence* code() const { return data()->code(); } |
| 739 RegisterKind mode() const { return mode_; } | 805 RegisterKind mode() const { return mode_; } |
| 740 int num_registers() const { return num_registers_; } | 806 int num_registers() const { return num_registers_; } |
| 741 | 807 |
| 742 Zone* allocation_zone() const { return data()->allocation_zone(); } | 808 Zone* allocation_zone() const { return data()->allocation_zone(); } |
| 743 | 809 |
| 744 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } | 810 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } |
| 745 | 811 |
| 746 // Split the given range at the given position. | 812 // Split the given range at the given position. |
| (...skipping 13 matching lines...) Expand all Loading... | |
| 760 // loop covered by this interval or the latest possible position. | 826 // loop covered by this interval or the latest possible position. |
| 761 LifetimePosition FindOptimalSplitPos(LifetimePosition start, | 827 LifetimePosition FindOptimalSplitPos(LifetimePosition start, |
| 762 LifetimePosition end); | 828 LifetimePosition end); |
| 763 | 829 |
| 764 void Spill(LiveRange* range); | 830 void Spill(LiveRange* range); |
| 765 | 831 |
| 766 // If we are trying to spill a range inside the loop try to | 832 // If we are trying to spill a range inside the loop try to |
| 767 // hoist spill position out to the point just before the loop. | 833 // hoist spill position out to the point just before the loop. |
| 768 LifetimePosition FindOptimalSpillingPos(LiveRange* range, | 834 LifetimePosition FindOptimalSpillingPos(LiveRange* range, |
| 769 LifetimePosition pos); | 835 LifetimePosition pos); |
| 836 const char* RegisterName(int allocation_index) const; | |
| 770 | 837 |
| 771 private: | 838 private: |
| 772 RegisterAllocationData* const data_; | 839 RegisterAllocationData* const data_; |
| 773 const RegisterKind mode_; | 840 const RegisterKind mode_; |
| 774 const int num_registers_; | 841 const int num_registers_; |
| 775 | 842 |
| 776 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); | 843 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); |
| 777 }; | 844 }; |
| 778 | 845 |
| 779 | 846 |
| 780 class LinearScanAllocator final : public RegisterAllocator { | 847 class LinearScanAllocator final : public RegisterAllocator { |
| 781 public: | 848 public: |
| 782 LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind, | 849 LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind, |
| 783 Zone* local_zone); | 850 Zone* local_zone); |
| 784 | 851 |
| 785 // Phase 4: compute register assignments. | 852 // Phase 4: compute register assignments. |
| 786 void AllocateRegisters(); | 853 void AllocateRegisters(); |
| 787 | 854 |
| 788 private: | 855 private: |
| 789 const char* RegisterName(int allocation_index) const; | |
| 790 | |
| 791 ZoneVector<LiveRange*>& unhandled_live_ranges() { | 856 ZoneVector<LiveRange*>& unhandled_live_ranges() { |
| 792 return unhandled_live_ranges_; | 857 return unhandled_live_ranges_; |
| 793 } | 858 } |
| 794 ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; } | 859 ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; } |
| 795 ZoneVector<LiveRange*>& inactive_live_ranges() { | 860 ZoneVector<LiveRange*>& inactive_live_ranges() { |
| 796 return inactive_live_ranges_; | 861 return inactive_live_ranges_; |
| 797 } | 862 } |
| 798 | 863 |
| 799 void SetLiveRangeAssignedRegister(LiveRange* range, int reg); | 864 void SetLiveRangeAssignedRegister(LiveRange* range, int reg); |
| 800 | 865 |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 833 ZoneVector<LiveRange*> active_live_ranges_; | 898 ZoneVector<LiveRange*> active_live_ranges_; |
| 834 ZoneVector<LiveRange*> inactive_live_ranges_; | 899 ZoneVector<LiveRange*> inactive_live_ranges_; |
| 835 | 900 |
| 836 #ifdef DEBUG | 901 #ifdef DEBUG |
| 837 LifetimePosition allocation_finger_; | 902 LifetimePosition allocation_finger_; |
| 838 #endif | 903 #endif |
| 839 | 904 |
| 840 DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator); | 905 DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator); |
| 841 }; | 906 }; |
| 842 | 907 |
| 908 | |
| 909 // Storage detail for CoalescedLiveRanges. | |
| 910 struct AllocatedInterval { | |
| 911 LifetimePosition start; | |
| 912 LifetimePosition end; | |
| 913 LiveRange* range; | |
| 914 struct Comparer : public std::binary_function<AllocatedInterval, | |
| 915 AllocatedInterval, bool> { | |
| 916 bool operator()(const AllocatedInterval& x, | |
| 917 const AllocatedInterval& y) const { | |
| 918 return x.start < y.start; | |
| 919 } | |
| 920 }; | |
| 921 }; | |
| 922 | |
| 923 | |
| 924 typedef ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>::const_iterator | |
| 925 interval_iterator; | |
| 926 | |
| 927 | |
| 843 class CoalescedLiveRanges; | 928 class CoalescedLiveRanges; |
| 844 | 929 |
| 845 | 930 |
| 931 // Iterator over allocated live ranges (CoalescedLiveRanges) that conflict with | |
| 932 // a given one. See CoalescedLiveRanges::first_conflict. | |
| 933 // Note that iterating may produce non-consecutively repeating live ranges. | |
| 934 struct conflict_iterator { | |
| 935 friend class CoalescedLiveRanges; | |
| 936 conflict_iterator() : query_(nullptr), storage_(nullptr) {} | |
| 937 | |
| 938 LiveRange* operator->() { return pos_->range; } | |
| 939 | |
| 940 LiveRange* operator*() { return pos_->range; } | |
|
Jarin
2015/06/03 15:06:16
Should not the star operator return a reference rather than a value?
Mircea Trofin
2015/06/04 03:38:39
It returns the item "pointed at" by the iterator, which here is a LiveRange*.
Jarin
2015/06/08 13:32:49
Normally, operator* for T::iterator returns T&; operator-> returns T*.
| |
| 941 | |
| 942 conflict_iterator& operator++(); | |
| 943 bool IsValid() const { return storage_ != nullptr; } | |
| 944 | |
| 945 bool operator==(const conflict_iterator& other) { | |
| 946 return IsValid() && other.IsValid() && query_ == other.query_ && | |
| 947 pos_ == other.pos_ && storage_ == other.storage_; | |
| 948 } | |
| 949 | |
| 950 bool operator!=(const conflict_iterator& other) { | |
|
Jarin
2015/06/03 15:06:16
Why is this not simply the negation of the equality operator?
Mircea Trofin
2015/06/04 03:38:39
Done.
| |
| 951 return IsValid() ^ other.IsValid() || query_ != other.query_ || | |
| 952 storage_ != other.storage_ || | |
| 953 (storage_ != nullptr && pos_ != other.pos_); | |
| 954 } | |
| 955 | |
| 956 private: | |
| 957 static bool Intersects(LifetimePosition a_start, LifetimePosition a_end, | |
| 958 LifetimePosition b_start, LifetimePosition b_end) { | |
| 959 if (a_start == b_start) return true; | |
| 960 if (a_start < b_start) return a_end > b_start; | |
| 961 return Intersects(b_start, b_end, a_start, a_end); | |
| 962 } | |
| 963 | |
| 964 conflict_iterator(UseInterval* q, | |
| 965 ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>* s, | |
| 966 interval_iterator& it) | |
| 967 : query_(nullptr), storage_(nullptr), pos_(it) {} | |
| 968 | |
| 969 conflict_iterator( | |
| 970 UseInterval* query, | |
| 971 ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>* storage) | |
| 972 : query_(query), storage_(storage) { | |
| 973 InitializeForNewQuery(); | |
| 974 } | |
| 975 | |
| 976 void MoveToEnd() { | |
| 977 query_ = nullptr; | |
| 978 storage_ = nullptr; | |
| 979 } | |
| 980 | |
| 981 void InitializeForNewQuery(); | |
| 982 | |
| 983 AllocatedInterval AsAllocatedInterval(LifetimePosition pos) { | |
| 984 return {pos, LifetimePosition::Invalid(), nullptr}; | |
| 985 } | |
| 986 | |
| 987 UseInterval* query_; | |
| 988 ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>* storage_; | |
| 989 interval_iterator pos_; | |
| 990 }; | |
| 991 | |
| 992 | |
| 846 // A variant of the LLVM Greedy Register Allocator. See | 993 // A variant of the LLVM Greedy Register Allocator. See |
| 847 // http://blog.llvm.org/2011/09/greedy-register-allocation-in-llvm-30.html | 994 // http://blog.llvm.org/2011/09/greedy-register-allocation-in-llvm-30.html |
| 848 class GreedyAllocator final : public RegisterAllocator { | 995 class GreedyAllocator final : public RegisterAllocator { |
| 849 public: | 996 public: |
| 850 explicit GreedyAllocator(RegisterAllocationData* data, RegisterKind kind, | 997 explicit GreedyAllocator(RegisterAllocationData* data, RegisterKind kind, |
| 851 Zone* local_zone); | 998 Zone* local_zone); |
| 852 | 999 |
| 853 void AllocateRegisters(); | 1000 void AllocateRegisters(); |
| 854 | 1001 |
| 855 private: | 1002 private: |
| 856 LifetimePosition GetSplittablePos(LifetimePosition pos); | 1003 void AllocateRange(LiveRange* current); |
| 1004 void AllocateGroup(LiveRangeGroup* grp); | |
| 1005 bool TryAllocateGroup(LiveRangeGroup* grp); | |
| 1006 bool TryAllocateGroupAtRegister(unsigned reg, LiveRangeGroup* grp); | |
| 1007 | |
| 1008 struct PQueueElem { | |
| 1009 bool IsGroup() { return is_grp_; } | |
| 1010 explicit PQueueElem(LiveRange* range) : is_grp_(false) { | |
| 1011 storage_.range_ = range; | |
| 1012 } | |
| 1013 | |
| 1014 explicit PQueueElem(LiveRangeGroup* grp) : is_grp_(true) { | |
| 1015 storage_.grp_ = grp; | |
| 1016 } | |
| 1017 | |
| 1018 | |
| 1019 LiveRange* get_range() { | |
| 1020 if (is_grp_) return nullptr; | |
| 1021 return storage_.range_; | |
| 1022 } | |
| 1023 | |
| 1024 | |
| 1025 LiveRangeGroup* get_group() { | |
| 1026 if (is_grp_) return storage_.grp_; | |
| 1027 return nullptr; | |
| 1028 } | |
| 1029 | |
| 1030 private: | |
| 1031 bool is_grp_; | |
| 1032 union { | |
| 1033 LiveRange* range_; | |
| 1034 LiveRangeGroup* grp_; | |
| 1035 } storage_; | |
| 1036 }; | |
| 1037 | |
| 1038 void GroupAndEnqueue(); | |
| 857 const RegisterConfiguration* config() const { return data()->config(); } | 1039 const RegisterConfiguration* config() const { return data()->config(); } |
| 858 Zone* local_zone() const { return local_zone_; } | 1040 Zone* local_zone() const { return local_zone_; } |
| 859 bool TryReuseSpillForPhi(LiveRange* range); | |
| 860 int GetHintedRegister(LiveRange* range); | |
| 861 | 1041 |
| 862 typedef ZonePriorityQueue<std::pair<unsigned, LiveRange*>> PQueue; | 1042 struct PQueueElemComparer |
| 1043 : public std::binary_function<std::pair<unsigned, PQueueElem>, | |
| 1044 std::pair<unsigned, PQueueElem>, bool> { | |
| 1045 bool operator()(const std::pair<unsigned, PQueueElem>& x, | |
| 1046 const std::pair<unsigned, PQueueElem>& y) const { | |
| 1047 return x.first < y.first; | |
| 1048 } | |
| 1049 }; | |
| 863 | 1050 |
| 864 unsigned GetLiveRangeSize(LiveRange* range); | 1051 typedef ZonePriorityQueue<std::pair<unsigned, PQueueElem>, PQueueElemComparer> |
| 1052 PQueue; | |
| 1053 | |
| 865 void Enqueue(LiveRange* range); | 1054 void Enqueue(LiveRange* range); |
| 1055 void Enqueue(LiveRangeGroup* group); | |
| 866 | 1056 |
| 867 void Evict(LiveRange* range); | 1057 void Evict(LiveRange* range); |
| 868 float CalculateSpillWeight(LiveRange* range); | 1058 void EvictAll(int reg, const conflict_iterator& first_conflict); |
| 869 float CalculateMaxSpillWeight(const ZoneSet<LiveRange*>& ranges); | |
| 870 | 1059 |
| 1060 // Find the register with the minimum spill weight. If that is still higher | |
| 1061 // or equal to competing, return -1. | |
| 1062 int FindRegisterToEvictForRange( | |
| 1063 conflict_iterator | |
| 1064 all_conflicts[RegisterConfiguration::kMaxDoubleRegisters], | |
| 1065 float competing); | |
| 871 | 1066 |
| 872 bool TryAllocate(LiveRange* current, ZoneSet<LiveRange*>* conflicting); | 1067 // Same as above, for all the registers in a group. |
| 873 bool TryAllocatePhysicalRegister(unsigned reg_id, LiveRange* range, | 1068 int FindRegisterToEvictForGroup(LiveRangeGroup* grp, float competing); |
| 874 ZoneSet<LiveRange*>* conflicting); | 1069 |
| 875 bool HandleSpillOperands(LiveRange* range); | 1070 template <typename TIter> |
| 876 void AllocateBlockedRange(LiveRange* current, LifetimePosition pos, | 1071 float CalculateMaxSpillWeight(const TIter& begin, const TIter& end); |
| 877 bool spill); | 1072 |
| 1073 bool HandleSpillOperands(LiveRange* range, LiveRange** remaining); | |
| 1074 void HandleBlockedRange(LiveRange* current); | |
| 1075 bool TrySplitBeforeLoops(LiveRange* current); | |
| 1076 bool TrySplitAfterLoops(LiveRange* current); | |
| 1077 bool TrySplitAroundCalls(LiveRange* range); | |
| 878 | 1078 |
| 879 LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start, | 1079 LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start, |
| 880 LifetimePosition until, LifetimePosition end); | 1080 LifetimePosition until, LifetimePosition end); |
| 881 void AssignRangeToRegister(int reg_id, LiveRange* range); | 1081 void AssignRangeToRegister(int reg_id, LiveRange* range); |
| 882 | 1082 |
| 883 LifetimePosition FindProgressingSplitPosition(LiveRange* range, | |
| 884 bool* is_spill_pos); | |
| 885 | |
| 886 Zone* local_zone_; | 1083 Zone* local_zone_; |
| 887 ZoneVector<CoalescedLiveRanges*> allocations_; | 1084 ZoneVector<CoalescedLiveRanges*> allocations_; |
| 888 PQueue queue_; | 1085 PQueue queue_; |
| 889 DISALLOW_COPY_AND_ASSIGN(GreedyAllocator); | 1086 DISALLOW_COPY_AND_ASSIGN(GreedyAllocator); |
| 890 }; | 1087 }; |
| 891 | 1088 |
| 892 | 1089 |
| 893 class SpillSlotLocator final : public ZoneObject { | 1090 class SpillSlotLocator final : public ZoneObject { |
| 894 public: | 1091 public: |
| 895 explicit SpillSlotLocator(RegisterAllocationData* data); | 1092 explicit SpillSlotLocator(RegisterAllocationData* data); |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 966 RegisterAllocationData* const data_; | 1163 RegisterAllocationData* const data_; |
| 967 | 1164 |
| 968 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); | 1165 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); |
| 969 }; | 1166 }; |
| 970 | 1167 |
| 971 } // namespace compiler | 1168 } // namespace compiler |
| 972 } // namespace internal | 1169 } // namespace internal |
| 973 } // namespace v8 | 1170 } // namespace v8 |
| 974 | 1171 |
| 975 #endif // V8_REGISTER_ALLOCATOR_H_ | 1172 #endif // V8_REGISTER_ALLOCATOR_H_ |
| OLD | NEW |