| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_REGISTER_ALLOCATOR_H_ | 5 #ifndef V8_REGISTER_ALLOCATOR_H_ |
| 6 #define V8_REGISTER_ALLOCATOR_H_ | 6 #define V8_REGISTER_ALLOCATOR_H_ |
| 7 | 7 |
| 8 #include "src/compiler/instruction.h" | 8 #include "src/compiler/instruction.h" |
| 9 #include "src/ostreams.h" | 9 #include "src/ostreams.h" |
| 10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
| (...skipping 233 matching lines...) |
| 244 UsePositionHintType hint_type() const { | 244 UsePositionHintType hint_type() const { |
| 245 return HintTypeField::decode(flags_); | 245 return HintTypeField::decode(flags_); |
| 246 } | 246 } |
| 247 bool HasHint() const; | 247 bool HasHint() const; |
| 248 bool HintRegister(int* register_index) const; | 248 bool HintRegister(int* register_index) const; |
| 249 void ResolveHint(UsePosition* use_pos); | 249 void ResolveHint(UsePosition* use_pos); |
| 250 bool IsResolved() const { | 250 bool IsResolved() const { |
| 251 return hint_type() != UsePositionHintType::kUnresolved; | 251 return hint_type() != UsePositionHintType::kUnresolved; |
| 252 } | 252 } |
| 253 static UsePositionHintType HintTypeForOperand(const InstructionOperand& op); | 253 static UsePositionHintType HintTypeForOperand(const InstructionOperand& op); |
| 254 void dump_hint(std::ostream& os, const RegisterConfiguration* config); |
| 254 | 255 |
| 255 private: | 256 private: |
| 256 typedef BitField<UsePositionType, 0, 2> TypeField; | 257 typedef BitField<UsePositionType, 0, 2> TypeField; |
| 257 typedef BitField<UsePositionHintType, 2, 3> HintTypeField; | 258 typedef BitField<UsePositionHintType, 2, 3> HintTypeField; |
| 258 typedef BitField<bool, 5, 1> RegisterBeneficialField; | 259 typedef BitField<bool, 5, 1> RegisterBeneficialField; |
| 259 typedef BitField<int32_t, 6, 6> AssignedRegisterField; | 260 typedef BitField<int32_t, 6, 6> AssignedRegisterField; |
| 260 | 261 |
| 261 InstructionOperand* const operand_; | 262 InstructionOperand* const operand_; |
| 262 void* hint_; | 263 void* hint_; |
| 263 UsePosition* next_; | 264 UsePosition* next_; |
| 264 LifetimePosition const pos_; | 265 LifetimePosition const pos_; |
| 265 uint32_t flags_; | 266 uint32_t flags_; |
| 266 | 267 |
| 267 DISALLOW_COPY_AND_ASSIGN(UsePosition); | 268 DISALLOW_COPY_AND_ASSIGN(UsePosition); |
| 268 }; | 269 }; |
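The four BitField typedefs above pack the use-position type, the hint type, the "register beneficial" flag, and the assigned register into the single uint32_t flags_ word, in bits [0,2), [2,5), bit 5, and bits [6,12) respectively. The standalone sketch below does not use V8's BitField template; EncodeFlags and DecodeHintType are illustrative names only, showing the equivalent shift-and-mask layout.

// Illustrative re-derivation of the UsePosition flag layout; not V8 code.
#include <cassert>
#include <cstdint>

// type: bits [0,2), hint_type: bits [2,5), register_beneficial: bit 5,
// assigned register: bits [6,12).
constexpr uint32_t EncodeFlags(uint32_t type, uint32_t hint_type,
                               bool register_beneficial, uint32_t reg) {
  return (type & 0x3) | ((hint_type & 0x7) << 2) |
         (static_cast<uint32_t>(register_beneficial) << 5) |
         ((reg & 0x3F) << 6);
}

constexpr uint32_t DecodeHintType(uint32_t flags) { return (flags >> 2) & 0x7; }

int main() {
  uint32_t flags = EncodeFlags(/*type=*/1, /*hint_type=*/4,
                               /*register_beneficial=*/true, /*reg=*/10);
  assert(DecodeHintType(flags) == 4);  // round-trips the hint type field
  return 0;
}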
| 269 | 270 |
| 270 | 271 |
| 271 class SpillRange; | 272 class SpillRange; |
| 273 class LiveRangeGroup; |
| 272 | 274 |
| 273 | 275 |
| 274 // Representation of SSA values' live ranges as a collection of (continuous) | 276 // Representation of SSA values' live ranges as a collection of (continuous) |
| 275 // intervals over the instruction ordering. | 277 // intervals over the instruction ordering. |
| 276 class LiveRange final : public ZoneObject { | 278 class LiveRange final : public ZoneObject { |
| 277 public: | 279 public: |
| 278 explicit LiveRange(int id, MachineType machine_type); | 280 explicit LiveRange(int id, MachineType machine_type); |
| 279 | 281 |
| 280 UseInterval* first_interval() const { return first_interval_; } | 282 UseInterval* first_interval() const { return first_interval_; } |
| 281 UsePosition* first_pos() const { return first_pos_; } | 283 UsePosition* first_pos() const { return first_pos_; } |
| (...skipping 52 matching lines...) |
| 334 // range and which follows both start and last processed use position | 336 // range and which follows both start and last processed use position |
| 335 UsePosition* NextUsePositionRegisterIsBeneficial( | 337 UsePosition* NextUsePositionRegisterIsBeneficial( |
| 336 LifetimePosition start) const; | 338 LifetimePosition start) const; |
| 337 | 339 |
| 338 // Returns use position for which register is beneficial in this live | 340 // Returns use position for which register is beneficial in this live |
| 339 // range and which precedes start. | 341 // range and which precedes start. |
| 340 UsePosition* PreviousUsePositionRegisterIsBeneficial( | 342 UsePosition* PreviousUsePositionRegisterIsBeneficial( |
| 341 LifetimePosition start) const; | 343 LifetimePosition start) const; |
| 342 | 344 |
| 343 // Can this live range be spilled at this position. | 345 // Can this live range be spilled at this position. |
| 344 bool CanBeSpilled(LifetimePosition pos) const; | 346 bool CanBeSpilled(LifetimePosition pos) const { return CanBeSplit(pos); } |
| 347 |
| 348 // Can this live range be split at this position. |
| 349 bool CanBeSplit(LifetimePosition pos) const; |
| 345 | 350 |
| 346 // Split this live range at the given position which must follow the start of | 351 // Split this live range at the given position which must follow the start of |
| 347 // the range. | 352 // the range. |
| 348 // All uses following the given position will be moved from this | 353 // All uses following the given position will be moved from this |
| 349 // live range to the result live range. | 354 // live range to the result live range. |
| 350 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); | 355 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); |
| 351 | 356 |
| 352 // Returns nullptr when no register is hinted, otherwise sets register_index. | 357 // Returns nullptr when no register is hinted, otherwise sets register_index. |
| 353 UsePosition* FirstHintPosition(int* register_index) const; | 358 UsePosition* FirstHintPosition(int* register_index) const; |
| 354 UsePosition* FirstHintPosition() const { | 359 UsePosition* FirstHintPosition() const { |
| (...skipping 66 matching lines...) |
| 421 const InstructionOperand& spill_op); | 426 const InstructionOperand& spill_op); |
| 422 void SetUseHints(int register_index); | 427 void SetUseHints(int register_index); |
| 423 void UnsetUseHints() { SetUseHints(kUnassignedRegister); } | 428 void UnsetUseHints() { SetUseHints(kUnassignedRegister); } |
| 424 | 429 |
| 425 struct SpillAtDefinitionList; | 430 struct SpillAtDefinitionList; |
| 426 | 431 |
| 427 SpillAtDefinitionList* spills_at_definition() const { | 432 SpillAtDefinitionList* spills_at_definition() const { |
| 428 return spills_at_definition_; | 433 return spills_at_definition_; |
| 429 } | 434 } |
| 430 | 435 |
| 436 LiveRangeGroup* group() { return group_; } |
| 437 void SetGroup(LiveRangeGroup* grp) { group_ = grp; } |
| 438 |
| 439 float weight() const { |
| 440 DCHECK(weight_ >= 0.0f); |
| 441 return weight_; |
| 442 } |
| 443 |
| 444 unsigned size() { |
| 445 if (size_ <= kInvalidSize) RecalculateSize(); |
| 446 DCHECK(size_ > 0); |
| 447 return static_cast<unsigned>(size_); |
| 448 } |
| 449 |
| 450 void InvalidateWeightAndSize() { |
| 451 weight_ = kInvalidWeight; |
| 452 size_ = kInvalidSize; |
| 453 } |
| 454 void RecalculateWeight(InstructionSequence* code); |
| 455 LifetimePosition GetFirstSplittablePosition(); |
| 456 |
| 457 static const float kMaxWeight; |
| 458 |
| 431 private: | 459 private: |
| 460 static const float kUseInLoopWeightMultiplier; |
| 461 static const float kPreferredRegisterWeightMultiplier; |
| 462 static const float kInvalidWeight; |
| 463 static const int kInvalidSize = 0; |
| 464 |
| 465 void RecalculateSize(); |
| 432 void set_spill_type(SpillType value) { | 466 void set_spill_type(SpillType value) { |
| 433 bits_ = SpillTypeField::update(bits_, value); | 467 bits_ = SpillTypeField::update(bits_, value); |
| 434 } | 468 } |
| 435 | 469 |
| 436 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } | 470 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } |
| 437 | 471 |
| 438 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; | 472 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; |
| 439 void AdvanceLastProcessedMarker(UseInterval* to_start_of, | 473 void AdvanceLastProcessedMarker(UseInterval* to_start_of, |
| 440 LifetimePosition but_not_past) const; | 474 LifetimePosition but_not_past) const; |
| 441 | 475 |
| (...skipping 19 matching lines...) |
| 461 SpillRange* spill_range_; | 495 SpillRange* spill_range_; |
| 462 }; | 496 }; |
| 463 SpillAtDefinitionList* spills_at_definition_; | 497 SpillAtDefinitionList* spills_at_definition_; |
| 464 // This is used as a cache, it doesn't affect correctness. | 498 // This is used as a cache, it doesn't affect correctness. |
| 465 mutable UseInterval* current_interval_; | 499 mutable UseInterval* current_interval_; |
| 466 // This is used as a cache, it doesn't affect correctness. | 500 // This is used as a cache, it doesn't affect correctness. |
| 467 mutable UsePosition* last_processed_use_; | 501 mutable UsePosition* last_processed_use_; |
| 468 // This is used as a cache, it's invalid outside of BuildLiveRanges. | 502 // This is used as a cache, it's invalid outside of BuildLiveRanges. |
| 469 mutable UsePosition* current_hint_position_; | 503 mutable UsePosition* current_hint_position_; |
| 470 | 504 |
| 505 // Used only by the greedy allocator. |
| 506 float weight_; |
| 507 int size_; |
| 508 LiveRangeGroup* group_; |
| 471 DISALLOW_COPY_AND_ASSIGN(LiveRange); | 509 DISALLOW_COPY_AND_ASSIGN(LiveRange); |
| 472 }; | 510 }; |
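The weight_/size_ members added above are caches for the greedy allocator: InvalidateWeightAndSize() resets them to sentinel values and size() recomputes lazily. RecalculateSize() is not defined in this header, so the sketch below only illustrates the caching pattern; it assumes, for illustration only, that a range's size is the sum of its interval lengths, and all names in it are simplified stand-ins rather than V8 types.

// Lazily cached size, invalidated to a sentinel and recomputed on demand.
struct Interval {
  int start;
  int end;  // exclusive
  Interval* next;
};

class CachedSize {
 public:
  explicit CachedSize(Interval* first) : first_(first), size_(kInvalid) {}

  int size() {
    if (size_ == kInvalid) Recalculate();
    return size_;
  }
  // Call after splitting or otherwise mutating the interval list, so the
  // next size() query recomputes.
  void Invalidate() { size_ = kInvalid; }

 private:
  static const int kInvalid = -1;

  void Recalculate() {
    size_ = 0;
    for (Interval* i = first_; i != nullptr; i = i->next) {
      size_ += i->end - i->start;
    }
  }

  Interval* first_;
  int size_;
};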
| 473 | 511 |
| 474 | 512 |
| 513 class LiveRangeGroup final : public ZoneObject { |
| 514 public: |
| 515 explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {} |
| 516 ZoneSet<LiveRange*>& ranges() { return ranges_; } |
| 517 |
| 518 private: |
| 519 ZoneSet<LiveRange*> ranges_; |
| 520 |
| 521 DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup); |
| 522 }; |
| 523 |
| 475 struct PrintableLiveRange { | 524 struct PrintableLiveRange { |
| 476 const RegisterConfiguration* register_configuration_; | 525 const RegisterConfiguration* register_configuration_; |
| 477 const LiveRange* range_; | 526 const LiveRange* range_; |
| 478 }; | 527 }; |
| 479 | 528 |
| 480 | 529 |
| 481 std::ostream& operator<<(std::ostream& os, | 530 std::ostream& operator<<(std::ostream& os, |
| 482 const PrintableLiveRange& printable_range); | 531 const PrintableLiveRange& printable_range); |
| 483 | 532 |
| 484 | 533 |
| 485 class SpillRange final : public ZoneObject { | 534 class SpillRange final : public ZoneObject { |
| 486 public: | 535 public: |
| 487 static const int kUnassignedSlot = -1; | 536 static const int kUnassignedSlot = -1; |
| 488 SpillRange(LiveRange* range, Zone* zone); | 537 SpillRange(LiveRange* range, Zone* zone); |
| 489 | 538 |
| 490 UseInterval* interval() const { return use_interval_; } | 539 UseInterval* interval() const { return use_interval_; } |
| 491 // Currently, only 4 or 8 byte slots are supported. | 540 // Currently, only 4 or 8 byte slots are supported. |
| 492 int ByteWidth() const; | 541 int ByteWidth() const; |
| 493 bool IsEmpty() const { return live_ranges_.empty(); } | 542 bool IsEmpty() const { return live_ranges_.empty(); } |
| 494 bool TryMerge(SpillRange* other); | 543 bool TryMerge(SpillRange* other); |
| 495 | 544 |
| 496 void set_assigned_slot(int index) { | 545 void set_assigned_slot(int index) { |
| 497 DCHECK_EQ(kUnassignedSlot, assigned_slot_); | 546 DCHECK_EQ(kUnassignedSlot, assigned_slot_); |
| 498 assigned_slot_ = index; | 547 assigned_slot_ = index; |
| 499 } | 548 } |
| 549 bool IsSlotAssigned() const { return assigned_slot_ != kUnassignedSlot; } |
| 500 int assigned_slot() { | 550 int assigned_slot() { |
| 501 DCHECK_NE(kUnassignedSlot, assigned_slot_); | 551 DCHECK_NE(kUnassignedSlot, assigned_slot_); |
| 502 return assigned_slot_; | 552 return assigned_slot_; |
| 503 } | 553 } |
| 504 | 554 |
| 505 private: | 555 private: |
| 506 LifetimePosition End() const { return end_position_; } | 556 LifetimePosition End() const { return end_position_; } |
| 507 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } | 557 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } |
| 508 bool IsIntersectingWith(SpillRange* other) const; | 558 bool IsIntersectingWith(SpillRange* other) const; |
| 509 // Merge intervals, making sure the use intervals are sorted | 559 // Merge intervals, making sure the use intervals are sorted |
| 510 void MergeDisjointIntervals(UseInterval* other); | 560 void MergeDisjointIntervals(UseInterval* other); |
| 511 | 561 |
| 512 ZoneVector<LiveRange*> live_ranges_; | 562 ZoneVector<LiveRange*> live_ranges_; |
| 513 UseInterval* use_interval_; | 563 UseInterval* use_interval_; |
| 514 LifetimePosition end_position_; | 564 LifetimePosition end_position_; |
| 515 int assigned_slot_; | 565 int assigned_slot_; |
| 516 | 566 |
| 517 DISALLOW_COPY_AND_ASSIGN(SpillRange); | 567 DISALLOW_COPY_AND_ASSIGN(SpillRange); |
| 518 }; | 568 }; |
| 519 | 569 |
| 520 | 570 |
| 571 std::ostream& operator<<(std::ostream& os, SpillRange* range); |
| 572 |
| 573 |
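SpillRange::MergeDisjointIntervals, declared above, combines the use intervals of two spill ranges whose interval lists are already sorted and mutually disjoint. Its body is not part of this header; the sketch below is only an assumed illustration of that kind of merge, written against a simplified singly linked interval node rather than V8's UseInterval.

// Assumed illustration of merging two sorted, disjoint interval lists while
// keeping the result sorted by start position.
struct UseIntervalNode {
  int start;
  int end;
  UseIntervalNode* next;
};

UseIntervalNode* MergeDisjointIntervals(UseIntervalNode* a,
                                        UseIntervalNode* b) {
  UseIntervalNode head{0, 0, nullptr};
  UseIntervalNode* tail = &head;
  while (a != nullptr && b != nullptr) {
    if (a->start < b->start) {
      tail->next = a;
      a = a->next;
    } else {
      tail->next = b;
      b = b->next;
    }
    tail = tail->next;
  }
  tail->next = (a != nullptr) ? a : b;  // append whichever list remains
  return head.next;
}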
| 521 class RegisterAllocationData final : public ZoneObject { | 574 class RegisterAllocationData final : public ZoneObject { |
| 522 public: | 575 public: |
| 523 class PhiMapValue : public ZoneObject { | 576 class PhiMapValue : public ZoneObject { |
| 524 public: | 577 public: |
| 525 PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone); | 578 PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone); |
| 526 | 579 |
| 527 const PhiInstruction* phi() const { return phi_; } | 580 const PhiInstruction* phi() const { return phi_; } |
| 528 const InstructionBlock* block() const { return block_; } | 581 const InstructionBlock* block() const { return block_; } |
| 529 | 582 |
| 530 // For hinting. | 583 // For hinting. |
| (...skipping 190 matching lines...) |
| 721 InstructionOperand* operand) { | 774 InstructionOperand* operand) { |
| 722 Use(block_start, position, operand, nullptr, UsePositionHintType::kNone); | 775 Use(block_start, position, operand, nullptr, UsePositionHintType::kNone); |
| 723 } | 776 } |
| 724 | 777 |
| 725 RegisterAllocationData* const data_; | 778 RegisterAllocationData* const data_; |
| 726 ZoneMap<InstructionOperand*, UsePosition*> phi_hints_; | 779 ZoneMap<InstructionOperand*, UsePosition*> phi_hints_; |
| 727 | 780 |
| 728 DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder); | 781 DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder); |
| 729 }; | 782 }; |
| 730 | 783 |
| 784 struct AllocatorStats { |
| 785 unsigned spills; |
| 786 unsigned wins; |
| 787 unsigned losses_no_eviction; |
| 788 unsigned losses_after_eviction; |
| 789 unsigned good_split_attempts; |
| 790 unsigned good_split_successes; |
| 791 unsigned good_split_smart_above; |
| 792 unsigned good_split_smart_below; |
| 793 unsigned good_split_above; |
| 794 unsigned good_split_below; |
| 795 unsigned groups_attempted; |
| 796 unsigned groups_succeeded; |
| 797 unsigned groups_allocated; |
| 798 void reset() { *this = {0}; } |
| 799 }; |
| 800 |
| 801 |
| 802 std::ostream& operator<<(std::ostream& os, const AllocatorStats& stats); |
| 803 |
| 731 | 804 |
| 732 class RegisterAllocator : public ZoneObject { | 805 class RegisterAllocator : public ZoneObject { |
| 733 public: | 806 public: |
| 734 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); | 807 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); |
| 735 | 808 |
| 736 protected: | 809 protected: |
| 810 AllocatorStats stats_; |
| 737 RegisterAllocationData* data() const { return data_; } | 811 RegisterAllocationData* data() const { return data_; } |
| 738 InstructionSequence* code() const { return data()->code(); } | 812 InstructionSequence* code() const { return data()->code(); } |
| 739 RegisterKind mode() const { return mode_; } | 813 RegisterKind mode() const { return mode_; } |
| 740 int num_registers() const { return num_registers_; } | 814 int num_registers() const { return num_registers_; } |
| 741 | 815 |
| 742 Zone* allocation_zone() const { return data()->allocation_zone(); } | 816 Zone* allocation_zone() const { return data()->allocation_zone(); } |
| 743 | 817 |
| 744 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } | 818 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } |
| 745 | 819 |
| 746 // Split the given range at the given position. | 820 // Split the given range at the given position. |
| (...skipping 13 matching lines...) |
| 760 // loop covered by this interval or the latest possible position. | 834 // loop covered by this interval or the latest possible position. |
| 761 LifetimePosition FindOptimalSplitPos(LifetimePosition start, | 835 LifetimePosition FindOptimalSplitPos(LifetimePosition start, |
| 762 LifetimePosition end); | 836 LifetimePosition end); |
| 763 | 837 |
| 764 void Spill(LiveRange* range); | 838 void Spill(LiveRange* range); |
| 765 | 839 |
| 766 // If we are trying to spill a range inside the loop try to | 840 // If we are trying to spill a range inside the loop try to |
| 767 // hoist spill position out to the point just before the loop. | 841 // hoist spill position out to the point just before the loop. |
| 768 LifetimePosition FindOptimalSpillingPos(LiveRange* range, | 842 LifetimePosition FindOptimalSpillingPos(LiveRange* range, |
| 769 LifetimePosition pos); | 843 LifetimePosition pos); |
| 844 const char* RegisterName(int allocation_index) const; |
| 770 | 845 |
| 771 private: | 846 private: |
| 772 RegisterAllocationData* const data_; | 847 RegisterAllocationData* const data_; |
| 773 const RegisterKind mode_; | 848 const RegisterKind mode_; |
| 774 const int num_registers_; | 849 const int num_registers_; |
| 775 | 850 |
| 776 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); | 851 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); |
| 777 }; | 852 }; |
| 778 | 853 |
| 779 | 854 |
| 780 class LinearScanAllocator final : public RegisterAllocator { | 855 class LinearScanAllocator final : public RegisterAllocator { |
| 781 public: | 856 public: |
| 782 LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind, | 857 LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind, |
| 783 Zone* local_zone); | 858 Zone* local_zone); |
| 784 | 859 |
| 785 // Phase 4: compute register assignments. | 860 // Phase 4: compute register assignments. |
| 786 void AllocateRegisters(); | 861 void AllocateRegisters(); |
| 787 | 862 |
| 788 private: | 863 private: |
| 789 const char* RegisterName(int allocation_index) const; | |
| 790 | |
| 791 ZoneVector<LiveRange*>& unhandled_live_ranges() { | 864 ZoneVector<LiveRange*>& unhandled_live_ranges() { |
| 792 return unhandled_live_ranges_; | 865 return unhandled_live_ranges_; |
| 793 } | 866 } |
| 794 ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; } | 867 ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; } |
| 795 ZoneVector<LiveRange*>& inactive_live_ranges() { | 868 ZoneVector<LiveRange*>& inactive_live_ranges() { |
| 796 return inactive_live_ranges_; | 869 return inactive_live_ranges_; |
| 797 } | 870 } |
| 798 | 871 |
| 799 void SetLiveRangeAssignedRegister(LiveRange* range, int reg); | 872 void SetLiveRangeAssignedRegister(LiveRange* range, int reg); |
| 800 | 873 |
| (...skipping 32 matching lines...) |
| 833 ZoneVector<LiveRange*> active_live_ranges_; | 906 ZoneVector<LiveRange*> active_live_ranges_; |
| 834 ZoneVector<LiveRange*> inactive_live_ranges_; | 907 ZoneVector<LiveRange*> inactive_live_ranges_; |
| 835 | 908 |
| 836 #ifdef DEBUG | 909 #ifdef DEBUG |
| 837 LifetimePosition allocation_finger_; | 910 LifetimePosition allocation_finger_; |
| 838 #endif | 911 #endif |
| 839 | 912 |
| 840 DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator); | 913 DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator); |
| 841 }; | 914 }; |
| 842 | 915 |
| 916 |
| 917 // Storage detail for CoalescedLiveRanges. |
| 918 struct AllocatedInterval { |
| 919 LifetimePosition start; |
| 920 LifetimePosition end; |
| 921 LiveRange* range; |
| 922 struct Comparer : public std::binary_function<AllocatedInterval, |
| 923 AllocatedInterval, bool> { |
| 924 bool operator()(const AllocatedInterval& x, |
| 925 const AllocatedInterval& y) const { |
| 926 return x.start < y.start; |
| 927 } |
| 928 }; |
| 929 }; |
| 930 |
| 931 |
| 843 class CoalescedLiveRanges; | 932 class CoalescedLiveRanges; |
| 844 | 933 |
| 845 | 934 |
| 935 // Iterator over allocated live ranges (CoalescedLiveRanges) that conflict with |
| 936 // a given one. See CoalescedLiveRanges::first_conflict. |
| 937 // Note that iteration may produce the same live range more than once, and not necessarily consecutively. |
| 938 struct conflict_iterator { |
| 939 friend class CoalescedLiveRanges; |
| 940 friend bool operator==(const conflict_iterator& first, |
| 941 const conflict_iterator& second); |
| 942 conflict_iterator() : query_(nullptr), storage_(nullptr) {} |
| 943 |
| 944 LiveRange* operator->() { return pos_->range; } |
| 945 |
| 946 LiveRange* operator*() { return pos_->range; } |
| 947 |
| 948 conflict_iterator& operator++(); |
| 949 bool IsValid() const { return storage_ != nullptr; } |
| 950 |
| 951 private: |
| 952 typedef ZoneSet<AllocatedInterval, AllocatedInterval::Comparer> IntervalStore; |
| 953 typedef IntervalStore::const_iterator interval_iterator; |
| 954 |
| 955 static bool Intersects(LifetimePosition a_start, LifetimePosition a_end, |
| 956 LifetimePosition b_start, LifetimePosition b_end) { |
| 957 if (a_start == b_start) return true; |
| 958 if (a_start < b_start) return a_end > b_start; |
| 959 return Intersects(b_start, b_end, a_start, a_end); |
| 960 } |
| 961 |
| 962 conflict_iterator(UseInterval* query, IntervalStore* storage) |
| 963 : query_(query), storage_(storage) { |
| 964 InitializeForNewQuery(); |
| 965 } |
| 966 |
| 967 void MoveToEnd() { |
| 968 query_ = nullptr; |
| 969 storage_ = nullptr; |
| 970 } |
| 971 |
| 972 void InitializeForNewQuery(); |
| 973 |
| 974 AllocatedInterval AsAllocatedInterval(LifetimePosition pos) { |
| 975 return {pos, LifetimePosition::Invalid(), nullptr}; |
| 976 } |
| 977 |
| 978 UseInterval* query_; |
| 979 IntervalStore* storage_; |
| 980 interval_iterator pos_; |
| 981 }; |
| 982 |
| 983 bool operator==(const conflict_iterator& first, |
| 984 const conflict_iterator& second); |
| 985 bool operator!=(const conflict_iterator& first, |
| 986 const conflict_iterator& second); |
| 987 |
| 988 |
| 846 // A variant of the LLVM Greedy Register Allocator. See | 989 // A variant of the LLVM Greedy Register Allocator. See |
| 847 // http://blog.llvm.org/2011/09/greedy-register-allocation-in-llvm-30.html | 990 // http://blog.llvm.org/2011/09/greedy-register-allocation-in-llvm-30.html |
| 848 class GreedyAllocator final : public RegisterAllocator { | 991 class GreedyAllocator final : public RegisterAllocator { |
| 849 public: | 992 public: |
| 850 explicit GreedyAllocator(RegisterAllocationData* data, RegisterKind kind, | 993 explicit GreedyAllocator(RegisterAllocationData* data, RegisterKind kind, |
| 851 Zone* local_zone); | 994 Zone* local_zone); |
| 852 | 995 |
| 853 void AllocateRegisters(); | 996 void AllocateRegisters(); |
| 854 | 997 |
| 855 private: | 998 private: |
| 856 LifetimePosition GetSplittablePos(LifetimePosition pos); | 999 void AllocateRange(LiveRange* current); |
| 1000 void AllocateGroup(LiveRangeGroup* grp); |
| 1001 bool TryAllocateGroup(LiveRangeGroup* grp); |
| 1002 bool TryAllocateGroupAtRegister(unsigned reg, LiveRangeGroup* grp); |
| 1003 |
| 1004 struct PQueueElem { |
| 1005 bool IsGroup() { return is_grp_; } |
| 1006 explicit PQueueElem(LiveRange* range) : is_grp_(false) { |
| 1007 storage_.range_ = range; |
| 1008 } |
| 1009 |
| 1010 explicit PQueueElem(LiveRangeGroup* grp) : is_grp_(true) { |
| 1011 storage_.grp_ = grp; |
| 1012 } |
| 1013 |
| 1014 |
| 1015 LiveRange* get_range() { |
| 1016 if (is_grp_) return nullptr; |
| 1017 return storage_.range_; |
| 1018 } |
| 1019 |
| 1020 |
| 1021 LiveRangeGroup* get_group() { |
| 1022 if (is_grp_) return storage_.grp_; |
| 1023 return nullptr; |
| 1024 } |
| 1025 |
| 1026 private: |
| 1027 bool is_grp_; |
| 1028 union { |
| 1029 LiveRange* range_; |
| 1030 LiveRangeGroup* grp_; |
| 1031 } storage_; |
| 1032 }; |
| 1033 |
| 1034 void GroupAndEnqueue(); |
| 857 const RegisterConfiguration* config() const { return data()->config(); } | 1035 const RegisterConfiguration* config() const { return data()->config(); } |
| 858 Zone* local_zone() const { return local_zone_; } | 1036 Zone* local_zone() const { return local_zone_; } |
| 859 bool TryReuseSpillForPhi(LiveRange* range); | |
| 860 int GetHintedRegister(LiveRange* range); | |
| 861 | 1037 |
| 862 typedef ZonePriorityQueue<std::pair<unsigned, LiveRange*>> PQueue; | 1038 struct PQueueElemComparer |
| 1039 : public std::binary_function<std::pair<unsigned, PQueueElem>, |
| 1040 std::pair<unsigned, PQueueElem>, bool> { |
| 1041 bool operator()(const std::pair<unsigned, PQueueElem>& x, |
| 1042 const std::pair<unsigned, PQueueElem>& y) const { |
| 1043 return x.first < y.first; |
| 1044 } |
| 1045 }; |
| 863 | 1046 |
| 864 unsigned GetLiveRangeSize(LiveRange* range); | 1047 typedef ZonePriorityQueue<std::pair<unsigned, PQueueElem>, PQueueElemComparer> |
| 1048 PQueue; |
| 1049 |
| 865 void Enqueue(LiveRange* range); | 1050 void Enqueue(LiveRange* range); |
| 1051 void Enqueue(LiveRangeGroup* group); |
| 866 | 1052 |
| 867 void Evict(LiveRange* range); | 1053 void Evict(LiveRange* range); |
| 868 float CalculateSpillWeight(LiveRange* range); | 1054 void EvictAll(int reg, const conflict_iterator& first_conflict); |
| 869 float CalculateMaxSpillWeight(const ZoneSet<LiveRange*>& ranges); | |
| 870 | 1055 |
| 1056 // Find the register with the minimum spill weight. If that weight is still |
| 1057 // greater than or equal to competing, return -1. |
| 1058 int FindRegisterToEvictForRange( |
| 1059 conflict_iterator |
| 1060 all_conflicts[RegisterConfiguration::kMaxDoubleRegisters], |
| 1061 float competing); |
| 871 | 1062 |
| 872 bool TryAllocate(LiveRange* current, ZoneSet<LiveRange*>* conflicting); | 1063 // Same as above, for all the registers in a group. |
| 873 bool TryAllocatePhysicalRegister(unsigned reg_id, LiveRange* range, | 1064 int FindRegisterToEvictForGroup(LiveRangeGroup* grp, float competing); |
| 874 ZoneSet<LiveRange*>* conflicting); | 1065 |
| 875 bool HandleSpillOperands(LiveRange* range); | 1066 template <typename TIter> |
| 876 void AllocateBlockedRange(LiveRange* current, LifetimePosition pos, | 1067 float CalculateMaxSpillWeight(const TIter& begin, const TIter& end); |
| 877 bool spill); | 1068 |
| 1069 bool HandleSpillOperands(LiveRange* range, LiveRange** remaining); |
| 1070 void HandleBlockedRange(LiveRange* current); |
| 1071 bool TrySplitBeforeLoops(LiveRange* current); |
| 1072 bool TrySplitAfterLoops(LiveRange* current); |
| 1073 bool TrySplitAroundCalls(LiveRange* range); |
| 878 | 1074 |
| 879 LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start, | 1075 LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start, |
| 880 LifetimePosition until, LifetimePosition end); | 1076 LifetimePosition until, LifetimePosition end); |
| 881 void AssignRangeToRegister(int reg_id, LiveRange* range); | 1077 void AssignRangeToRegister(int reg_id, LiveRange* range); |
| 882 | 1078 |
| 883 LifetimePosition FindProgressingSplitPosition(LiveRange* range, | |
| 884 bool* is_spill_pos); | |
| 885 | |
| 886 Zone* local_zone_; | 1079 Zone* local_zone_; |
| 887 ZoneVector<CoalescedLiveRanges*> allocations_; | 1080 ZoneVector<CoalescedLiveRanges*> allocations_; |
| 888 PQueue queue_; | 1081 PQueue queue_; |
| 889 DISALLOW_COPY_AND_ASSIGN(GreedyAllocator); | 1082 DISALLOW_COPY_AND_ASSIGN(GreedyAllocator); |
| 890 }; | 1083 }; |
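PQueueElem is a tagged union letting the same priority queue hold either a single LiveRange or a LiveRangeGroup, keyed by an unsigned size, and PQueueElemComparer orders entries by that key. Assuming ZonePriorityQueue follows std::priority_queue's convention (a less-than comparer yields a max-heap), larger entries are popped and allocated first. The snippet below only illustrates that ordering with a plain std::priority_queue and stand-in string payloads; it is not the allocator's queue.

// Demonstrates the pop-largest-first ordering implied by the comparer above.
#include <iostream>
#include <queue>
#include <string>
#include <utility>
#include <vector>

int main() {
  using Entry = std::pair<unsigned, std::string>;  // stand-in for PQueueElem
  auto by_size = [](const Entry& x, const Entry& y) {
    return x.first < y.first;  // same ordering as PQueueElemComparer
  };
  std::priority_queue<Entry, std::vector<Entry>, decltype(by_size)> queue(
      by_size);

  queue.push({3, "small range"});
  queue.push({12, "loop-spanning range"});
  queue.push({7, "group"});

  while (!queue.empty()) {
    std::cout << queue.top().first << ": " << queue.top().second << "\n";
    queue.pop();  // prints 12, then 7, then 3
  }
  return 0;
}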
| 891 | 1084 |
| 892 | 1085 |
| 893 class SpillSlotLocator final : public ZoneObject { | 1086 class SpillSlotLocator final : public ZoneObject { |
| 894 public: | 1087 public: |
| 895 explicit SpillSlotLocator(RegisterAllocationData* data); | 1088 explicit SpillSlotLocator(RegisterAllocationData* data); |
| (...skipping 70 matching lines...) |
| 966 RegisterAllocationData* const data_; | 1159 RegisterAllocationData* const data_; |
| 967 | 1160 |
| 968 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); | 1161 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); |
| 969 }; | 1162 }; |
| 970 | 1163 |
| 971 } // namespace compiler | 1164 } // namespace compiler |
| 972 } // namespace internal | 1165 } // namespace internal |
| 973 } // namespace v8 | 1166 } // namespace v8 |
| 974 | 1167 |
| 975 #endif // V8_REGISTER_ALLOCATOR_H_ | 1168 #endif // V8_REGISTER_ALLOCATOR_H_ |