Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_REGISTER_ALLOCATOR_H_ | 5 #ifndef V8_REGISTER_ALLOCATOR_H_ |
| 6 #define V8_REGISTER_ALLOCATOR_H_ | 6 #define V8_REGISTER_ALLOCATOR_H_ |
| 7 | 7 |
| 8 #include "src/compiler/instruction.h" | 8 #include "src/compiler/instruction.h" |
| 9 #include "src/ostreams.h" | 9 #include "src/ostreams.h" |
| 10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
| (...skipping 233 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 244 UsePositionHintType hint_type() const { | 244 UsePositionHintType hint_type() const { |
| 245 return HintTypeField::decode(flags_); | 245 return HintTypeField::decode(flags_); |
| 246 } | 246 } |
| 247 bool HasHint() const; | 247 bool HasHint() const; |
| 248 bool HintRegister(int* register_index) const; | 248 bool HintRegister(int* register_index) const; |
| 249 void ResolveHint(UsePosition* use_pos); | 249 void ResolveHint(UsePosition* use_pos); |
| 250 bool IsResolved() const { | 250 bool IsResolved() const { |
| 251 return hint_type() != UsePositionHintType::kUnresolved; | 251 return hint_type() != UsePositionHintType::kUnresolved; |
| 252 } | 252 } |
| 253 static UsePositionHintType HintTypeForOperand(const InstructionOperand& op); | 253 static UsePositionHintType HintTypeForOperand(const InstructionOperand& op); |
| 254 void dump_hint(std::ostream& os, const RegisterConfiguration* config); | |
| 254 | 255 |
| 255 private: | 256 private: |
| 256 typedef BitField<UsePositionType, 0, 2> TypeField; | 257 typedef BitField<UsePositionType, 0, 2> TypeField; |
| 257 typedef BitField<UsePositionHintType, 2, 3> HintTypeField; | 258 typedef BitField<UsePositionHintType, 2, 3> HintTypeField; |
| 258 typedef BitField<bool, 5, 1> RegisterBeneficialField; | 259 typedef BitField<bool, 5, 1> RegisterBeneficialField; |
| 259 typedef BitField<int32_t, 6, 6> AssignedRegisterField; | 260 typedef BitField<int32_t, 6, 6> AssignedRegisterField; |
| 260 | 261 |
| 261 InstructionOperand* const operand_; | 262 InstructionOperand* const operand_; |
| 262 void* hint_; | 263 void* hint_; |
| 263 UsePosition* next_; | 264 UsePosition* next_; |
| 264 LifetimePosition const pos_; | 265 LifetimePosition const pos_; |
| 265 uint32_t flags_; | 266 uint32_t flags_; |
| 266 | 267 |
| 267 DISALLOW_COPY_AND_ASSIGN(UsePosition); | 268 DISALLOW_COPY_AND_ASSIGN(UsePosition); |
| 268 }; | 269 }; |
| 269 | 270 |
| 270 | 271 |
| 271 class SpillRange; | 272 class SpillRange; |
| 273 class LiveRangeGroup; | |
| 272 | 274 |
| 273 | 275 |
| 274 // Representation of SSA values' live ranges as a collection of (continuous) | 276 // Representation of SSA values' live ranges as a collection of (continuous) |
| 275 // intervals over the instruction ordering. | 277 // intervals over the instruction ordering. |
| 276 class LiveRange final : public ZoneObject { | 278 class LiveRange final : public ZoneObject { |
| 277 public: | 279 public: |
| 278 explicit LiveRange(int id, MachineType machine_type); | 280 explicit LiveRange(int id, MachineType machine_type); |
| 279 | 281 |
| 280 UseInterval* first_interval() const { return first_interval_; } | 282 UseInterval* first_interval() const { return first_interval_; } |
| 281 UsePosition* first_pos() const { return first_pos_; } | 283 UsePosition* first_pos() const { return first_pos_; } |
| (...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 334 // range and which follows both start and last processed use position | 336 // range and which follows both start and last processed use position |
| 335 UsePosition* NextUsePositionRegisterIsBeneficial( | 337 UsePosition* NextUsePositionRegisterIsBeneficial( |
| 336 LifetimePosition start) const; | 338 LifetimePosition start) const; |
| 337 | 339 |
| 338 // Returns use position for which register is beneficial in this live | 340 // Returns use position for which register is beneficial in this live |
| 339 // range and which precedes start. | 341 // range and which precedes start. |
| 340 UsePosition* PreviousUsePositionRegisterIsBeneficial( | 342 UsePosition* PreviousUsePositionRegisterIsBeneficial( |
| 341 LifetimePosition start) const; | 343 LifetimePosition start) const; |
| 342 | 344 |
| 343 // Can this live range be spilled at this position. | 345 // Can this live range be spilled at this position. |
| 344 bool CanBeSpilled(LifetimePosition pos) const; | 346 bool CanBeSpilled(LifetimePosition pos) const { return CanBeSplit(pos); } |
| 347 | |
| 348 // Can this live range be split at this position. | |
| 349 bool CanBeSplit(LifetimePosition pos) const; | |
| 345 | 350 |
| 346 // Split this live range at the given position which must follow the start of | 351 // Split this live range at the given position which must follow the start of |
| 347 // the range. | 352 // the range. |
| 348 // All uses following the given position will be moved from this | 353 // All uses following the given position will be moved from this |
| 349 // live range to the result live range. | 354 // live range to the result live range. |
| 350 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); | 355 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); |
| 351 | 356 |
| 352 // Returns nullptr when no register is hinted, otherwise sets register_index. | 357 // Returns nullptr when no register is hinted, otherwise sets register_index. |
| 353 UsePosition* FirstHintPosition(int* register_index) const; | 358 UsePosition* FirstHintPosition(int* register_index) const; |
| 354 UsePosition* FirstHintPosition() const { | 359 UsePosition* FirstHintPosition() const { |
| (...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 421 const InstructionOperand& spill_op); | 426 const InstructionOperand& spill_op); |
| 422 void SetUseHints(int register_index); | 427 void SetUseHints(int register_index); |
| 423 void UnsetUseHints() { SetUseHints(kUnassignedRegister); } | 428 void UnsetUseHints() { SetUseHints(kUnassignedRegister); } |
| 424 | 429 |
| 425 struct SpillAtDefinitionList; | 430 struct SpillAtDefinitionList; |
| 426 | 431 |
| 427 SpillAtDefinitionList* spills_at_definition() const { | 432 SpillAtDefinitionList* spills_at_definition() const { |
| 428 return spills_at_definition_; | 433 return spills_at_definition_; |
| 429 } | 434 } |
| 430 | 435 |
| 436 LiveRangeGroup* group() { return group_; } | |
|
Benedikt Meurer
2015/06/11 05:30:29
Nit: Make this method const.
Mircea Trofin
2015/06/11 07:19:36
Done.
| |
| 437 void SetGroup(LiveRangeGroup* grp) { group_ = grp; } | |
| 438 | |
| 439 float weight() const { | |
| 440 DCHECK(weight_ >= 0.0f); | |
|
Benedikt Meurer
2015/06/11 05:30:29
Trivial getters should not have DCHECK's in them.
Mircea Trofin
2015/06/11 07:19:36
The point is that I want to make sure no attempt t
| |
| 441 return weight_; | |
| 442 } | |
| 443 | |
| 444 unsigned size() { | |
|
Benedikt Meurer
2015/06/11 05:30:30
This getter does perform work, so it's not a simpl
Mircea Trofin
2015/06/11 07:19:36
Done.
| |
| 445 if (size_ < 0) RecalculateSize(); | |
| 446 DCHECK(size_ > 0); | |
| 447 return static_cast<unsigned>(size_); | |
| 448 } | |
| 449 | |
| 450 void InvalidateWeightAndSize() { | |
| 451 weight_ = kInvalidWeight; | |
| 452 size_ = kInvalidSize; | |
| 453 } | |
| 454 void RecalculateWeight(InstructionSequence* code); | |
| 455 LifetimePosition GetFirstSplittablePosition(); | |
| 456 | |
| 457 static const float kMaxWeight; | |
| 458 | |
| 431 private: | 459 private: |
| 460 static const float kUseInLoopWeightMultiplier; | |
| 461 static const float kPreferredRegisterWeightMultiplier; | |
| 462 static const float kInvalidWeight; | |
| 463 static const int kInvalidSize = -1; | |
| 464 | |
| 465 void RecalculateSize(); | |
| 432 void set_spill_type(SpillType value) { | 466 void set_spill_type(SpillType value) { |
| 433 bits_ = SpillTypeField::update(bits_, value); | 467 bits_ = SpillTypeField::update(bits_, value); |
| 434 } | 468 } |
| 435 | 469 |
| 436 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } | 470 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } |
| 437 | 471 |
| 438 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; | 472 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; |
| 439 void AdvanceLastProcessedMarker(UseInterval* to_start_of, | 473 void AdvanceLastProcessedMarker(UseInterval* to_start_of, |
| 440 LifetimePosition but_not_past) const; | 474 LifetimePosition but_not_past) const; |
| 441 | 475 |
| (...skipping 19 matching lines...) Expand all Loading... | |
| 461 SpillRange* spill_range_; | 495 SpillRange* spill_range_; |
| 462 }; | 496 }; |
| 463 SpillAtDefinitionList* spills_at_definition_; | 497 SpillAtDefinitionList* spills_at_definition_; |
| 464 // This is used as a cache, it doesn't affect correctness. | 498 // This is used as a cache, it doesn't affect correctness. |
| 465 mutable UseInterval* current_interval_; | 499 mutable UseInterval* current_interval_; |
| 466 // This is used as a cache, it doesn't affect correctness. | 500 // This is used as a cache, it doesn't affect correctness. |
| 467 mutable UsePosition* last_processed_use_; | 501 mutable UsePosition* last_processed_use_; |
| 468 // This is used as a cache, it's invalid outside of BuildLiveRanges. | 502 // This is used as a cache, it's invalid outside of BuildLiveRanges. |
| 469 mutable UsePosition* current_hint_position_; | 503 mutable UsePosition* current_hint_position_; |
| 470 | 504 |
| 505 // Used only by the greedy allocator. | |
| 506 float weight_; | |
| 507 int size_; | |
| 508 LiveRangeGroup* group_; | |
| 471 DISALLOW_COPY_AND_ASSIGN(LiveRange); | 509 DISALLOW_COPY_AND_ASSIGN(LiveRange); |
| 472 }; | 510 }; |
| 473 | 511 |
| 474 | 512 |
| 513 class LiveRangeGroup final : public ZoneObject { | |
| 514 public: | |
| 515 explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {} | |
| 516 ZoneSet<LiveRange*>& ranges() { return ranges_; } | |
| 517 bool CanAdd(LiveRange* r); | |
| 518 void TryMerge(LiveRangeGroup* other); | |
| 519 | |
| 520 private: | |
| 521 ZoneSet<LiveRange*> ranges_; | |
|
Benedikt Meurer
2015/06/11 05:30:30
I'm not sure ZoneSet is such a great data structure
Mircea Trofin
2015/06/11 07:19:36
What would you propose as an alternative?
Full di
| |
| 522 | |
| 523 DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup); | |
| 524 }; | |
| 525 | |
| 475 struct PrintableLiveRange { | 526 struct PrintableLiveRange { |
| 476 const RegisterConfiguration* register_configuration_; | 527 const RegisterConfiguration* register_configuration_; |
| 477 const LiveRange* range_; | 528 const LiveRange* range_; |
| 478 }; | 529 }; |
| 479 | 530 |
| 480 | 531 |
| 481 std::ostream& operator<<(std::ostream& os, | 532 std::ostream& operator<<(std::ostream& os, |
| 482 const PrintableLiveRange& printable_range); | 533 const PrintableLiveRange& printable_range); |
| 483 | 534 |
| 484 | 535 |
| 485 class SpillRange final : public ZoneObject { | 536 class SpillRange final : public ZoneObject { |
| 486 public: | 537 public: |
| 487 static const int kUnassignedSlot = -1; | 538 static const int kUnassignedSlot = -1; |
| 488 SpillRange(LiveRange* range, Zone* zone); | 539 SpillRange(LiveRange* range, Zone* zone); |
| 489 | 540 |
| 490 UseInterval* interval() const { return use_interval_; } | 541 UseInterval* interval() const { return use_interval_; } |
| 491 // Currently, only 4 or 8 byte slots are supported. | 542 // Currently, only 4 or 8 byte slots are supported. |
| 492 int ByteWidth() const; | 543 int ByteWidth() const; |
| 493 bool IsEmpty() const { return live_ranges_.empty(); } | 544 bool IsEmpty() const { return live_ranges_.empty(); } |
| 494 bool TryMerge(SpillRange* other); | 545 bool TryMerge(SpillRange* other); |
| 495 | 546 |
| 496 void set_assigned_slot(int index) { | 547 void set_assigned_slot(int index) { |
| 497 DCHECK_EQ(kUnassignedSlot, assigned_slot_); | 548 DCHECK_EQ(kUnassignedSlot, assigned_slot_); |
| 498 assigned_slot_ = index; | 549 assigned_slot_ = index; |
| 499 } | 550 } |
| 551 bool IsSlotAssigned() const { return assigned_slot_ != kUnassignedSlot; } | |
| 500 int assigned_slot() { | 552 int assigned_slot() { |
| 501 DCHECK_NE(kUnassignedSlot, assigned_slot_); | 553 DCHECK_NE(kUnassignedSlot, assigned_slot_); |
| 502 return assigned_slot_; | 554 return assigned_slot_; |
| 503 } | 555 } |
| 504 | 556 |
| 505 private: | 557 private: |
| 506 LifetimePosition End() const { return end_position_; } | 558 LifetimePosition End() const { return end_position_; } |
| 507 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } | 559 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } |
| 508 bool IsIntersectingWith(SpillRange* other) const; | 560 bool IsIntersectingWith(SpillRange* other) const; |
| 509 // Merge intervals, making sure the use intervals are sorted | 561 // Merge intervals, making sure the use intervals are sorted |
| 510 void MergeDisjointIntervals(UseInterval* other); | 562 void MergeDisjointIntervals(UseInterval* other); |
| 511 | 563 |
| 512 ZoneVector<LiveRange*> live_ranges_; | 564 ZoneVector<LiveRange*> live_ranges_; |
| 513 UseInterval* use_interval_; | 565 UseInterval* use_interval_; |
| 514 LifetimePosition end_position_; | 566 LifetimePosition end_position_; |
| 515 int assigned_slot_; | 567 int assigned_slot_; |
| 516 | 568 |
| 517 DISALLOW_COPY_AND_ASSIGN(SpillRange); | 569 DISALLOW_COPY_AND_ASSIGN(SpillRange); |
| 518 }; | 570 }; |
| 519 | 571 |
| 520 | 572 |
| 573 std::ostream& operator<<(std::ostream& os, SpillRange* range); | |
| 574 | |
| 575 | |
| 521 class RegisterAllocationData final : public ZoneObject { | 576 class RegisterAllocationData final : public ZoneObject { |
| 522 public: | 577 public: |
| 523 class PhiMapValue : public ZoneObject { | 578 class PhiMapValue : public ZoneObject { |
| 524 public: | 579 public: |
| 525 PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone); | 580 PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone); |
| 526 | 581 |
| 527 const PhiInstruction* phi() const { return phi_; } | 582 const PhiInstruction* phi() const { return phi_; } |
| 528 const InstructionBlock* block() const { return block_; } | 583 const InstructionBlock* block() const { return block_; } |
| 529 | 584 |
| 530 // For hinting. | 585 // For hinting. |
| (...skipping 190 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 721 InstructionOperand* operand) { | 776 InstructionOperand* operand) { |
| 722 Use(block_start, position, operand, nullptr, UsePositionHintType::kNone); | 777 Use(block_start, position, operand, nullptr, UsePositionHintType::kNone); |
| 723 } | 778 } |
| 724 | 779 |
| 725 RegisterAllocationData* const data_; | 780 RegisterAllocationData* const data_; |
| 726 ZoneMap<InstructionOperand*, UsePosition*> phi_hints_; | 781 ZoneMap<InstructionOperand*, UsePosition*> phi_hints_; |
| 727 | 782 |
| 728 DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder); | 783 DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder); |
| 729 }; | 784 }; |
| 730 | 785 |
| 786 struct AllocatorStats { | |
| 787 unsigned spills; | |
| 788 unsigned wins; | |
| 789 unsigned losses_no_eviction; | |
| 790 unsigned losses_after_eviction; | |
| 791 unsigned good_split_attempts; | |
| 792 unsigned good_split_successes; | |
| 793 unsigned good_split_smart_above; | |
| 794 unsigned good_split_smart_below; | |
| 795 unsigned good_split_above; | |
| 796 unsigned good_split_below; | |
| 797 unsigned groups_attempted; | |
| 798 unsigned groups_succeeded; | |
| 799 unsigned groups_allocated; | |
| 800 void reset() { *this = {0}; } | |
| 801 }; | |
| 802 | |
| 803 | |
| 804 std::ostream& operator<<(std::ostream& os, const AllocatorStats& stats); | |
| 805 | |
| 731 | 806 |
| 732 class RegisterAllocator : public ZoneObject { | 807 class RegisterAllocator : public ZoneObject { |
| 733 public: | 808 public: |
| 734 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); | 809 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); |
| 735 | 810 |
| 736 protected: | 811 protected: |
| 812 AllocatorStats stats_; | |
| 737 RegisterAllocationData* data() const { return data_; } | 813 RegisterAllocationData* data() const { return data_; } |
| 738 InstructionSequence* code() const { return data()->code(); } | 814 InstructionSequence* code() const { return data()->code(); } |
| 739 RegisterKind mode() const { return mode_; } | 815 RegisterKind mode() const { return mode_; } |
| 740 int num_registers() const { return num_registers_; } | 816 int num_registers() const { return num_registers_; } |
| 741 | 817 |
| 742 Zone* allocation_zone() const { return data()->allocation_zone(); } | 818 Zone* allocation_zone() const { return data()->allocation_zone(); } |
| 743 | 819 |
| 744 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } | 820 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } |
| 745 | 821 |
| 746 // Split the given range at the given position. | 822 // Split the given range at the given position. |
| (...skipping 13 matching lines...) Expand all Loading... | |
| 760 // loop covered by this interval or the latest possible position. | 836 // loop covered by this interval or the latest possible position. |
| 761 LifetimePosition FindOptimalSplitPos(LifetimePosition start, | 837 LifetimePosition FindOptimalSplitPos(LifetimePosition start, |
| 762 LifetimePosition end); | 838 LifetimePosition end); |
| 763 | 839 |
| 764 void Spill(LiveRange* range); | 840 void Spill(LiveRange* range); |
| 765 | 841 |
| 766 // If we are trying to spill a range inside the loop try to | 842 // If we are trying to spill a range inside the loop try to |
| 767 // hoist spill position out to the point just before the loop. | 843 // hoist spill position out to the point just before the loop. |
| 768 LifetimePosition FindOptimalSpillingPos(LiveRange* range, | 844 LifetimePosition FindOptimalSpillingPos(LiveRange* range, |
| 769 LifetimePosition pos); | 845 LifetimePosition pos); |
| 846 const char* RegisterName(int allocation_index) const; | |
| 770 | 847 |
| 771 private: | 848 private: |
| 772 RegisterAllocationData* const data_; | 849 RegisterAllocationData* const data_; |
| 773 const RegisterKind mode_; | 850 const RegisterKind mode_; |
| 774 const int num_registers_; | 851 const int num_registers_; |
| 775 | 852 |
| 776 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); | 853 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); |
| 777 }; | 854 }; |
| 778 | 855 |
| 779 | 856 |
| 780 class LinearScanAllocator final : public RegisterAllocator { | 857 class LinearScanAllocator final : public RegisterAllocator { |
| 781 public: | 858 public: |
| 782 LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind, | 859 LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind, |
| 783 Zone* local_zone); | 860 Zone* local_zone); |
| 784 | 861 |
| 785 // Phase 4: compute register assignments. | 862 // Phase 4: compute register assignments. |
| 786 void AllocateRegisters(); | 863 void AllocateRegisters(); |
| 787 | 864 |
| 788 private: | 865 private: |
| 789 const char* RegisterName(int allocation_index) const; | |
| 790 | |
| 791 ZoneVector<LiveRange*>& unhandled_live_ranges() { | 866 ZoneVector<LiveRange*>& unhandled_live_ranges() { |
| 792 return unhandled_live_ranges_; | 867 return unhandled_live_ranges_; |
| 793 } | 868 } |
| 794 ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; } | 869 ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; } |
| 795 ZoneVector<LiveRange*>& inactive_live_ranges() { | 870 ZoneVector<LiveRange*>& inactive_live_ranges() { |
| 796 return inactive_live_ranges_; | 871 return inactive_live_ranges_; |
| 797 } | 872 } |
| 798 | 873 |
| 799 void SetLiveRangeAssignedRegister(LiveRange* range, int reg); | 874 void SetLiveRangeAssignedRegister(LiveRange* range, int reg); |
| 800 | 875 |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 833 ZoneVector<LiveRange*> active_live_ranges_; | 908 ZoneVector<LiveRange*> active_live_ranges_; |
| 834 ZoneVector<LiveRange*> inactive_live_ranges_; | 909 ZoneVector<LiveRange*> inactive_live_ranges_; |
| 835 | 910 |
| 836 #ifdef DEBUG | 911 #ifdef DEBUG |
| 837 LifetimePosition allocation_finger_; | 912 LifetimePosition allocation_finger_; |
| 838 #endif | 913 #endif |
| 839 | 914 |
| 840 DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator); | 915 DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator); |
| 841 }; | 916 }; |
| 842 | 917 |
| 918 | |
| 919 // Storage detail for CoalescedLiveRanges. | |
| 920 struct AllocatedInterval { | |
| 921 LifetimePosition start; | |
| 922 LifetimePosition end; | |
| 923 LiveRange* range; | |
| 924 struct Comparer : public std::binary_function<AllocatedInterval, | |
|
Benedikt Meurer
2015/06/11 05:30:30
Why do you need this Comparer class? Can't you jus
Mircea Trofin
2015/06/11 07:19:36
Oh, I see - because std::less would then just use
| |
| 925 AllocatedInterval, bool> { | |
| 926 bool operator()(const AllocatedInterval& x, | |
| 927 const AllocatedInterval& y) const { | |
| 928 return x.start < y.start; | |
| 929 } | |
| 930 }; | |
| 931 }; | |
| 932 | |
| 933 | |
| 843 class CoalescedLiveRanges; | 934 class CoalescedLiveRanges; |
| 844 | 935 |
| 845 | 936 |
| 937 // Iterator over allocated live ranges (CoalescedLiveRanges) that conflict with | |
|
Benedikt Meurer
2015/06/11 05:30:29
This whole iterator looks quite complex. Maybe we
Mircea Trofin
2015/06/11 07:19:36
Possibly, please see long rant above :)
| |
| 938 // a given one. See CoalescedLiveRanges::first_conflict. | |
| 939 // Note that iterating may produce non-consecutively repeating live ranges. | |
| 940 struct conflict_iterator { | |
| 941 friend class CoalescedLiveRanges; | |
|
Benedikt Meurer
2015/06/11 05:30:30
friend classes should be listed in the private sec
Mircea Trofin
2015/06/11 07:19:36
Done.
| |
| 942 conflict_iterator() : storage_(nullptr) {} | |
| 943 friend bool operator==(const conflict_iterator& first, | |
| 944 const conflict_iterator& second); | |
| 945 LiveRange* operator->() { return pos_->range; } | |
| 946 | |
| 947 LiveRange* operator*() { return pos_->range; } | |
|
Benedikt Meurer
2015/06/11 05:30:29
This should return a LiveRange&, not a LiveRange*.
Mircea Trofin
2015/06/11 07:19:36
The element of the set is "LiveRange*", so derefer
| |
| 948 | |
| 949 conflict_iterator& operator++(); | |
| 950 bool IsEnd() const { | |
|
Benedikt Meurer
2015/06/11 05:30:29
STL iterators compare against some end. Don't try
Mircea Trofin
2015/06/11 07:19:36
I'm not sure how it could get out of sync (and
| |
| 951 DCHECK(storage_ != nullptr); | |
| 952 return pos_ == storage_->end(); | |
| 953 } | |
| 954 | |
| 955 private: | |
| 956 typedef ZoneSet<AllocatedInterval, AllocatedInterval::Comparer> IntervalStore; | |
| 957 typedef IntervalStore::const_iterator interval_iterator; | |
| 958 | |
| 959 void Invalidate() { | |
| 960 DCHECK(storage_ != nullptr); | |
| 961 pos_ = storage_->end(); | |
| 962 query_ = nullptr; | |
| 963 } | |
| 964 | |
| 965 static bool Intersects(LifetimePosition a_start, LifetimePosition a_end, | |
| 966 LifetimePosition b_start, LifetimePosition b_end) { | |
| 967 if (a_start == b_start) return true; | |
| 968 if (a_start < b_start) return a_end > b_start; | |
| 969 return Intersects(b_start, b_end, a_start, a_end); | |
| 970 } | |
| 971 | |
| 972 bool QueryIntersectsAllocatedInterval() { | |
| 973 DCHECK(query_ != nullptr); | |
| 974 return Intersects(query_->start(), query_->end(), pos_->start, pos_->end); | |
| 975 } | |
| 976 | |
| 977 bool NextQueryIntersectsAllocatedInterval() { | |
| 978 DCHECK(query_->next() != nullptr); | |
| 979 return Intersects(query_->next()->start(), query_->next()->end(), | |
| 980 pos_->start, pos_->end); | |
| 981 } | |
| 982 | |
| 983 conflict_iterator(UseInterval* query, IntervalStore* storage) | |
| 984 : query_(query), storage_(storage) { | |
| 985 InitializeForNewQuery(); | |
| 986 } | |
| 987 | |
| 988 explicit conflict_iterator(IntervalStore* storage) : storage_(storage) { | |
| 989 Invalidate(); | |
|
Benedikt Meurer
2015/06/11 05:30:29
Initialize properly instead of calling Invalidate
Mircea Trofin
2015/06/11 07:19:36
Not sure what you mean by "proper" initialization,
| |
| 990 } | |
| 991 | |
| 992 static conflict_iterator EndIteratorForStorage(IntervalStore* storage) { | |
| 993 conflict_iterator ret(storage); | |
| 994 return ret; | |
| 995 } | |
| 996 | |
| 997 void InitializeForNewQuery(); | |
| 998 void SkipMatchedQueries(); | |
| 999 | |
| 1000 AllocatedInterval AsAllocatedInterval(LifetimePosition pos) { | |
| 1001 return {pos, LifetimePosition::Invalid(), nullptr}; | |
| 1002 } | |
| 1003 | |
| 1004 UseInterval* query_; | |
| 1005 IntervalStore* storage_; | |
| 1006 interval_iterator pos_; | |
| 1007 }; | |
| 1008 | |
| 1009 bool operator==(const conflict_iterator& first, | |
| 1010 const conflict_iterator& second); | |
| 1011 bool operator!=(const conflict_iterator& first, | |
| 1012 const conflict_iterator& second); | |
| 1013 | |
| 1014 | |
| 846 // A variant of the LLVM Greedy Register Allocator. See | 1015 // A variant of the LLVM Greedy Register Allocator. See |
| 847 // http://blog.llvm.org/2011/09/greedy-register-allocation-in-llvm-30.html | 1016 // http://blog.llvm.org/2011/09/greedy-register-allocation-in-llvm-30.html |
| 848 class GreedyAllocator final : public RegisterAllocator { | 1017 class GreedyAllocator final : public RegisterAllocator { |
| 849 public: | 1018 public: |
| 850 explicit GreedyAllocator(RegisterAllocationData* data, RegisterKind kind, | 1019 explicit GreedyAllocator(RegisterAllocationData* data, RegisterKind kind, |
| 851 Zone* local_zone); | 1020 Zone* local_zone); |
| 852 | 1021 |
| 853 void AllocateRegisters(); | 1022 void AllocateRegisters(); |
| 854 | 1023 |
| 855 private: | 1024 private: |
| 856 LifetimePosition GetSplittablePos(LifetimePosition pos); | 1025 void AllocateRange(LiveRange* current); |
| 1026 void AllocateGroup(LiveRangeGroup* grp); | |
| 1027 bool TryAllocateGroup(LiveRangeGroup* grp); | |
| 1028 bool TryAllocateGroupAtRegister(unsigned reg, LiveRangeGroup* grp); | |
|
Jarin
2015/06/12 04:09:10
grp -> group
| |
| 1029 | |
| 1030 struct PQueueElem { | |
| 1031 bool IsGroup() { return is_grp_; } | |
|
Jarin
2015/06/12 04:09:10
is_grp_ -> is_group_
| |
| 1032 explicit PQueueElem(LiveRange* range) : is_grp_(false) { | |
| 1033 storage_.range_ = range; | |
| 1034 } | |
| 1035 | |
| 1036 explicit PQueueElem(LiveRangeGroup* grp) : is_grp_(true) { | |
| 1037 storage_.grp_ = grp; | |
| 1038 } | |
| 1039 | |
| 1040 | |
| 1041 LiveRange* get_range() { | |
| 1042 if (is_grp_) return nullptr; | |
| 1043 return storage_.range_; | |
| 1044 } | |
| 1045 | |
| 1046 | |
| 1047 LiveRangeGroup* get_group() { | |
| 1048 if (is_grp_) return storage_.grp_; | |
| 1049 return nullptr; | |
| 1050 } | |
| 1051 | |
| 1052 private: | |
| 1053 bool is_grp_; | |
| 1054 union { | |
| 1055 LiveRange* range_; | |
| 1056 LiveRangeGroup* grp_; | |
| 1057 } storage_; | |
| 1058 }; | |
| 1059 | |
| 1060 void GroupAndEnqueue(); | |
| 857 const RegisterConfiguration* config() const { return data()->config(); } | 1061 const RegisterConfiguration* config() const { return data()->config(); } |
| 858 Zone* local_zone() const { return local_zone_; } | 1062 Zone* local_zone() const { return local_zone_; } |
| 859 bool TryReuseSpillForPhi(LiveRange* range); | |
| 860 int GetHintedRegister(LiveRange* range); | |
| 861 | 1063 |
| 862 typedef ZonePriorityQueue<std::pair<unsigned, LiveRange*>> PQueue; | 1064 struct PQueueElemComparer |
| 1065 : public std::binary_function<std::pair<unsigned, PQueueElem>, | |
| 1066 std::pair<unsigned, PQueueElem>, bool> { | |
| 1067 bool operator()(const std::pair<unsigned, PQueueElem>& x, | |
| 1068 const std::pair<unsigned, PQueueElem>& y) const { | |
| 1069 return x.first < y.first; | |
| 1070 } | |
| 1071 }; | |
| 863 | 1072 |
| 864 unsigned GetLiveRangeSize(LiveRange* range); | 1073 typedef ZonePriorityQueue<std::pair<unsigned, PQueueElem>, PQueueElemComparer> |
| 1074 PQueue; | |
| 1075 | |
| 865 void Enqueue(LiveRange* range); | 1076 void Enqueue(LiveRange* range); |
| 1077 void Enqueue(LiveRangeGroup* group); | |
| 866 | 1078 |
| 867 void Evict(LiveRange* range); | 1079 void Evict(LiveRange* range); |
| 868 float CalculateSpillWeight(LiveRange* range); | 1080 void EvictAll(int reg, const conflict_iterator& first_conflict); |
| 869 float CalculateMaxSpillWeight(const ZoneSet<LiveRange*>& ranges); | |
| 870 | 1081 |
| 1082 // Find the register with the minimum spill weight. If that is still higher | |
| 1083 // or equal to competing, return -1. | |
| 1084 int FindRegisterToEvictForRange( | |
| 1085 conflict_iterator | |
| 1086 all_conflicts[RegisterConfiguration::kMaxDoubleRegisters], | |
| 1087 float competing); | |
| 871 | 1088 |
| 872 bool TryAllocate(LiveRange* current, ZoneSet<LiveRange*>* conflicting); | 1089 // Same as above, for all the registers in a group. |
| 873 bool TryAllocatePhysicalRegister(unsigned reg_id, LiveRange* range, | 1090 int FindRegisterToEvictForGroup(LiveRangeGroup* grp, float competing); |
| 874 ZoneSet<LiveRange*>* conflicting); | 1091 |
| 875 bool HandleSpillOperands(LiveRange* range); | 1092 template <typename TIter> |
| 876 void AllocateBlockedRange(LiveRange* current, LifetimePosition pos, | 1093 float CalculateMaxSpillWeight(const TIter& begin, const TIter& end); |
| 877 bool spill); | 1094 |
| 1095 bool HandleSpillOperands(LiveRange* range, LiveRange** remaining); | |
| 1096 void HandleBlockedRange(LiveRange* current); | |
| 1097 bool TrySplitBeforeLoops(LiveRange* current); | |
| 1098 bool TrySplitAfterLoops(LiveRange* current); | |
| 1099 bool TrySplitAroundCalls(LiveRange* range); | |
| 878 | 1100 |
| 879 LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start, | 1101 LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start, |
| 880 LifetimePosition until, LifetimePosition end); | 1102 LifetimePosition until, LifetimePosition end); |
| 881 void AssignRangeToRegister(int reg_id, LiveRange* range); | 1103 void AssignRangeToRegister(int reg_id, LiveRange* range); |
| 882 | 1104 |
| 883 LifetimePosition FindProgressingSplitPosition(LiveRange* range, | |
| 884 bool* is_spill_pos); | |
| 885 | |
| 886 Zone* local_zone_; | 1105 Zone* local_zone_; |
| 887 ZoneVector<CoalescedLiveRanges*> allocations_; | 1106 ZoneVector<CoalescedLiveRanges*> allocations_; |
| 888 PQueue queue_; | 1107 PQueue queue_; |
| 889 DISALLOW_COPY_AND_ASSIGN(GreedyAllocator); | 1108 DISALLOW_COPY_AND_ASSIGN(GreedyAllocator); |
| 890 }; | 1109 }; |
| 891 | 1110 |
| 892 | 1111 |
| 893 class SpillSlotLocator final : public ZoneObject { | 1112 class SpillSlotLocator final : public ZoneObject { |
| 894 public: | 1113 public: |
| 895 explicit SpillSlotLocator(RegisterAllocationData* data); | 1114 explicit SpillSlotLocator(RegisterAllocationData* data); |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 966 RegisterAllocationData* const data_; | 1185 RegisterAllocationData* const data_; |
| 967 | 1186 |
| 968 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); | 1187 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); |
| 969 }; | 1188 }; |
| 970 | 1189 |
| 971 } // namespace compiler | 1190 } // namespace compiler |
| 972 } // namespace internal | 1191 } // namespace internal |
| 973 } // namespace v8 | 1192 } // namespace v8 |
| 974 | 1193 |
| 975 #endif // V8_REGISTER_ALLOCATOR_H_ | 1194 #endif // V8_REGISTER_ALLOCATOR_H_ |
| OLD | NEW |