Chromium Code Reviews

Side by Side Diff: src/compiler/register-allocator.h

Issue 1157663007: Greedy allocator: perf work (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: addressed feedback | Created 5 years, 6 months ago
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_REGISTER_ALLOCATOR_H_ 5 #ifndef V8_REGISTER_ALLOCATOR_H_
6 #define V8_REGISTER_ALLOCATOR_H_ 6 #define V8_REGISTER_ALLOCATOR_H_
7 7
8 #include "src/compiler/instruction.h" 8 #include "src/compiler/instruction.h"
9 #include "src/ostreams.h" 9 #include "src/ostreams.h"
10 #include "src/zone-containers.h" 10 #include "src/zone-containers.h"
(...skipping 233 matching lines...)
244 UsePositionHintType hint_type() const { 244 UsePositionHintType hint_type() const {
245 return HintTypeField::decode(flags_); 245 return HintTypeField::decode(flags_);
246 } 246 }
247 bool HasHint() const; 247 bool HasHint() const;
248 bool HintRegister(int* register_index) const; 248 bool HintRegister(int* register_index) const;
249 void ResolveHint(UsePosition* use_pos); 249 void ResolveHint(UsePosition* use_pos);
250 bool IsResolved() const { 250 bool IsResolved() const {
251 return hint_type() != UsePositionHintType::kUnresolved; 251 return hint_type() != UsePositionHintType::kUnresolved;
252 } 252 }
253 static UsePositionHintType HintTypeForOperand(const InstructionOperand& op); 253 static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);
254 void dump_hint(std::ostream& os, const RegisterConfiguration* config);
254 255
255 private: 256 private:
256 typedef BitField<UsePositionType, 0, 2> TypeField; 257 typedef BitField<UsePositionType, 0, 2> TypeField;
257 typedef BitField<UsePositionHintType, 2, 3> HintTypeField; 258 typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
258 typedef BitField<bool, 5, 1> RegisterBeneficialField; 259 typedef BitField<bool, 5, 1> RegisterBeneficialField;
259 typedef BitField<int32_t, 6, 6> AssignedRegisterField; 260 typedef BitField<int32_t, 6, 6> AssignedRegisterField;
260 261
261 InstructionOperand* const operand_; 262 InstructionOperand* const operand_;
262 void* hint_; 263 void* hint_;
263 UsePosition* next_; 264 UsePosition* next_;
264 LifetimePosition const pos_; 265 LifetimePosition const pos_;
265 uint32_t flags_; 266 uint32_t flags_;
266 267
267 DISALLOW_COPY_AND_ASSIGN(UsePosition); 268 DISALLOW_COPY_AND_ASSIGN(UsePosition);
268 }; 269 };
269 270
270 271
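UsePosition packs its use-position type, hint type, register-beneficial flag and assigned register into the single uint32_t flags_ above, using V8's BitField template (static encode/decode/update over a shift and a width). The standalone sketch below illustrates that layout (2 + 3 + 1 + 6 bits) with a hypothetical MiniBitField stand-in; it is an illustration of the encoding scheme, not the actual V8 utility.

#include <cassert>
#include <cstdint>

// Hypothetical stand-in for V8's BitField<T, shift, size>; illustration only.
template <typename T, int shift, int size>
struct MiniBitField {
  static uint32_t mask() { return ((1u << size) - 1) << shift; }
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << shift;
  }
  static T decode(uint32_t packed) {
    return static_cast<T>((packed & mask()) >> shift);
  }
  static uint32_t update(uint32_t packed, T value) {
    return (packed & ~mask()) | encode(value);
  }
};

// Mirrors the layout declared above: 2 bits of use-position type, 3 bits of
// hint type, one "register beneficial" bit, 6 bits of assigned register.
typedef MiniBitField<int, 0, 2> TypeBits;
typedef MiniBitField<int, 2, 3> HintTypeBits;
typedef MiniBitField<bool, 5, 1> RegisterBeneficialBit;
typedef MiniBitField<int32_t, 6, 6> AssignedRegisterBits;

int main() {
  uint32_t flags = 0;
  flags = AssignedRegisterBits::update(flags, 11);
  flags = RegisterBeneficialBit::update(flags, true);
  assert(AssignedRegisterBits::decode(flags) == 11);
  assert(RegisterBeneficialBit::decode(flags));
  assert(TypeBits::decode(flags) == 0);  // untouched bits stay zero
  return 0;
}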
271 class SpillRange; 272 class SpillRange;
273 class LiveRangeGroup;
272 274
273 275
274 // Representation of SSA values' live ranges as a collection of (continuous) 276 // Representation of SSA values' live ranges as a collection of (continuous)
275 // intervals over the instruction ordering. 277 // intervals over the instruction ordering.
276 class LiveRange final : public ZoneObject { 278 class LiveRange final : public ZoneObject {
277 public: 279 public:
278 explicit LiveRange(int id, MachineType machine_type); 280 explicit LiveRange(int id, MachineType machine_type);
279 281
280 UseInterval* first_interval() const { return first_interval_; } 282 UseInterval* first_interval() const { return first_interval_; }
281 UsePosition* first_pos() const { return first_pos_; } 283 UsePosition* first_pos() const { return first_pos_; }
(...skipping 52 matching lines...)
334 // range and which follows both start and last processed use position 336 // range and which follows both start and last processed use position
335 UsePosition* NextUsePositionRegisterIsBeneficial( 337 UsePosition* NextUsePositionRegisterIsBeneficial(
336 LifetimePosition start) const; 338 LifetimePosition start) const;
337 339
338 // Returns use position for which register is beneficial in this live 340 // Returns use position for which register is beneficial in this live
339 // range and which precedes start. 341 // range and which precedes start.
340 UsePosition* PreviousUsePositionRegisterIsBeneficial( 342 UsePosition* PreviousUsePositionRegisterIsBeneficial(
341 LifetimePosition start) const; 343 LifetimePosition start) const;
342 344
343 // Can this live range be spilled at this position. 345 // Can this live range be spilled at this position.
344 bool CanBeSpilled(LifetimePosition pos) const; 346 bool CanBeSpilled(LifetimePosition pos) const { return CanBeSplit(pos); }
347
348 // Can this live range be split at this position.
349 bool CanBeSplit(LifetimePosition pos) const;
345 350
346 // Split this live range at the given position which must follow the start of 351 // Split this live range at the given position which must follow the start of
347 // the range. 352 // the range.
348 // All uses following the given position will be moved from this 353 // All uses following the given position will be moved from this
349 // live range to the result live range. 354 // live range to the result live range.
350 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); 355 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone);
351 356
352 // Returns nullptr when no register is hinted, otherwise sets register_index. 357 // Returns nullptr when no register is hinted, otherwise sets register_index.
353 UsePosition* FirstHintPosition(int* register_index) const; 358 UsePosition* FirstHintPosition(int* register_index) const;
354 UsePosition* FirstHintPosition() const { 359 UsePosition* FirstHintPosition() const {
(...skipping 66 matching lines...)
421 const InstructionOperand& spill_op); 426 const InstructionOperand& spill_op);
422 void SetUseHints(int register_index); 427 void SetUseHints(int register_index);
423 void UnsetUseHints() { SetUseHints(kUnassignedRegister); } 428 void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
424 429
425 struct SpillAtDefinitionList; 430 struct SpillAtDefinitionList;
426 431
427 SpillAtDefinitionList* spills_at_definition() const { 432 SpillAtDefinitionList* spills_at_definition() const {
428 return spills_at_definition_; 433 return spills_at_definition_;
429 } 434 }
430 435
436 LiveRangeGroup* group() { return group_; }
437 void SetGroup(LiveRangeGroup* grp) { group_ = grp; }
438
439 float weight() const {
440 DCHECK(weight_ >= 0.0);
441 return weight_;
442 }
443
444 unsigned size() {
445 if (size_ < 0) RecalculateSize();
446 DCHECK(size_ > 0);
447 return static_cast<unsigned>(size_);
448 }
449
450 void InvalidateWeightAndSize() {
451 weight_ = -1.0;
452 size_ = -1;
453 }
454 void RecalculateWeight(InstructionSequence* code);
455 LifetimePosition GetFirstSplittablePosition();
456
431 private: 457 private:
458 void RecalculateSize();
432 void set_spill_type(SpillType value) { 459 void set_spill_type(SpillType value) {
433 bits_ = SpillTypeField::update(bits_, value); 460 bits_ = SpillTypeField::update(bits_, value);
434 } 461 }
435 462
436 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } 463 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
437 464
438 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; 465 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
439 void AdvanceLastProcessedMarker(UseInterval* to_start_of, 466 void AdvanceLastProcessedMarker(UseInterval* to_start_of,
440 LifetimePosition but_not_past) const; 467 LifetimePosition but_not_past) const;
441 468
(...skipping 19 matching lines...)
461 SpillRange* spill_range_; 488 SpillRange* spill_range_;
462 }; 489 };
463 SpillAtDefinitionList* spills_at_definition_; 490 SpillAtDefinitionList* spills_at_definition_;
464 // This is used as a cache, it doesn't affect correctness. 491 // This is used as a cache, it doesn't affect correctness.
465 mutable UseInterval* current_interval_; 492 mutable UseInterval* current_interval_;
466 // This is used as a cache, it doesn't affect correctness. 493 // This is used as a cache, it doesn't affect correctness.
467 mutable UsePosition* last_processed_use_; 494 mutable UsePosition* last_processed_use_;
468 // This is used as a cache, it's invalid outside of BuildLiveRanges. 495 // This is used as a cache, it's invalid outside of BuildLiveRanges.
469 mutable UsePosition* current_hint_position_; 496 mutable UsePosition* current_hint_position_;
470 497
498 // Used only by the greedy allocator.
499 float weight_;
500 int size_;
501 LiveRangeGroup* group_;
471 DISALLOW_COPY_AND_ASSIGN(LiveRange); 502 DISALLOW_COPY_AND_ASSIGN(LiveRange);
472 }; 503 };
473 504
474 505
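Per the header comment above, a LiveRange is a collection of continuous half-open intervals over the instruction ordering, and SplitAt moves every interval and use from the split position onward into the result range. A toy sketch of that splitting contract, with plain ints standing in for LifetimePosition and all names invented for illustration:

#include <cassert>
#include <utility>
#include <vector>

struct ToyRange {
  // Sorted, disjoint, half-open [start, end) intervals.
  std::vector<std::pair<int, int>> intervals;

  // Split at 'pos': everything from 'pos' onward moves into the result.
  ToyRange SplitAt(int pos) {
    ToyRange result;
    std::vector<std::pair<int, int>> kept;
    for (auto iv : intervals) {
      if (iv.second <= pos) {
        kept.push_back(iv);               // entirely before the split
      } else if (iv.first >= pos) {
        result.intervals.push_back(iv);   // entirely after the split
      } else {
        kept.push_back({iv.first, pos});  // interval straddles the split
        result.intervals.push_back({pos, iv.second});
      }
    }
    intervals = kept;
    return result;
  }
};

int main() {
  ToyRange r;
  r.intervals = {{4, 10}, {14, 18}};
  ToyRange tail = r.SplitAt(8);
  assert(r.intervals.size() == 1 && r.intervals[0].second == 8);
  assert(tail.intervals.size() == 2 && tail.intervals[0].first == 8);
  return 0;
}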
506 class LiveRangeGroup final : public ZoneObject {
507 public:
508 explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
509 ZoneSet<LiveRange*>& ranges() { return ranges_; }
510
511 private:
512 ZoneSet<LiveRange*> ranges_;
513
514 DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
515 };
516
475 struct PrintableLiveRange { 517 struct PrintableLiveRange {
476 const RegisterConfiguration* register_configuration_; 518 const RegisterConfiguration* register_configuration_;
477 const LiveRange* range_; 519 const LiveRange* range_;
478 }; 520 };
479 521
480 522
481 std::ostream& operator<<(std::ostream& os, 523 std::ostream& operator<<(std::ostream& os,
482 const PrintableLiveRange& printable_range); 524 const PrintableLiveRange& printable_range);
483 525
484 526
485 class SpillRange final : public ZoneObject { 527 class SpillRange final : public ZoneObject {
486 public: 528 public:
487 static const int kUnassignedSlot = -1; 529 static const int kUnassignedSlot = -1;
488 SpillRange(LiveRange* range, Zone* zone); 530 SpillRange(LiveRange* range, Zone* zone);
489 531
490 UseInterval* interval() const { return use_interval_; } 532 UseInterval* interval() const { return use_interval_; }
491 // Currently, only 4 or 8 byte slots are supported. 533 // Currently, only 4 or 8 byte slots are supported.
492 int ByteWidth() const; 534 int ByteWidth() const;
493 bool IsEmpty() const { return live_ranges_.empty(); } 535 bool IsEmpty() const { return live_ranges_.empty(); }
494 bool TryMerge(SpillRange* other); 536 bool TryMerge(SpillRange* other);
495 537
496 void set_assigned_slot(int index) { 538 void set_assigned_slot(int index) {
497 DCHECK_EQ(kUnassignedSlot, assigned_slot_); 539 DCHECK_EQ(kUnassignedSlot, assigned_slot_);
498 assigned_slot_ = index; 540 assigned_slot_ = index;
499 } 541 }
542 bool IsSlotAssigned() { return assigned_slot_ != kUnassignedSlot; }
jvoung (off chromium) 2015/06/08 18:59:22 const
Mircea Trofin 2015/06/09 02:56:02 Done.
500 int assigned_slot() { 543 int assigned_slot() {
501 DCHECK_NE(kUnassignedSlot, assigned_slot_); 544 DCHECK_NE(kUnassignedSlot, assigned_slot_);
502 return assigned_slot_; 545 return assigned_slot_;
503 } 546 }
504 547
505 private: 548 private:
506 LifetimePosition End() const { return end_position_; } 549 LifetimePosition End() const { return end_position_; }
507 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } 550 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; }
508 bool IsIntersectingWith(SpillRange* other) const; 551 bool IsIntersectingWith(SpillRange* other) const;
509 // Merge intervals, making sure the use intervals are sorted 552 // Merge intervals, making sure the use intervals are sorted
510 void MergeDisjointIntervals(UseInterval* other); 553 void MergeDisjointIntervals(UseInterval* other);
511 554
512 ZoneVector<LiveRange*> live_ranges_; 555 ZoneVector<LiveRange*> live_ranges_;
513 UseInterval* use_interval_; 556 UseInterval* use_interval_;
514 LifetimePosition end_position_; 557 LifetimePosition end_position_;
515 int assigned_slot_; 558 int assigned_slot_;
516 559
517 DISALLOW_COPY_AND_ASSIGN(SpillRange); 560 DISALLOW_COPY_AND_ASSIGN(SpillRange);
518 }; 561 };
519 562
520 563
564 std::ostream& operator<<(std::ostream& os, SpillRange* range);
565
566
521 class RegisterAllocationData final : public ZoneObject { 567 class RegisterAllocationData final : public ZoneObject {
522 public: 568 public:
523 class PhiMapValue : public ZoneObject { 569 class PhiMapValue : public ZoneObject {
524 public: 570 public:
525 PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone); 571 PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);
526 572
527 const PhiInstruction* phi() const { return phi_; } 573 const PhiInstruction* phi() const { return phi_; }
528 const InstructionBlock* block() const { return block_; } 574 const InstructionBlock* block() const { return block_; }
529 575
530 // For hinting. 576 // For hinting.
(...skipping 190 matching lines...)
721 InstructionOperand* operand) { 767 InstructionOperand* operand) {
722 Use(block_start, position, operand, nullptr, UsePositionHintType::kNone); 768 Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
723 } 769 }
724 770
725 RegisterAllocationData* const data_; 771 RegisterAllocationData* const data_;
726 ZoneMap<InstructionOperand*, UsePosition*> phi_hints_; 772 ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;
727 773
728 DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder); 774 DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
729 }; 775 };
730 776
777 struct AllocatorStats {
778 unsigned spills;
779 unsigned wins;
780 unsigned losses_no_eviction;
781 unsigned losses_after_eviction;
782 unsigned good_split_attempts;
783 unsigned good_split_successes;
784 unsigned good_split_smart_above;
785 unsigned good_split_smart_below;
786 unsigned good_split_above;
787 unsigned good_split_below;
788 unsigned groups_attempted;
789 unsigned groups_succeeded;
790 unsigned groups_allocated;
791 void reset() { *this = {0}; }
792 };
793
794
795 std::ostream& operator<<(std::ostream& os, const AllocatorStats& stats);
796
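The reset() above relies on aggregate assignment: AllocatorStats has no user-provided constructor, so assigning a brace-initialized temporary zeroes every counter, not just the first. A minimal standalone demonstration of the same idiom on an invented struct:

#include <cassert>

struct ToyStats {
  unsigned spills;
  unsigned wins;
  unsigned losses;
  // Assigning {0} value-initializes all remaining members to zero as well.
  void reset() { *this = {0}; }
};

int main() {
  ToyStats s;
  s.spills = 3;
  s.wins = 7;
  s.losses = 2;
  s.reset();
  assert(s.spills == 0 && s.wins == 0 && s.losses == 0);
  return 0;
}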
731 797
732 class RegisterAllocator : public ZoneObject { 798 class RegisterAllocator : public ZoneObject {
733 public: 799 public:
734 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); 800 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);
735 801
736 protected: 802 protected:
803 AllocatorStats stats_;
737 RegisterAllocationData* data() const { return data_; } 804 RegisterAllocationData* data() const { return data_; }
738 InstructionSequence* code() const { return data()->code(); } 805 InstructionSequence* code() const { return data()->code(); }
739 RegisterKind mode() const { return mode_; } 806 RegisterKind mode() const { return mode_; }
740 int num_registers() const { return num_registers_; } 807 int num_registers() const { return num_registers_; }
741 808
742 Zone* allocation_zone() const { return data()->allocation_zone(); } 809 Zone* allocation_zone() const { return data()->allocation_zone(); }
743 810
744 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } 811 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); }
745 812
746 // Split the given range at the given position. 813 // Split the given range at the given position.
(...skipping 13 matching lines...)
760 // loop covered by this interval or the latest possible position. 827 // loop covered by this interval or the latest possible position.
761 LifetimePosition FindOptimalSplitPos(LifetimePosition start, 828 LifetimePosition FindOptimalSplitPos(LifetimePosition start,
762 LifetimePosition end); 829 LifetimePosition end);
763 830
764 void Spill(LiveRange* range); 831 void Spill(LiveRange* range);
765 832
766 // If we are trying to spill a range inside the loop try to 833 // If we are trying to spill a range inside the loop try to
767 // hoist spill position out to the point just before the loop. 834 // hoist spill position out to the point just before the loop.
768 LifetimePosition FindOptimalSpillingPos(LiveRange* range, 835 LifetimePosition FindOptimalSpillingPos(LiveRange* range,
769 LifetimePosition pos); 836 LifetimePosition pos);
837 const char* RegisterName(int allocation_index) const;
770 838
771 private: 839 private:
772 RegisterAllocationData* const data_; 840 RegisterAllocationData* const data_;
773 const RegisterKind mode_; 841 const RegisterKind mode_;
774 const int num_registers_; 842 const int num_registers_;
775 843
776 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); 844 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
777 }; 845 };
778 846
779 847
780 class LinearScanAllocator final : public RegisterAllocator { 848 class LinearScanAllocator final : public RegisterAllocator {
781 public: 849 public:
782 LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind, 850 LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
783 Zone* local_zone); 851 Zone* local_zone);
784 852
785 // Phase 4: compute register assignments. 853 // Phase 4: compute register assignments.
786 void AllocateRegisters(); 854 void AllocateRegisters();
787 855
788 private: 856 private:
789 const char* RegisterName(int allocation_index) const;
790
791 ZoneVector<LiveRange*>& unhandled_live_ranges() { 857 ZoneVector<LiveRange*>& unhandled_live_ranges() {
792 return unhandled_live_ranges_; 858 return unhandled_live_ranges_;
793 } 859 }
794 ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; } 860 ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
795 ZoneVector<LiveRange*>& inactive_live_ranges() { 861 ZoneVector<LiveRange*>& inactive_live_ranges() {
796 return inactive_live_ranges_; 862 return inactive_live_ranges_;
797 } 863 }
798 864
799 void SetLiveRangeAssignedRegister(LiveRange* range, int reg); 865 void SetLiveRangeAssignedRegister(LiveRange* range, int reg);
800 866
(...skipping 32 matching lines...)
833 ZoneVector<LiveRange*> active_live_ranges_; 899 ZoneVector<LiveRange*> active_live_ranges_;
834 ZoneVector<LiveRange*> inactive_live_ranges_; 900 ZoneVector<LiveRange*> inactive_live_ranges_;
835 901
836 #ifdef DEBUG 902 #ifdef DEBUG
837 LifetimePosition allocation_finger_; 903 LifetimePosition allocation_finger_;
838 #endif 904 #endif
839 905
840 DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator); 906 DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
841 }; 907 };
842 908
909
910 // Storage detail for CoalescedLiveRanges.
911 struct AllocatedInterval {
912 LifetimePosition start;
913 LifetimePosition end;
914 LiveRange* range;
915 struct Comparer : public std::binary_function<AllocatedInterval,
916 AllocatedInterval, bool> {
917 bool operator()(const AllocatedInterval& x,
918 const AllocatedInterval& y) const {
919 return x.start < y.start;
920 }
921 };
922 };
923
924
925 typedef ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>::const_iterator
jvoung (off chromium) 2015/06/08 18:59:22 "ZoneSet<AllocatedInterval, AllocatedInterval::Com
Mircea Trofin 2015/06/09 02:56:02 Done.
926 interval_iterator;
Jarin 2015/06/09 15:00:41 The interval_iterator typedef seems to be local to
927
928
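AllocatedInterval::Comparer orders intervals by their start position, so the ZoneSet used for storage behaves like a map keyed on interval start. The standalone sketch below (std::set in place of ZoneSet, invented names, not the CoalescedLiveRanges implementation) shows the usual way such a start-sorted set is probed for overlaps with a query interval: check the first stored interval starting at or after the query's start, plus its predecessor, which may extend past that start.

#include <cassert>
#include <iterator>
#include <set>

struct Interval {
  int start;
  int end;
  struct Comparer {
    bool operator()(const Interval& x, const Interval& y) const {
      return x.start < y.start;
    }
  };
};

static bool Overlaps(const Interval& a, int qstart, int qend) {
  return a.start < qend && qstart < a.end;
}

int main() {
  std::set<Interval, Interval::Comparer> storage = {{2, 5}, {8, 12}, {20, 25}};
  int qstart = 4, qend = 9;
  auto it = storage.lower_bound({qstart, qstart});  // first start >= qstart
  bool conflict = (it != storage.end() && Overlaps(*it, qstart, qend));
  if (!conflict && it != storage.begin()) {
    conflict = Overlaps(*std::prev(it), qstart, qend);  // predecessor case
  }
  assert(conflict);  // {8, 12} overlaps the query [4, 9)
  return 0;
}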
843 class CoalescedLiveRanges; 929 class CoalescedLiveRanges;
844 930
845 931
932 // Iterator over allocated live ranges (CoalescedLiveRanges) that conflict with
933 // a given one. See CoalescedLiveRanges::first_conflict.
934 // Note that iterating may produce non-consecutively repeating live ranges.
935 struct conflict_iterator {
936 friend class CoalescedLiveRanges;
937 friend bool operator==(const conflict_iterator& first,
938 const conflict_iterator& second);
939 conflict_iterator() : query_(nullptr), storage_(nullptr) {}
940
941 LiveRange* operator->() { return pos_->range; }
942
943 LiveRange* operator*() { return pos_->range; }
944
945 conflict_iterator& operator++();
946 bool IsValid() const { return storage_ != nullptr; }
947
948 private:
949 static bool Intersects(LifetimePosition a_start, LifetimePosition a_end,
950 LifetimePosition b_start, LifetimePosition b_end) {
951 if (a_start == b_start) return true;
952 if (a_start < b_start) return a_end > b_start;
953 return Intersects(b_start, b_end, a_start, a_end);
954 }
955
956 conflict_iterator(UseInterval* q,
jvoung (off chromium) 2015/06/08 18:59:22 Is this ctor variant used? Not knowing much it see
Mircea Trofin 2015/06/09 02:56:02 Good catch, it serves no purpose. Removed it.
957 ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>* s,
958 interval_iterator& it)
959 : query_(nullptr), storage_(nullptr), pos_(it) {}
960
961 conflict_iterator(
962 UseInterval* query,
963 ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>* storage)
964 : query_(query), storage_(storage) {
965 InitializeForNewQuery();
966 }
967
968 void MoveToEnd() {
969 query_ = nullptr;
970 storage_ = nullptr;
971 }
972
973 void InitializeForNewQuery();
974
975 AllocatedInterval AsAllocatedInterval(LifetimePosition pos) {
976 return {pos, LifetimePosition::Invalid(), nullptr};
977 }
978
979 UseInterval* query_;
980 ZoneSet<AllocatedInterval, AllocatedInterval::Comparer>* storage_;
981 interval_iterator pos_;
982 };
983
984 bool operator==(const conflict_iterator& first,
985 const conflict_iterator& second);
986 bool operator!=(const conflict_iterator& first,
987 const conflict_iterator& second);
988
989
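conflict_iterator::Intersects above treats intervals as half-open [start, end): two intervals conflict when the one that starts earlier ends strictly after the other one starts, and equal starts always count as a conflict. A standalone check of those cases, using plain ints in place of LifetimePosition:

#include <cassert>

// Same three-way case split as the Intersects helper declared above.
static bool Intersects(int a_start, int a_end, int b_start, int b_end) {
  if (a_start == b_start) return true;
  if (a_start < b_start) return a_end > b_start;
  return Intersects(b_start, b_end, a_start, a_end);
}

int main() {
  assert(Intersects(3, 7, 3, 4));   // equal starts always conflict
  assert(Intersects(3, 7, 5, 9));   // a starts first and runs past b's start
  assert(!Intersects(3, 5, 5, 9));  // a ends exactly where b starts: no overlap
  assert(Intersects(5, 9, 3, 7));   // symmetric case via the recursive swap
  return 0;
}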
846 // A variant of the LLVM Greedy Register Allocator. See 990 // A variant of the LLVM Greedy Register Allocator. See
847 // http://blog.llvm.org/2011/09/greedy-register-allocation-in-llvm-30.html 991 // http://blog.llvm.org/2011/09/greedy-register-allocation-in-llvm-30.html
848 class GreedyAllocator final : public RegisterAllocator { 992 class GreedyAllocator final : public RegisterAllocator {
849 public: 993 public:
850 explicit GreedyAllocator(RegisterAllocationData* data, RegisterKind kind, 994 explicit GreedyAllocator(RegisterAllocationData* data, RegisterKind kind,
851 Zone* local_zone); 995 Zone* local_zone);
852 996
853 void AllocateRegisters(); 997 void AllocateRegisters();
854 998
855 private: 999 private:
856 LifetimePosition GetSplittablePos(LifetimePosition pos); 1000 void AllocateRange(LiveRange* current);
1001 void AllocateGroup(LiveRangeGroup* grp);
1002 bool TryAllocateGroup(LiveRangeGroup* grp);
1003 bool TryAllocateGroupAtRegister(unsigned reg, LiveRangeGroup* grp);
1004
1005 struct PQueueElem {
1006 bool IsGroup() { return is_grp_; }
jvoung (off chromium) 2015/06/08 18:59:21 const
1007 explicit PQueueElem(LiveRange* range) : is_grp_(false) {
1008 storage_.range_ = range;
1009 }
1010
1011 explicit PQueueElem(LiveRangeGroup* grp) : is_grp_(true) {
1012 storage_.grp_ = grp;
1013 }
1014
1015
jvoung (off chromium) 2015/06/08 18:59:22 can remove an extra newline here and line 102
1016 LiveRange* get_range() {
jvoung (off chromium) 2015/06/08 18:59:21 const
1017 if (is_grp_) return nullptr;
1018 return storage_.range_;
1019 }
1020
1021
1022 LiveRangeGroup* get_group() {
1023 if (is_grp_) return storage_.grp_;
1024 return nullptr;
1025 }
1026
1027 private:
1028 bool is_grp_;
1029 union {
1030 LiveRange* range_;
1031 LiveRangeGroup* grp_;
1032 } storage_;
1033 };
1034
1035 void GroupAndEnqueue();
857 const RegisterConfiguration* config() const { return data()->config(); } 1036 const RegisterConfiguration* config() const { return data()->config(); }
858 Zone* local_zone() const { return local_zone_; } 1037 Zone* local_zone() const { return local_zone_; }
859 bool TryReuseSpillForPhi(LiveRange* range);
860 int GetHintedRegister(LiveRange* range);
861 1038
862 typedef ZonePriorityQueue<std::pair<unsigned, LiveRange*>> PQueue; 1039 struct PQueueElemComparer
1040 : public std::binary_function<std::pair<unsigned, PQueueElem>,
1041 std::pair<unsigned, PQueueElem>, bool> {
1042 bool operator()(const std::pair<unsigned, PQueueElem>& x,
1043 const std::pair<unsigned, PQueueElem>& y) const {
1044 return x.first < y.first;
1045 }
1046 };
863 1047
864 unsigned GetLiveRangeSize(LiveRange* range); 1048 typedef ZonePriorityQueue<std::pair<unsigned, PQueueElem>, PQueueElemComparer>
1049 PQueue;
1050
865 void Enqueue(LiveRange* range); 1051 void Enqueue(LiveRange* range);
1052 void Enqueue(LiveRangeGroup* group);
866 1053
867 void Evict(LiveRange* range); 1054 void Evict(LiveRange* range);
868 float CalculateSpillWeight(LiveRange* range); 1055 void EvictAll(int reg, const conflict_iterator& first_conflict);
869 float CalculateMaxSpillWeight(const ZoneSet<LiveRange*>& ranges);
870 1056
1057 // Find the register with the minimum spill weight. If that is still higher
1058 // or equal to competing, return -1.
1059 int FindRegisterToEvictForRange(
1060 conflict_iterator
1061 all_conflicts[RegisterConfiguration::kMaxDoubleRegisters],
1062 float competing);
871 1063
872 bool TryAllocate(LiveRange* current, ZoneSet<LiveRange*>* conflicting); 1064 // Same as above, for all the registers in a group.
873 bool TryAllocatePhysicalRegister(unsigned reg_id, LiveRange* range, 1065 int FindRegisterToEvictForGroup(LiveRangeGroup* grp, float competing);
874 ZoneSet<LiveRange*>* conflicting); 1066
875 bool HandleSpillOperands(LiveRange* range); 1067 template <typename TIter>
876 void AllocateBlockedRange(LiveRange* current, LifetimePosition pos, 1068 float CalculateMaxSpillWeight(const TIter& begin, const TIter& end);
877 bool spill); 1069
1070 bool HandleSpillOperands(LiveRange* range, LiveRange** remaining);
1071 void HandleBlockedRange(LiveRange* current);
1072 bool TrySplitBeforeLoops(LiveRange* current);
1073 bool TrySplitAfterLoops(LiveRange* current);
1074 bool TrySplitAroundCalls(LiveRange* range);
878 1075
879 LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start, 1076 LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start,
880 LifetimePosition until, LifetimePosition end); 1077 LifetimePosition until, LifetimePosition end);
881 void AssignRangeToRegister(int reg_id, LiveRange* range); 1078 void AssignRangeToRegister(int reg_id, LiveRange* range);
882 1079
883 LifetimePosition FindProgressingSplitPosition(LiveRange* range,
884 bool* is_spill_pos);
885
886 Zone* local_zone_; 1080 Zone* local_zone_;
887 ZoneVector<CoalescedLiveRanges*> allocations_; 1081 ZoneVector<CoalescedLiveRanges*> allocations_;
888 PQueue queue_; 1082 PQueue queue_;
889 DISALLOW_COPY_AND_ASSIGN(GreedyAllocator); 1083 DISALLOW_COPY_AND_ASSIGN(GreedyAllocator);
890 }; 1084 };
891 1085
892 1086
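The comment on FindRegisterToEvictForRange above describes the eviction policy: among the candidate registers, pick the one whose conflicting ranges carry the smallest maximum spill weight, and return -1 if even that weight is not strictly below the competing range's weight. A toy, self-contained version of that decision, with invented names and a plain vector of precomputed weights in place of the conflict iterators:

#include <cassert>
#include <vector>

// max_conflict_weight[reg] is the largest spill weight among the ranges that
// conflict with the candidate at register 'reg'; 'competing' is the weight of
// the range we are trying to place.
int FindRegisterToEvict(const std::vector<float>& max_conflict_weight,
                        float competing) {
  int best_reg = -1;
  float best_weight = 0.0f;
  for (int reg = 0; reg < static_cast<int>(max_conflict_weight.size()); ++reg) {
    float w = max_conflict_weight[reg];
    if (best_reg == -1 || w < best_weight) {
      best_reg = reg;
      best_weight = w;
    }
  }
  // Evicting is only worthwhile if the cheapest conflicts are strictly
  // cheaper than the competing range.
  if (best_reg != -1 && best_weight >= competing) return -1;
  return best_reg;
}

int main() {
  // Registers 0..2 have conflict weights 5.0, 2.5, 7.0; a range of weight 3.0
  // can evict register 1's conflicts, but a range of weight 2.0 cannot.
  std::vector<float> weights = {5.0f, 2.5f, 7.0f};
  assert(FindRegisterToEvict(weights, 3.0f) == 1);
  assert(FindRegisterToEvict(weights, 2.0f) == -1);
  return 0;
}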
893 class SpillSlotLocator final : public ZoneObject { 1087 class SpillSlotLocator final : public ZoneObject {
894 public: 1088 public:
895 explicit SpillSlotLocator(RegisterAllocationData* data); 1089 explicit SpillSlotLocator(RegisterAllocationData* data);
(...skipping 70 matching lines...)
966 RegisterAllocationData* const data_; 1160 RegisterAllocationData* const data_;
967 1161
968 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); 1162 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
969 }; 1163 };
970 1164
971 } // namespace compiler 1165 } // namespace compiler
972 } // namespace internal 1166 } // namespace internal
973 } // namespace v8 1167 } // namespace v8
974 1168
975 #endif // V8_REGISTER_ALLOCATOR_H_ 1169 #endif // V8_REGISTER_ALLOCATOR_H_