Chromium Code Reviews

Unified Diff: src/compiler/register-allocator.h

Issue 1612013002: Revert of [turbofan] optimize spills in deferred blocks (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 11 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_REGISTER_ALLOCATOR_H_
 #define V8_REGISTER_ALLOCATOR_H_

 #include "src/compiler/instruction.h"
 #include "src/ostreams.h"
 #include "src/register-configuration.h"
(...skipping 561 matching lines...)
                         const InstructionOperand& operand,
                         bool might_be_duplicated);

   // If all the children of this range are spilled in deferred blocks, and if
   // for any non-spilled child with a use position requiring a slot, that range
   // is contained in a deferred block, mark the range as
   // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
   // and instead let the LiveRangeConnector perform the spills within the
   // deferred blocks. If so, we insert here spills for non-spilled ranges
   // with slot use positions.
-  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
+  void MarkSpilledInDeferredBlock() {
     spill_start_index_ = -1;
     spilled_in_deferred_blocks_ = true;
     spill_move_insertion_locations_ = nullptr;
-    list_of_blocks_requiring_spill_operands_ =
-        new (zone) BitVector(total_block_count, zone);
   }

-  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
-                                   const InstructionOperand& spill_operand,
-                                   BitVector* necessary_spill_points);
+  bool TryCommitSpillInDeferredBlock(InstructionSequence* code,
+                                     const InstructionOperand& spill_operand);

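The comment in the hunk above spells out the condition under which a range may be treated as spilled only in deferred code: every spilled child must lie in a deferred block, and every non-spilled child that still needs a stack slot must itself be confined to deferred code. A minimal, self-contained sketch of that predicate, using simplified stand-in types rather than the real TopLevelLiveRange and UsePosition classes (all names below are illustrative, not V8 API):

#include <vector>

struct UsePos {
  bool requires_slot;  // this use needs a stack-slot operand
};

struct Range {
  bool spilled;            // this child was spilled by the allocator
  bool in_deferred_block;  // the child lies entirely inside deferred code
  std::vector<UsePos> uses;
};

// Returns true when spilling at the definition can be skipped, letting the
// spills be emitted inside the deferred blocks instead.
bool SpilledOnlyInDeferredBlocks(const std::vector<Range>& children) {
  for (const Range& child : children) {
    if (child.spilled) {
      // Every spilled child must sit in deferred code.
      if (!child.in_deferred_block) return false;
    } else {
      // A non-spilled child that still needs a slot must itself be confined
      // to deferred code; otherwise we have to spill at the definition.
      for (const UsePos& use : child.uses) {
        if (use.requires_slot && !child.in_deferred_block) return false;
      }
    }
  }
  return true;
}

The real pass works over TopLevelLiveRange children and UsePosition objects rather than these toy structs, but the shape of the check is the same.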
   TopLevelLiveRange* splintered_from() const { return splintered_from_; }
   bool IsSplinter() const { return splintered_from_ != nullptr; }
   bool MayRequireSpillRange() const {
     DCHECK(!IsSplinter());
     return !HasSpillOperand() && spill_range_ == nullptr;
   }
   void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
   int vreg() const { return vreg_; }

(...skipping 10 matching lines...)
   }

   int GetChildCount() const { return last_child_id_ + 1; }

   bool IsSpilledOnlyInDeferredBlocks() const {
     return spilled_in_deferred_blocks_;
   }

   struct SpillMoveInsertionList;

-  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
-    DCHECK(!IsSpilledOnlyInDeferredBlocks());
+  SpillMoveInsertionList* spill_move_insertion_locations() const {
     return spill_move_insertion_locations_;
   }
   TopLevelLiveRange* splinter() const { return splinter_; }
   void SetSplinter(TopLevelLiveRange* splinter) {
     DCHECK_NULL(splinter_);
     DCHECK_NOT_NULL(splinter);

     splinter_ = splinter;
     splinter->relative_id_ = GetNextChildId();
     splinter->set_spill_type(spill_type());
     splinter->SetSplinteredFrom(this);
   }

   void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
   bool has_preassigned_slot() const { return has_preassigned_slot_; }

-  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
-    DCHECK(IsSpilledOnlyInDeferredBlocks());
-    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
-  }
-
-  BitVector* GetListOfBlocksRequiringSpillOperands() const {
-    DCHECK(IsSpilledOnlyInDeferredBlocks());
-    return list_of_blocks_requiring_spill_operands_;
-  }
-
  private:
   void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

   typedef BitField<bool, 1, 1> HasSlotUseField;
   typedef BitField<bool, 2, 1> IsPhiField;
   typedef BitField<bool, 3, 1> IsNonLoopPhiField;
   typedef BitField<SpillType, 4, 2> SpillTypeField;

   int vreg_;
   int last_child_id_;
   TopLevelLiveRange* splintered_from_;
   union {
     // Correct value determined by spill_type()
     InstructionOperand* spill_operand_;
     SpillRange* spill_range_;
   };
-
-  union {
-    SpillMoveInsertionList* spill_move_insertion_locations_;
-    BitVector* list_of_blocks_requiring_spill_operands_;
-  };
-
+  SpillMoveInsertionList* spill_move_insertion_locations_;
   // TODO(mtrofin): generalize spilling after definition, currently specialized
   // just for spill in a single deferred block.
   bool spilled_in_deferred_blocks_;
   int spill_start_index_;
   UsePosition* last_pos_;
   TopLevelLiveRange* splinter_;
   bool has_preassigned_slot_;

   DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
 };
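The "Correct value determined by spill_type()" comment above marks a tagged union: which member of the union is valid is recorded separately (the SpillTypeField bit field above suggests where that tag lives). A self-contained sketch of the pattern, with simplified placeholder types rather than the actual V8 declarations:

#include <cassert>

enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };

struct InstructionOperand {};  // placeholder
struct SpillRange {};          // placeholder

class SpillStorage {
 public:
  SpillStorage()
      : spill_type_(SpillType::kNoSpillType), spill_operand_(nullptr) {}

  void SetSpillOperand(InstructionOperand* op) {
    spill_type_ = SpillType::kSpillOperand;
    spill_operand_ = op;
  }
  void SetSpillRange(SpillRange* range) {
    spill_type_ = SpillType::kSpillRange;
    spill_range_ = range;
  }

  InstructionOperand* GetSpillOperand() const {
    assert(spill_type_ == SpillType::kSpillOperand);  // tag guards the union
    return spill_operand_;
  }
  SpillRange* GetSpillRange() const {
    assert(spill_type_ == SpillType::kSpillRange);
    return spill_range_;
  }

 private:
  SpillType spill_type_;
  union {
    InstructionOperand* spill_operand_;  // valid iff tag is kSpillOperand
    SpillRange* spill_range_;            // valid iff tag is kSpillRange
  };
};

Asserting on the tag in each accessor keeps an accidental read of the inactive union member from going unnoticed.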
(...skipping 454 matching lines...)
   RegisterAllocationData* data() const { return data_; }

   bool SafePointsAreInOrder() const;

   RegisterAllocationData* const data_;

   DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
 };

-class LiveRangeBoundArray;
 // Insert moves of the form
 //
 //   Operand(child_(k+1)) = Operand(child_k)
 //
 // where child_k and child_(k+1) are consecutive children of a range (so
 // child_k->next() == child_(k+1)), and Operand(...) refers to the
 // assigned operand, be it a register or a slot.
 class LiveRangeConnector final : public ZoneObject {
  public:
   explicit LiveRangeConnector(RegisterAllocationData* data);
(...skipping 12 matching lines...)
   InstructionSequence* code() const { return data()->code(); }
   Zone* code_zone() const { return code()->zone(); }

   bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

   int ResolveControlFlow(const InstructionBlock* block,
                          const InstructionOperand& cur_op,
                          const InstructionBlock* pred,
                          const InstructionOperand& pred_op);

-  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
-                                    LiveRangeBoundArray* array,
-                                    Zone* temp_zone);
-
   RegisterAllocationData* const data_;

   DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
 };
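The comment above LiveRangeConnector describes inserting moves of the form Operand(child_(k+1)) = Operand(child_k) between consecutive children of a split live range. A simplified, self-contained sketch of that connection step follows; the types are illustrative stand-ins, not the V8 classes, and the real connector also resolves control flow across blocks and picks gap positions, which the sketch omits:

#include <string>
#include <utility>
#include <vector>

struct Operand {
  std::string name;  // e.g. "rax" or "[sp+16]"
  bool operator!=(const Operand& other) const { return name != other.name; }
};

struct Child {
  Operand assigned;  // register or stack slot assigned to this child range
};

// Collects the (source, destination) pairs that need a connecting move:
// Operand(child_(k+1)) = Operand(child_k) wherever the assignment changes.
std::vector<std::pair<Operand, Operand>> ConnectChildren(
    const std::vector<Child>& children) {
  std::vector<std::pair<Operand, Operand>> moves;
  for (size_t k = 0; k + 1 < children.size(); ++k) {
    const Operand& cur = children[k].assigned;
    const Operand& next = children[k + 1].assigned;
    if (next != cur) {
      moves.emplace_back(cur, next);  // move cur -> next at the split point
    }
  }
  return moves;
}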

 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_REGISTER_ALLOCATOR_H_