| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_REGISTER_ALLOCATOR_H_ | 5 #ifndef V8_REGISTER_ALLOCATOR_H_ |
| 6 #define V8_REGISTER_ALLOCATOR_H_ | 6 #define V8_REGISTER_ALLOCATOR_H_ |
| 7 | 7 |
| 8 #include "src/compiler/instruction.h" | 8 #include "src/compiler/instruction.h" |
| 9 #include "src/ostreams.h" | 9 #include "src/ostreams.h" |
| 10 #include "src/register-configuration.h" | 10 #include "src/register-configuration.h" |
| (...skipping 477 matching lines...) | (...skipping 477 matching lines...) |
| 488 void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone); | 488 void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone); |
| 489 void AddUsePosition(UsePosition* pos); | 489 void AddUsePosition(UsePosition* pos); |
| 490 | 490 |
| 491 // Shorten the most recently added interval by setting a new start. | 491 // Shorten the most recently added interval by setting a new start. |
| 492 void ShortenTo(LifetimePosition start); | 492 void ShortenTo(LifetimePosition start); |
| 493 | 493 |
| 494 // Detaches between start and end, and attributes the resulting range to | 494 // Detaches between start and end, and attributes the resulting range to |
| 495 // result. | 495 // result. |
| 496 // The current range is pointed to as "splintered_from". No parent/child | 496 // The current range is pointed to as "splintered_from". No parent/child |
| 497 // relationship is established between this and result. | 497 // relationship is established between this and result. |
| 498 void Splinter(LifetimePosition start, LifetimePosition end, | 498 void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone); |
| 499 TopLevelLiveRange* result, Zone* zone); | |
| 500 | 499 |
| 501 // Assuming other was splintered from this range, embeds other and its | 500 // Assuming other was splintered from this range, embeds other and its |
| 502 // children as part of the children sequence of this range. | 501 // children as part of the children sequence of this range. |
| 503 void Merge(TopLevelLiveRange* other, Zone* zone); | 502 void Merge(TopLevelLiveRange* other, Zone* zone); |
| 504 | 503 |
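
A minimal usage sketch of the splintering flow implied by these declarations. It is illustrative only: the driver function and its parameters are assumptions, not part of this patch; only Splinter(), splinter() and Merge() come from this header. Note that with this change the detached range is recorded on the original range via SetSplinter()/splinter() (added further down) rather than returned through a `result` out-parameter:

```cpp
// Illustrative sketch only. SplinterAroundRegion and its arguments are
// hypothetical; only Splinter(), splinter() and Merge() are declared above.
void SplinterAroundRegion(TopLevelLiveRange* range, LifetimePosition start,
                          LifetimePosition end, Zone* zone) {
  // Detach the piece of the range that overlaps [start, end). With this
  // patch the detached piece is remembered on the range itself.
  range->Splinter(start, end, zone);

  TopLevelLiveRange* splinter = range->splinter();
  if (splinter == nullptr) return;  // Nothing overlapped the region.

  // ... allocate the splinter independently (e.g. keep it on the stack) ...

  // Afterwards, fold the splinter's children back into the original range
  // so later phases see a single child sequence.
  range->Merge(splinter, zone);
}
```
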
| 505 // Spill range management. | 504 // Spill range management. |
| 506 void SetSpillRange(SpillRange* spill_range); | 505 void SetSpillRange(SpillRange* spill_range); |
| 507 enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange }; | 506 enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange }; |
| 508 void set_spill_type(SpillType value) { | 507 void set_spill_type(SpillType value) { |
| 509 bits_ = SpillTypeField::update(bits_, value); | 508 bits_ = SpillTypeField::update(bits_, value); |
| (...skipping 23 matching lines...) | (...skipping 23 matching lines...) |
| 533 | 532 |
| 534 AllocatedOperand GetSpillRangeOperand() const; | 533 AllocatedOperand GetSpillRangeOperand() const; |
| 535 | 534 |
| 536 void SpillAtDefinition(Zone* zone, int gap_index, | 535 void SpillAtDefinition(Zone* zone, int gap_index, |
| 537 InstructionOperand* operand); | 536 InstructionOperand* operand); |
| 538 void SetSpillOperand(InstructionOperand* operand); | 537 void SetSpillOperand(InstructionOperand* operand); |
| 539 void SetSpillStartIndex(int start) { | 538 void SetSpillStartIndex(int start) { |
| 540 spill_start_index_ = Min(start, spill_start_index_); | 539 spill_start_index_ = Min(start, spill_start_index_); |
| 541 } | 540 } |
| 542 | 541 |
| 543 void SetSplinteredFrom(TopLevelLiveRange* splinter_parent); | |
| 544 void CommitSpillsAtDefinition(InstructionSequence* sequence, | 542 void CommitSpillsAtDefinition(InstructionSequence* sequence, |
| 545 const InstructionOperand& operand, | 543 const InstructionOperand& operand, |
| 546 bool might_be_duplicated); | 544 bool might_be_duplicated); |
| 547 | 545 |
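
The spill hooks above follow a record-then-commit pattern. A rough sketch, assuming a caller that already knows the definition's gap index and that a spill range has been assigned (the function name and parameters below are placeholders, not from this patch):

```cpp
// Sketch only: RecordAndCommitSpill, gap_index and def_operand are
// hypothetical. AllocatedOperand is assumed to be usable where an
// InstructionOperand is expected.
void RecordAndCommitSpill(TopLevelLiveRange* range, InstructionSequence* code,
                          InstructionOperand* def_operand, int gap_index,
                          Zone* zone) {
  // While building live ranges: remember that the value may have to be
  // spilled right after its definition.
  range->SpillAtDefinition(zone, gap_index, def_operand);
  range->SetSpillStartIndex(gap_index);

  // After allocation, once a spill range/slot has been assigned, turn the
  // recorded spills into actual moves (optionally skipping duplicates).
  AllocatedOperand slot = range->GetSpillRangeOperand();
  range->CommitSpillsAtDefinition(code, slot, /* might_be_duplicated */ true);
}
```
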
| 548 // If all the children of this range are spilled in deferred blocks, and if | 546 // If all the children of this range are spilled in deferred blocks, and if |
| 549 // for any non-spilled child with a use position requiring a slot, that range | 547 // for any non-spilled child with a use position requiring a slot, that range |
| 550 // is contained in a deferred block, mark the range as | 548 // is contained in a deferred block, mark the range as |
| 551 // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition, | 549 // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition, |
| 552 // and instead let the LiveRangeConnector perform the spills within the | 550 // and instead let the LiveRangeConnector perform the spills within the |
| 553 // deferred blocks. If so, we insert here spills for non-spilled ranges | 551 // deferred blocks. If so, we insert here spills for non-spilled ranges |
| (...skipping 19 matching lines...) | (...skipping 19 matching lines...) |
| 573 return spilled_in_deferred_blocks_; | 571 return spilled_in_deferred_blocks_; |
| 574 } | 572 } |
| 575 | 573 |
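
In rough pseudocode, the condition described in the comment above could look like the following sketch; the child-iteration and query helpers (next(), spilled(), HasSlotUse(), IsContainedInDeferredBlocks()) are assumptions for illustration and are not declared in this excerpt:

```cpp
// Sketch of the stated condition, not the actual implementation.
bool ShouldSpillOnlyInDeferredBlocks(const TopLevelLiveRange* range) {
  for (const LiveRange* child = range; child != nullptr;
       child = child->next()) {
    // Spilled children, and non-spilled children that still require a stack
    // slot, must all be confined to deferred blocks; otherwise fall back to
    // spilling at definition.
    if (child->spilled() || HasSlotUse(child)) {
      if (!IsContainedInDeferredBlocks(child)) return false;
    }
  }
  return true;
}
```
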
| 576 struct SpillAtDefinitionList; | 574 struct SpillAtDefinitionList; |
| 577 | 575 |
| 578 SpillAtDefinitionList* spills_at_definition() const { | 576 SpillAtDefinitionList* spills_at_definition() const { |
| 579 return spills_at_definition_; | 577 return spills_at_definition_; |
| 580 } | 578 } |
| 581 void set_last_child(LiveRange* range) { last_child_ = range; } | 579 void set_last_child(LiveRange* range) { last_child_ = range; } |
| 582 LiveRange* last_child() const { return last_child_; } | 580 LiveRange* last_child() const { return last_child_; } |
| 581 TopLevelLiveRange* splinter() const { return splinter_; } |
| 582 void SetSplinter(TopLevelLiveRange* splinter) { |
| 583 DCHECK_NULL(splinter_); |
| 584 DCHECK_NOT_NULL(splinter); |
| 585 |
| 586 splinter_ = splinter; |
| 587 splinter->relative_id_ = GetNextChildId(); |
| 588 splinter->set_spill_type(spill_type()); |
| 589 splinter->SetSplinteredFrom(this); |
| 590 } |
| 583 | 591 |
| 584 private: | 592 private: |
| 593 void SetSplinteredFrom(TopLevelLiveRange* splinter_parent); |
| 594 |
| 585 typedef BitField<bool, 1, 1> HasSlotUseField; | 595 typedef BitField<bool, 1, 1> HasSlotUseField; |
| 586 typedef BitField<bool, 2, 1> IsPhiField; | 596 typedef BitField<bool, 2, 1> IsPhiField; |
| 587 typedef BitField<bool, 3, 1> IsNonLoopPhiField; | 597 typedef BitField<bool, 3, 1> IsNonLoopPhiField; |
| 588 typedef BitField<SpillType, 4, 2> SpillTypeField; | 598 typedef BitField<SpillType, 4, 2> SpillTypeField; |
| 589 | 599 |
| 590 int vreg_; | 600 int vreg_; |
| 591 int last_child_id_; | 601 int last_child_id_; |
| 592 TopLevelLiveRange* splintered_from_; | 602 TopLevelLiveRange* splintered_from_; |
| 593 union { | 603 union { |
| 594 // Correct value determined by spill_type() | 604 // Correct value determined by spill_type() |
| 595 InstructionOperand* spill_operand_; | 605 InstructionOperand* spill_operand_; |
| 596 SpillRange* spill_range_; | 606 SpillRange* spill_range_; |
| 597 }; | 607 }; |
| 598 SpillAtDefinitionList* spills_at_definition_; | 608 SpillAtDefinitionList* spills_at_definition_; |
| 599 // TODO(mtrofin): generalize spilling after definition, currently specialized | 609 // TODO(mtrofin): generalize spilling after definition, currently specialized |
| 600 // just for spill in a single deferred block. | 610 // just for spill in a single deferred block. |
| 601 bool spilled_in_deferred_blocks_; | 611 bool spilled_in_deferred_blocks_; |
| 602 int spill_start_index_; | 612 int spill_start_index_; |
| 603 LiveRange* last_child_; | 613 LiveRange* last_child_; |
| 604 LiveRange* last_insertion_point_; | 614 UsePosition* last_pos_; |
| 615 TopLevelLiveRange* splinter_; |
| 605 | 616 |
| 606 DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange); | 617 DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange); |
| 607 }; | 618 }; |
| 608 | 619 |
| 609 | 620 |
| 610 struct PrintableLiveRange { | 621 struct PrintableLiveRange { |
| 611 const RegisterConfiguration* register_configuration_; | 622 const RegisterConfiguration* register_configuration_; |
| 612 const LiveRange* range_; | 623 const LiveRange* range_; |
| 613 }; | 624 }; |
| 614 | 625 |
| (...skipping 473 matching lines...) | (...skipping 473 matching lines...) |
| 1088 RegisterAllocationData* const data_; | 1099 RegisterAllocationData* const data_; |
| 1089 | 1100 |
| 1090 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); | 1101 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); |
| 1091 }; | 1102 }; |
| 1092 | 1103 |
| 1093 } // namespace compiler | 1104 } // namespace compiler |
| 1094 } // namespace internal | 1105 } // namespace internal |
| 1095 } // namespace v8 | 1106 } // namespace v8 |
| 1096 | 1107 |
| 1097 #endif // V8_REGISTER_ALLOCATOR_H_ | 1108 #endif // V8_REGISTER_ALLOCATOR_H_ |