| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_REGISTER_ALLOCATOR_H_ | 5 #ifndef V8_REGISTER_ALLOCATOR_H_ |
| 6 #define V8_REGISTER_ALLOCATOR_H_ | 6 #define V8_REGISTER_ALLOCATOR_H_ |
| 7 | 7 |
| 8 #include "src/compiler/instruction.h" | 8 #include "src/compiler/instruction.h" |
| 9 #include "src/ostreams.h" | 9 #include "src/ostreams.h" |
| 10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
| (...skipping 312 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 323 } | 323 } |
| 324 | 324 |
| 325 // Returns use position in this live range that follows both start | 325 // Returns use position in this live range that follows both start |
| 326 // and last processed use position. | 326 // and last processed use position. |
| 327 UsePosition* NextUsePosition(LifetimePosition start) const; | 327 UsePosition* NextUsePosition(LifetimePosition start) const; |
| 328 | 328 |
| 329 // Returns use position for which register is required in this live | 329 // Returns use position for which register is required in this live |
| 330 // range and which follows both start and last processed use position | 330 // range and which follows both start and last processed use position |
| 331 UsePosition* NextRegisterPosition(LifetimePosition start) const; | 331 UsePosition* NextRegisterPosition(LifetimePosition start) const; |
| 332 | 332 |
| 333 // Returns the first use position requiring a stack slot, or nullptr. |
| 334 UsePosition* NextStackPosition(LifetimePosition start) const; |
| 335 |
| 333 // Returns use position for which register is beneficial in this live | 336 // Returns use position for which register is beneficial in this live |
| 334 // range and which follows both start and last processed use position | 337 // range and which follows both start and last processed use position |
| 335 UsePosition* NextUsePositionRegisterIsBeneficial( | 338 UsePosition* NextUsePositionRegisterIsBeneficial( |
| 336 LifetimePosition start) const; | 339 LifetimePosition start) const; |
| 337 | 340 |
| 338 // Returns use position for which register is beneficial in this live | 341 // Returns use position for which register is beneficial in this live |
| 339 // range and which precedes start. | 342 // range and which precedes start. |
| 340 UsePosition* PreviousUsePositionRegisterIsBeneficial( | 343 UsePosition* PreviousUsePositionRegisterIsBeneficial( |
| 341 LifetimePosition start) const; | 344 LifetimePosition start) const; |
| 342 | 345 |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 390 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } | 393 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } |
| 391 AllocatedOperand GetSpillRangeOperand() const; | 394 AllocatedOperand GetSpillRangeOperand() const; |
| 392 | 395 |
| 393 void SpillAtDefinition(Zone* zone, int gap_index, | 396 void SpillAtDefinition(Zone* zone, int gap_index, |
| 394 InstructionOperand* operand); | 397 InstructionOperand* operand); |
| 395 void SetSpillOperand(InstructionOperand* operand); | 398 void SetSpillOperand(InstructionOperand* operand); |
| 396 void SetSpillRange(SpillRange* spill_range); | 399 void SetSpillRange(SpillRange* spill_range); |
| 397 void CommitSpillsAtDefinition(InstructionSequence* sequence, | 400 void CommitSpillsAtDefinition(InstructionSequence* sequence, |
| 398 const InstructionOperand& operand, | 401 const InstructionOperand& operand, |
| 399 bool might_be_duplicated); | 402 bool might_be_duplicated); |
| 403 // This must be applied on top level ranges. |
| 404 // If only one of the children(1) of this range is spilled, and the block is |
| 405 // deferred(2), then we will spill in that block rather than at definition, |
| 406 // to avoid the penalty of the spill operation. |
| 407 // (1) if the top level were spilled, it means it is defined on the stack, |
| 408 // so we wouldn't have spilled it at definition in the first place. If |
| 409 // more than one child is spilled, we would also need to change how we handle |
| 410 // traced references, and we'd need to ensure the first spill dominates the |
| 411 // rest. |
| 412 // (2) if the block is not deferred, it may be on a hot path (or loop), in |
| 413 // which case it may be worse to spill in it. |
| 414 bool TryCommitSpillInDeferredBlock(InstructionSequence* code); |
| 400 | 415 |
| 401 void SetSpillStartIndex(int start) { | 416 void SetSpillStartIndex(int start) { |
| 402 spill_start_index_ = Min(start, spill_start_index_); | 417 spill_start_index_ = Min(start, spill_start_index_); |
| 403 } | 418 } |
| 404 | 419 |
| 405 bool ShouldBeAllocatedBefore(const LiveRange* other) const; | 420 bool ShouldBeAllocatedBefore(const LiveRange* other) const; |
| 406 bool CanCover(LifetimePosition position) const; | 421 bool CanCover(LifetimePosition position) const; |
| 407 bool Covers(LifetimePosition position) const; | 422 bool Covers(LifetimePosition position) const; |
| 408 LifetimePosition FirstIntersection(LiveRange* other) const; | 423 LifetimePosition FirstIntersection(LiveRange* other) const; |
| 409 | 424 |
| (...skipping 16 matching lines...) Expand all Loading... |
| 426 | 441 |
| 427 SpillAtDefinitionList* spills_at_definition() const { | 442 SpillAtDefinitionList* spills_at_definition() const { |
| 428 return spills_at_definition_; | 443 return spills_at_definition_; |
| 429 } | 444 } |
| 430 | 445 |
| 431 // Used solely by the Greedy Allocator: | 446 // Used solely by the Greedy Allocator: |
| 432 unsigned GetSize(); | 447 unsigned GetSize(); |
| 433 float weight() const { return weight_; } | 448 float weight() const { return weight_; } |
| 434 void set_weight(float weight) { weight_ = weight; } | 449 void set_weight(float weight) { weight_ = weight; } |
| 435 | 450 |
| 451 bool IsSpilledInSingleDeferredBlock() const { |
| 452 return spilled_in_deferred_block_; |
| 453 } |
| 454 |
| 436 static const int kInvalidSize = -1; | 455 static const int kInvalidSize = -1; |
| 437 static const float kInvalidWeight; | 456 static const float kInvalidWeight; |
| 438 static const float kMaxWeight; | 457 static const float kMaxWeight; |
| 439 | 458 |
| 440 private: | 459 private: |
| 441 void set_spill_type(SpillType value) { | 460 void set_spill_type(SpillType value) { |
| 442 bits_ = SpillTypeField::update(bits_, value); | 461 bits_ = SpillTypeField::update(bits_, value); |
| 443 } | 462 } |
| 444 | 463 |
| 445 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } | 464 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 478 mutable UsePosition* current_hint_position_; | 497 mutable UsePosition* current_hint_position_; |
| 479 | 498 |
| 480 // greedy: the number of LifetimePositions covered by this range. Used to | 499 // greedy: the number of LifetimePositions covered by this range. Used to |
| 481 // prioritize selecting live ranges for register assignment, as well as | 500 // prioritize selecting live ranges for register assignment, as well as |
| 482 // in weight calculations. | 501 // in weight calculations. |
| 483 int size_; | 502 int size_; |
| 484 | 503 |
| 485 // greedy: a metric for resolving conflicts between ranges with an assigned | 504 // greedy: a metric for resolving conflicts between ranges with an assigned |
| 486 // register and ranges that intersect them and need a register. | 505 // register and ranges that intersect them and need a register. |
| 487 float weight_; | 506 float weight_; |
| 507 |
| 508 // TODO(mtrofin): generalize spilling after definition, currently specialized |
| 509 // just for spill in a single deferred block. |
| 510 bool spilled_in_deferred_block_; |
| 488 DISALLOW_COPY_AND_ASSIGN(LiveRange); | 511 DISALLOW_COPY_AND_ASSIGN(LiveRange); |
| 489 }; | 512 }; |
| 490 | 513 |
| 491 | 514 |
| 492 struct PrintableLiveRange { | 515 struct PrintableLiveRange { |
| 493 const RegisterConfiguration* register_configuration_; | 516 const RegisterConfiguration* register_configuration_; |
| 494 const LiveRange* range_; | 517 const LiveRange* range_; |
| 495 }; | 518 }; |
| 496 | 519 |
| 497 | 520 |
| (...skipping 438 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 936 RegisterAllocationData* const data_; | 959 RegisterAllocationData* const data_; |
| 937 | 960 |
| 938 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); | 961 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); |
| 939 }; | 962 }; |
| 940 | 963 |
| 941 } // namespace compiler | 964 } // namespace compiler |
| 942 } // namespace internal | 965 } // namespace internal |
| 943 } // namespace v8 | 966 } // namespace v8 |
| 944 | 967 |
| 945 #endif // V8_REGISTER_ALLOCATOR_H_ | 968 #endif // V8_REGISTER_ALLOCATOR_H_ |
| OLD | NEW |