OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_REGISTER_ALLOCATOR_H_ | 5 #ifndef V8_REGISTER_ALLOCATOR_H_ |
6 #define V8_REGISTER_ALLOCATOR_H_ | 6 #define V8_REGISTER_ALLOCATOR_H_ |
7 | 7 |
8 #include "src/compiler/instruction.h" | 8 #include "src/compiler/instruction.h" |
9 #include "src/ostreams.h" | 9 #include "src/ostreams.h" |
10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
(...skipping 316 matching lines...) | |
327 } | 327 } |
328 | 328 |
329 // Returns use position in this live range that follows both start | 329 // Returns use position in this live range that follows both start |
330 // and last processed use position. | 330 // and last processed use position. |
331 UsePosition* NextUsePosition(LifetimePosition start) const; | 331 UsePosition* NextUsePosition(LifetimePosition start) const; |
332 | 332 |
333 // Returns use position for which register is required in this live | 333 // Returns use position for which register is required in this live |
334 // range and which follows both start and last processed use position | 334 // range and which follows both start and last processed use position |
335 UsePosition* NextRegisterPosition(LifetimePosition start) const; | 335 UsePosition* NextRegisterPosition(LifetimePosition start) const; |
336 | 336 |
| 337 // Returns the first use position requiring a stack slot, or nullptr. |
| 338 UsePosition* NextSlotPosition(LifetimePosition start) const; |
| 339 |
337 // Returns use position for which register is beneficial in this live | 340 // Returns use position for which register is beneficial in this live |
338 // range and which follows both start and last processed use position | 341 // range and which follows both start and last processed use position |
339 UsePosition* NextUsePositionRegisterIsBeneficial( | 342 UsePosition* NextUsePositionRegisterIsBeneficial( |
340 LifetimePosition start) const; | 343 LifetimePosition start) const; |
341 | 344 |
342 // Returns use position for which register is beneficial in this live | 345 // Returns use position for which register is beneficial in this live |
343 // range and which precedes start. | 346 // range and which precedes start. |
344 UsePosition* PreviousUsePositionRegisterIsBeneficial( | 347 UsePosition* PreviousUsePositionRegisterIsBeneficial( |
345 LifetimePosition start) const; | 348 LifetimePosition start) const; |
346 | 349 |
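The Next*/Previous* queries above all follow one pattern: scan the range's use positions, which are ordered by lifetime position, and return the first one at or after the given start (or, for the Previous variant, the last one before it) that satisfies a constraint. A minimal self-contained sketch of that scan, using simplified stand-in types rather than the real UsePosition/LifetimePosition classes (an illustration of the pattern only, not V8's implementation):

    #include <cstdint>

    // Stand-ins for illustration only; not the V8 types.
    enum class UseKind { kAny, kRequiresRegister, kRequiresSlot, kRegisterBeneficial };

    struct Use {
      int32_t position;  // stand-in for LifetimePosition
      UseKind kind;
      Use* next;         // uses assumed sorted by ascending position
    };

    // First use at or after `start` whose kind matches `wanted` (kAny matches all).
    Use* NextMatchingUse(Use* first, int32_t start, UseKind wanted) {
      for (Use* u = first; u != nullptr; u = u->next) {
        if (u->position >= start &&
            (wanted == UseKind::kAny || u->kind == wanted)) {
          return u;
        }
      }
      return nullptr;  // no such use; the caller can then consider spilling
    }

    // Last register-beneficial use strictly before `start`.
    Use* PreviousBeneficialUse(Use* first, int32_t start) {
      Use* result = nullptr;
      for (Use* u = first; u != nullptr && u->position < start; u = u->next) {
        if (u->kind == UseKind::kRegisterBeneficial) result = u;
      }
      return result;
    }
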
(...skipping 47 matching lines...) | |
394 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } | 397 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } |
395 AllocatedOperand GetSpillRangeOperand() const; | 398 AllocatedOperand GetSpillRangeOperand() const; |
396 | 399 |
397 void SpillAtDefinition(Zone* zone, int gap_index, | 400 void SpillAtDefinition(Zone* zone, int gap_index, |
398 InstructionOperand* operand); | 401 InstructionOperand* operand); |
399 void SetSpillOperand(InstructionOperand* operand); | 402 void SetSpillOperand(InstructionOperand* operand); |
400 void SetSpillRange(SpillRange* spill_range); | 403 void SetSpillRange(SpillRange* spill_range); |
401 void CommitSpillsAtDefinition(InstructionSequence* sequence, | 404 void CommitSpillsAtDefinition(InstructionSequence* sequence, |
402 const InstructionOperand& operand, | 405 const InstructionOperand& operand, |
403 bool might_be_duplicated); | 406 bool might_be_duplicated); |
| 407 // This must be applied on top-level ranges. |
| 408 // If only one of the children (1) of this range is spilled, and the block is |
| 409 // deferred (2), then we will spill in that block rather than at definition, |
| 410 // to avoid the penalty of the spill operation. |
| 411 // (1) if the top level were spilled, it means it is defined on the stack, |
| 412 // so we wouldn't have spilled it at definition in the first place. If |
| 413 // more than one child is spilled, we would also need to change how we handle |
| 414 // traced references, and we'd need to ensure the first spill dominates the |
| 415 // rest. |
| 416 // (2) if the block is not deferred, it may be on a hot path (or loop), in |
| 417 // which case it may be worse to spill in it. |
| 418 bool TryCommitSpillInDeferredBlock(InstructionSequence* code, |
| 419                                    const InstructionOperand& spill_operand); |
404 | 420 |
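The comment block above spells out when TryCommitSpillInDeferredBlock may move the spill out of the definition and into a deferred block. A compact stand-alone sketch of that decision rule, using hypothetical stand-in types (the real method walks the top-level range's children and their InstructionBlocks):

    #include <vector>

    // Hypothetical stand-ins used only for this sketch.
    struct ChildRange {
      bool spilled;
      bool in_deferred_block;
    };

    // True when the spill can be committed inside a deferred block instead of
    // at the definition: exactly one child is spilled, and that child's block
    // is deferred (i.e. off the hot path). Otherwise the caller falls back to
    // spilling at the definition.
    bool CanSpillInDeferredBlock(const std::vector<ChildRange>& children) {
      const ChildRange* spilled_child = nullptr;
      for (const ChildRange& child : children) {
        if (!child.spilled) continue;
        if (spilled_child != nullptr) return false;  // more than one spilled child
        spilled_child = &child;
      }
      if (spilled_child == nullptr) return false;    // nothing spilled at all
      return spilled_child->in_deferred_block;
    }
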
405 void SetSpillStartIndex(int start) { | 421 void SetSpillStartIndex(int start) { |
406 spill_start_index_ = Min(start, spill_start_index_); | 422 spill_start_index_ = Min(start, spill_start_index_); |
407 } | 423 } |
408 | 424 |
409 bool ShouldBeAllocatedBefore(const LiveRange* other) const; | 425 bool ShouldBeAllocatedBefore(const LiveRange* other) const; |
410 bool CanCover(LifetimePosition position) const; | 426 bool CanCover(LifetimePosition position) const; |
411 bool Covers(LifetimePosition position) const; | 427 bool Covers(LifetimePosition position) const; |
412 LifetimePosition FirstIntersection(LiveRange* other) const; | 428 LifetimePosition FirstIntersection(LiveRange* other) const; |
413 | 429 |
(...skipping 16 matching lines...) | |
430 | 446 |
431 SpillAtDefinitionList* spills_at_definition() const { | 447 SpillAtDefinitionList* spills_at_definition() const { |
432 return spills_at_definition_; | 448 return spills_at_definition_; |
433 } | 449 } |
434 | 450 |
435 // Used solely by the Greedy Allocator: | 451 // Used solely by the Greedy Allocator: |
436 unsigned GetSize(); | 452 unsigned GetSize(); |
437 float weight() const { return weight_; } | 453 float weight() const { return weight_; } |
438 void set_weight(float weight) { weight_ = weight; } | 454 void set_weight(float weight) { weight_ = weight; } |
439 | 455 |
| 456 bool IsSpilledInSingleDeferredBlock() const { |
Jarin (2015/08/04 19:39:43): I am really confused about this. The name refers t…
Mircea Trofin (2015/08/04 20:34:46): Sorry - the name was a point in time :) It's not f…
| 457 return spilled_in_deferred_block_; |
| 458 } |
| 459 |
440 static const int kInvalidSize = -1; | 460 static const int kInvalidSize = -1; |
441 static const float kInvalidWeight; | 461 static const float kInvalidWeight; |
442 static const float kMaxWeight; | 462 static const float kMaxWeight; |
443 | 463 |
444 private: | 464 private: |
445 void set_spill_type(SpillType value) { | 465 void set_spill_type(SpillType value) { |
446 bits_ = SpillTypeField::update(bits_, value); | 466 bits_ = SpillTypeField::update(bits_, value); |
447 } | 467 } |
448 | 468 |
449 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } | 469 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } |
(...skipping 32 matching lines...) | |
482 mutable UsePosition* current_hint_position_; | 502 mutable UsePosition* current_hint_position_; |
483 | 503 |
484 // greedy: the number of LifetimePositions covered by this range. Used to | 504 // greedy: the number of LifetimePositions covered by this range. Used to |
485 // prioritize selecting live ranges for register assignment, as well as | 505 // prioritize selecting live ranges for register assignment, as well as |
486 // in weight calculations. | 506 // in weight calculations. |
487 int size_; | 507 int size_; |
488 | 508 |
489 // greedy: a metric for resolving conflicts between ranges with an assigned | 509 // greedy: a metric for resolving conflicts between ranges with an assigned |
490 // register and ranges that intersect them and need a register. | 510 // register and ranges that intersect them and need a register. |
491 float weight_; | 511 float weight_; |
| 512 |
| 513 // TODO(mtrofin): generalize spilling after definition, currently specialized |
| 514 // just for spill in a single deferred block. |
| 515 bool spilled_in_deferred_block_; |
492 DISALLOW_COPY_AND_ASSIGN(LiveRange); | 516 DISALLOW_COPY_AND_ASSIGN(LiveRange); |
493 }; | 517 }; |
494 | 518 |
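The greedy-allocator members above (size_, weight_) exist so that conflicting ranges can be compared when they compete for a register. One plausible shape of that comparison, shown purely as an illustration under stated assumptions (the actual formula and eviction policy live in the greedy allocator, not in this header):

    // Illustrative only; the names and the formula are assumptions, not V8's.
    struct RangeStats {
      int use_count;  // uses observed in the range
      int size;       // LifetimePositions covered, cf. GetSize()
      float weight;   // cf. weight() / set_weight()
    };

    // A density-style weight: more uses packed into fewer covered positions
    // means the range is more expensive to spill.
    float ComputeWeight(const RangeStats& r) {
      if (r.size <= 0) return 0.0f;
      return static_cast<float>(r.use_count) / static_cast<float>(r.size);
    }

    // When an unassigned range conflicts with one that already holds a
    // register, keep the heavier range and split or spill the lighter one.
    bool ShouldEvictAllocated(const RangeStats& allocated,
                              const RangeStats& incoming) {
      return incoming.weight > allocated.weight;
    }
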
495 | 519 |
496 struct PrintableLiveRange { | 520 struct PrintableLiveRange { |
497 const RegisterConfiguration* register_configuration_; | 521 const RegisterConfiguration* register_configuration_; |
498 const LiveRange* range_; | 522 const LiveRange* range_; |
499 }; | 523 }; |
500 | 524 |
501 | 525 |
(...skipping 438 matching lines...) | |
940 RegisterAllocationData* const data_; | 964 RegisterAllocationData* const data_; |
941 | 965 |
942 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); | 966 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); |
943 }; | 967 }; |
944 | 968 |
945 } // namespace compiler | 969 } // namespace compiler |
946 } // namespace internal | 970 } // namespace internal |
947 } // namespace v8 | 971 } // namespace v8 |
948 | 972 |
949 #endif // V8_REGISTER_ALLOCATOR_H_ | 973 #endif // V8_REGISTER_ALLOCATOR_H_ |