Chromium Code Reviews

Unified Diff: src/compiler/register-allocator.h

Issue 1242123006: [turbofan] Deferred block spilling heuristic - first step. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 4 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_REGISTER_ALLOCATOR_H_
 #define V8_REGISTER_ALLOCATOR_H_

 #include "src/compiler/instruction.h"
 #include "src/ostreams.h"
 #include "src/zone-containers.h"
(...skipping 316 matching lines...)
   }

   // Returns use position in this live range that follows both start
   // and last processed use position.
   UsePosition* NextUsePosition(LifetimePosition start) const;

   // Returns use position for which register is required in this live
   // range and which follows both start and last processed use position.
   UsePosition* NextRegisterPosition(LifetimePosition start) const;

+  // Returns the first use position requiring a stack slot, or nullptr.
+  UsePosition* NextSlotPosition(LifetimePosition start) const;
+
   // Returns use position for which register is beneficial in this live
   // range and which follows both start and last processed use position.
   UsePosition* NextUsePositionRegisterIsBeneficial(
       LifetimePosition start) const;

   // Returns use position for which register is beneficial in this live
   // range and which precedes start.
   UsePosition* PreviousUsePositionRegisterIsBeneficial(
       LifetimePosition start) const;
(...skipping 47 matching lines...)
   bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }
   AllocatedOperand GetSpillRangeOperand() const;

   void SpillAtDefinition(Zone* zone, int gap_index,
                          InstructionOperand* operand);
   void SetSpillOperand(InstructionOperand* operand);
   void SetSpillRange(SpillRange* spill_range);
   void CommitSpillsAtDefinition(InstructionSequence* sequence,
                                 const InstructionOperand& operand,
                                 bool might_be_duplicated);
+  // This must be applied on top-level ranges.
+  // If only one of the children (1) of this range is spilled, and the block
+  // containing that spill is deferred (2), then we spill in that block rather
+  // than at definition, to avoid the penalty of the spill operation.
+  // (1) If the top level were spilled, it would be defined on the stack, so
+  // we wouldn't have spilled it at definition in the first place. If more
+  // than one child is spilled, we would also need to change how we handle
+  // traced references, and we'd need to ensure the first spill dominates the
+  // rest.
+  // (2) If the block is not deferred, it may be on a hot path (or in a loop),
+  // in which case spilling in it may be worse than spilling at definition.
+  bool TryCommitSpillInDeferredBlock(InstructionSequence* code,
+                                     const InstructionOperand& spill_operand);

   void SetSpillStartIndex(int start) {
     spill_start_index_ = Min(start, spill_start_index_);
   }

   bool ShouldBeAllocatedBefore(const LiveRange* other) const;
   bool CanCover(LifetimePosition position) const;
   bool Covers(LifetimePosition position) const;
   LifetimePosition FirstIntersection(LiveRange* other) const;
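
Note (not part of the patch): a compilable standalone model of the condition TryCommitSpillInDeferredBlock must check before moving the spill out of the definition, per notes (1) and (2) above; `Block` and `Child` are hypothetical stand-ins, not types from this header:

    #include <vector>

    struct Block {
      bool deferred;  // stand-in for the instruction block's deferred flag
    };

    struct Child {
      bool spilled;        // did allocation spill this part of the range?
      const Block* block;  // block where that spill would land
    };

    // True only when exactly one child is spilled (note (1)) and that child's
    // block is deferred (note (2)); otherwise keep spilling at definition.
    bool ShouldSpillInDeferredBlock(const std::vector<Child>& children) {
      const Child* spilled_child = nullptr;
      for (const Child& c : children) {
        if (!c.spilled) continue;
        if (spilled_child != nullptr) return false;  // more than one spill
        spilled_child = &c;
      }
      return spilled_child != nullptr && spilled_child->block->deferred;
    }
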
(...skipping 12 matching lines...)
   void SetUseHints(int register_index);
   void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

   struct SpillAtDefinitionList;

   SpillAtDefinitionList* spills_at_definition() const {
     return spills_at_definition_;
   }

   // Used solely by the Greedy Allocator:
-  unsigned GetSize();
+  unsigned size() const { return size_; }
+  bool IsSizeValid() const { return size() != kInvalidSize; }
+  void UpdateSize();
   float weight() const { return weight_; }
   void set_weight(float weight) { weight_ = weight; }

+  bool IsSpilledInSingleDeferredBlock() const {
+    return spilled_in_deferred_block_;
+  }
+
   static const int kInvalidSize = -1;
   static const float kInvalidWeight;
   static const float kMaxWeight;

  private:
   void set_spill_type(SpillType value) {
     bits_ = SpillTypeField::update(bits_, value);
   }

   void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
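
Note (not part of the patch): the hunk above trades the recompute-on-every-call GetSize() for a cached value: UpdateSize() computes once, size() reads size_, and kInvalidSize marks "not yet computed". A compilable sketch of that pattern; `CachedRange` and `Compute()` are hypothetical:

    #include <cassert>

    class CachedRange {
     public:
      int size() const {
        assert(IsSizeValid());  // callers must run UpdateSize() first
        return size_;
      }
      bool IsSizeValid() const { return size_ != kInvalidSize; }
      void UpdateSize() { size_ = Compute(); }

      static const int kInvalidSize = -1;

     private:
      // Placeholder for the real walk over the range's covered positions.
      int Compute() const { return 1; }

      int size_ = kInvalidSize;
    };
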
(...skipping 32 matching lines...)
   mutable UsePosition* current_hint_position_;

   // greedy: the number of LifetimePositions covered by this range. Used to
   // prioritize selecting live ranges for register assignment, as well as
   // in weight calculations.
   int size_;

   // greedy: a metric for resolving conflicts between ranges with an assigned
   // register and ranges that intersect them and need a register.
   float weight_;

+  // TODO(mtrofin): generalize spilling after definition; currently
+  // specialized just for spilling in a single deferred block.
+  bool spilled_in_deferred_block_;
   DISALLOW_COPY_AND_ASSIGN(LiveRange);
 };


 struct PrintableLiveRange {
   const RegisterConfiguration* register_configuration_;
   const LiveRange* range_;
 };

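
Note (an assumption, not from this patch): the header never says how weight_ is derived from size_. One plausible metric, purely for illustration, is use density (uses per covered position), with fixed ranges pinned to kMaxWeight so they always win conflicts:

    #include <limits>

    const float kMaxWeight = std::numeric_limits<float>::max();

    // Hypothetical weight: denser ranges are costlier to spill. Assumes
    // size > 0, i.e. UpdateSize() has run and the range covers something.
    float ComputeWeight(bool is_fixed, int use_count, int size) {
      if (is_fixed) return kMaxWeight;  // fixed ranges must keep the register
      return static_cast<float>(use_count) / static_cast<float>(size);
    }
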
(...skipping 438 matching lines...)
   RegisterAllocationData* const data_;

   DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
 };

 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_REGISTER_ALLOCATOR_H_