Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(111)

Side by Side Diff: src/compiler/register-allocator.h

Issue 1271703002: [turbofan] Stand-alone deferred block splitting - full. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/compiler/preprocess-live-ranges.cc ('k') | src/compiler/register-allocator.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_REGISTER_ALLOCATOR_H_ 5 #ifndef V8_REGISTER_ALLOCATOR_H_
6 #define V8_REGISTER_ALLOCATOR_H_ 6 #define V8_REGISTER_ALLOCATOR_H_
7 7
8 #include "src/compiler/instruction.h" 8 #include "src/compiler/instruction.h"
9 #include "src/ostreams.h" 9 #include "src/ostreams.h"
10 #include "src/zone-containers.h" 10 #include "src/zone-containers.h"
(...skipping 316 matching lines...) Expand 10 before | Expand all | Expand 10 after
327 } 327 }
328 328
329 // Returns use position in this live range that follows both start 329 // Returns use position in this live range that follows both start
330 // and last processed use position. 330 // and last processed use position.
331 UsePosition* NextUsePosition(LifetimePosition start) const; 331 UsePosition* NextUsePosition(LifetimePosition start) const;
332 332
333 // Returns use position for which register is required in this live 333 // Returns use position for which register is required in this live
334 // range and which follows both start and last processed use position 334 // range and which follows both start and last processed use position
335 UsePosition* NextRegisterPosition(LifetimePosition start) const; 335 UsePosition* NextRegisterPosition(LifetimePosition start) const;
336 336
337 // Returns the first use position requiring a stack slot, or nullptr.
338 UsePosition* NextSlotPosition(LifetimePosition start) const;
339
337 // Returns use position for which register is beneficial in this live 340 // Returns use position for which register is beneficial in this live
338 // range and which follows both start and last processed use position 341 // range and which follows both start and last processed use position
339 UsePosition* NextUsePositionRegisterIsBeneficial( 342 UsePosition* NextUsePositionRegisterIsBeneficial(
340 LifetimePosition start) const; 343 LifetimePosition start) const;
341 344
342 // Returns use position for which register is beneficial in this live 345 // Returns use position for which register is beneficial in this live
343 // range and which precedes start. 346 // range and which precedes start.
344 UsePosition* PreviousUsePositionRegisterIsBeneficial( 347 UsePosition* PreviousUsePositionRegisterIsBeneficial(
345 LifetimePosition start) const; 348 LifetimePosition start) const;
346 349
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
394 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } 397 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }
395 AllocatedOperand GetSpillRangeOperand() const; 398 AllocatedOperand GetSpillRangeOperand() const;
396 399
397 void SpillAtDefinition(Zone* zone, int gap_index, 400 void SpillAtDefinition(Zone* zone, int gap_index,
398 InstructionOperand* operand); 401 InstructionOperand* operand);
399 void SetSpillOperand(InstructionOperand* operand); 402 void SetSpillOperand(InstructionOperand* operand);
400 void SetSpillRange(SpillRange* spill_range); 403 void SetSpillRange(SpillRange* spill_range);
401 void CommitSpillsAtDefinition(InstructionSequence* sequence, 404 void CommitSpillsAtDefinition(InstructionSequence* sequence,
402 const InstructionOperand& operand, 405 const InstructionOperand& operand,
403 bool might_be_duplicated); 406 bool might_be_duplicated);
407 // This must be applied on top-level ranges.
408 // If all the children of this range are spilled in deferred blocks, and if
409 // for any non-spilled child with a use position requiring a slot, that range
410 // is contained in a deferred block, mark the range as
411 // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
412 // and instead let the LiveRangeConnector perform the spills within the
413 // deferred blocks. If so, we insert spills here for non-spilled ranges
414 // with slot use positions.
415 bool TryCommitSpillInDeferredBlock(InstructionSequence* code,
416 const InstructionOperand& spill_operand);
404 417
405 void SetSpillStartIndex(int start) { 418 void SetSpillStartIndex(int start) {
406 spill_start_index_ = Min(start, spill_start_index_); 419 spill_start_index_ = Min(start, spill_start_index_);
407 } 420 }
408 421
409 bool ShouldBeAllocatedBefore(const LiveRange* other) const; 422 bool ShouldBeAllocatedBefore(const LiveRange* other) const;
410 bool CanCover(LifetimePosition position) const; 423 bool CanCover(LifetimePosition position) const;
411 bool Covers(LifetimePosition position) const; 424 bool Covers(LifetimePosition position) const;
412 LifetimePosition FirstIntersection(LiveRange* other) const; 425 LifetimePosition FirstIntersection(LiveRange* other) const;
413 426
(...skipping 16 matching lines...) Expand all
430 443
431 SpillAtDefinitionList* spills_at_definition() const { 444 SpillAtDefinitionList* spills_at_definition() const {
432 return spills_at_definition_; 445 return spills_at_definition_;
433 } 446 }
434 447
435 // Used solely by the Greedy Allocator: 448 // Used solely by the Greedy Allocator:
436 unsigned GetSize(); 449 unsigned GetSize();
437 float weight() const { return weight_; } 450 float weight() const { return weight_; }
438 void set_weight(float weight) { weight_ = weight; } 451 void set_weight(float weight) { weight_ = weight; }
439 452
453 bool IsSpilledOnlyInDeferredBlocks() const {
454 return spilled_in_deferred_block_;
455 }
456
440 static const int kInvalidSize = -1; 457 static const int kInvalidSize = -1;
441 static const float kInvalidWeight; 458 static const float kInvalidWeight;
442 static const float kMaxWeight; 459 static const float kMaxWeight;
443 460
444 private: 461 private:
445 void set_spill_type(SpillType value) { 462 void set_spill_type(SpillType value) {
446 bits_ = SpillTypeField::update(bits_, value); 463 bits_ = SpillTypeField::update(bits_, value);
447 } 464 }
448 465
449 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } 466 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
482 mutable UsePosition* current_hint_position_; 499 mutable UsePosition* current_hint_position_;
483 500
484 // greedy: the number of LifetimePositions covered by this range. Used to 501 // greedy: the number of LifetimePositions covered by this range. Used to
485 // prioritize selecting live ranges for register assignment, as well as 502 // prioritize selecting live ranges for register assignment, as well as
486 // in weight calculations. 503 // in weight calculations.
487 int size_; 504 int size_;
488 505
489 // greedy: a metric for resolving conflicts between ranges with an assigned 506 // greedy: a metric for resolving conflicts between ranges with an assigned
490 // register and ranges that intersect them and need a register. 507 // register and ranges that intersect them and need a register.
491 float weight_; 508 float weight_;
509
510 // TODO(mtrofin): generalize spilling after definition, currently specialized
511 // just for spill in a single deferred block.
512 bool spilled_in_deferred_block_;
492 DISALLOW_COPY_AND_ASSIGN(LiveRange); 513 DISALLOW_COPY_AND_ASSIGN(LiveRange);
493 }; 514 };
494 515
495 516
496 struct PrintableLiveRange { 517 struct PrintableLiveRange {
497 const RegisterConfiguration* register_configuration_; 518 const RegisterConfiguration* register_configuration_;
498 const LiveRange* range_; 519 const LiveRange* range_;
499 }; 520 };
500 521
501 522
(...skipping 413 matching lines...) Expand 10 before | Expand all | Expand 10 after
915 RegisterAllocationData* data() const { return data_; } 936 RegisterAllocationData* data() const { return data_; }
916 937
917 bool SafePointsAreInOrder() const; 938 bool SafePointsAreInOrder() const;
918 939
919 RegisterAllocationData* const data_; 940 RegisterAllocationData* const data_;
920 941
921 DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator); 942 DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
922 }; 943 };
923 944
924 945
946 // Insert moves of the form
947 //
948 // Operand(child_(k+1)) = Operand(child_k)
949 //
950 // where child_k and child_(k+1) are consecutive children of a range (so
951 // child_k->next() == child_(k+1)), and Operand(...) refers to the
952 // assigned operand, be it a register or a slot.
925 class LiveRangeConnector final : public ZoneObject { 953 class LiveRangeConnector final : public ZoneObject {
926 public: 954 public:
927 explicit LiveRangeConnector(RegisterAllocationData* data); 955 explicit LiveRangeConnector(RegisterAllocationData* data);
928 956
929 // Phase 8: reconnect split ranges with moves. 957 // Phase 8: reconnect split ranges with moves, when the control flow
958 // between the ranges is trivial (no branches).
930 void ConnectRanges(Zone* local_zone); 959 void ConnectRanges(Zone* local_zone);
931 960
932 // Phase 9: insert moves to connect ranges across basic blocks. 961 // Phase 9: insert moves to connect ranges across basic blocks, when the
962 // control flow between them cannot be trivially resolved, such as joining
963 // branches.
933 void ResolveControlFlow(Zone* local_zone); 964 void ResolveControlFlow(Zone* local_zone);
934 965
935 private: 966 private:
936 RegisterAllocationData* data() const { return data_; } 967 RegisterAllocationData* data() const { return data_; }
937 InstructionSequence* code() const { return data()->code(); } 968 InstructionSequence* code() const { return data()->code(); }
938 Zone* code_zone() const { return code()->zone(); } 969 Zone* code_zone() const { return code()->zone(); }
939 970
940 bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const; 971 bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;
972
941 void ResolveControlFlow(const InstructionBlock* block, 973 void ResolveControlFlow(const InstructionBlock* block,
942 const InstructionOperand& cur_op, 974 const InstructionOperand& cur_op,
943 const InstructionBlock* pred, 975 const InstructionBlock* pred,
944 const InstructionOperand& pred_op); 976 const InstructionOperand& pred_op);
945 977
946 RegisterAllocationData* const data_; 978 RegisterAllocationData* const data_;
947 979
948 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); 980 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
949 }; 981 };
950 982
951 } // namespace compiler 983 } // namespace compiler
952 } // namespace internal 984 } // namespace internal
953 } // namespace v8 985 } // namespace v8
954 986
955 #endif // V8_REGISTER_ALLOCATOR_H_ 987 #endif // V8_REGISTER_ALLOCATOR_H_
OLDNEW
« no previous file with comments | « src/compiler/preprocess-live-ranges.cc ('k') | src/compiler/register-allocator.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698