Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_REGISTER_ALLOCATOR_H_ | 5 #ifndef V8_REGISTER_ALLOCATOR_H_ |
| 6 #define V8_REGISTER_ALLOCATOR_H_ | 6 #define V8_REGISTER_ALLOCATOR_H_ |
| 7 | 7 |
| 8 #include "src/compiler/instruction.h" | 8 #include "src/compiler/instruction.h" |
| 9 #include "src/ostreams.h" | 9 #include "src/ostreams.h" |
| 10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
| (...skipping 256 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 267 UsePosition* next_; | 267 UsePosition* next_; |
| 268 LifetimePosition const pos_; | 268 LifetimePosition const pos_; |
| 269 uint32_t flags_; | 269 uint32_t flags_; |
| 270 | 270 |
| 271 DISALLOW_COPY_AND_ASSIGN(UsePosition); | 271 DISALLOW_COPY_AND_ASSIGN(UsePosition); |
| 272 }; | 272 }; |
| 273 | 273 |
| 274 | 274 |
| 275 class SpillRange; | 275 class SpillRange; |
| 276 class RegisterAllocationData; | 276 class RegisterAllocationData; |
| 277 class TopLevelLiveRange; | |
| 277 | 278 |
| 278 // Representation of SSA values' live ranges as a collection of (continuous) | 279 // Representation of SSA values' live ranges as a collection of (continuous) |
| 279 // intervals over the instruction ordering. | 280 // intervals over the instruction ordering. |
| 280 class LiveRange final : public ZoneObject { | 281 class LiveRange : public ZoneObject { |
| 281 public: | 282 public: |
| 282 explicit LiveRange(int id, MachineType machine_type); | |
| 283 | |
| 284 UseInterval* first_interval() const { return first_interval_; } | 283 UseInterval* first_interval() const { return first_interval_; } |
| 285 UsePosition* first_pos() const { return first_pos_; } | 284 UsePosition* first_pos() const { return first_pos_; } |
| 286 LiveRange* parent() const { return parent_; } | 285 TopLevelLiveRange* TopLevel() { return parent_; } |
|
Benedikt Meurer
2015/08/26 04:31:20
parent_ should be renamed to toplevel_ (or top_level_).
Mircea Trofin
2015/08/26 05:00:13
Done.
| |
| 287 LiveRange* TopLevel() { return (parent_ == nullptr) ? this : parent_; } | 286 const TopLevelLiveRange* TopLevel() const { return parent_; } |
| 288 const LiveRange* TopLevel() const { | 287 |
| 289 return (parent_ == nullptr) ? this : parent_; | 288 bool IsTopLevel() const { |
| 289 return reinterpret_cast<LiveRange*>(parent_) == this; | |
|
Benedikt Meurer
2015/08/26 04:31:20
The reinterpret_cast is unnecessary (and dangerous).
Mircea Trofin
2015/08/26 05:00:13
Done.
| |
| 290 } | 290 } |
| 291 LiveRange* next() const { return next_; } | 291 LiveRange* next() const { return next_; } |
| 292 bool IsChild() const { return parent() != nullptr; } | 292 |
| 293 int id() const { return id_; } | 293 int relative_id() const { return relative_id_; } |
| 294 bool IsFixed() const { return id_ < 0; } | 294 |
| 295 bool IsEmpty() const { return first_interval() == nullptr; } | 295 bool IsEmpty() const { return first_interval() == nullptr; } |
| 296 | |
| 296 InstructionOperand GetAssignedOperand() const; | 297 InstructionOperand GetAssignedOperand() const; |
| 297 int spill_start_index() const { return spill_start_index_; } | |
| 298 | 298 |
| 299 MachineType machine_type() const { return MachineTypeField::decode(bits_); } | 299 MachineType machine_type() const { return MachineTypeField::decode(bits_); } |
| 300 | 300 |
| 301 int assigned_register() const { return AssignedRegisterField::decode(bits_); } | 301 int assigned_register() const { return AssignedRegisterField::decode(bits_); } |
| 302 bool HasRegisterAssigned() const { | 302 bool HasRegisterAssigned() const { |
| 303 return assigned_register() != kUnassignedRegister; | 303 return assigned_register() != kUnassignedRegister; |
| 304 } | 304 } |
| 305 void set_assigned_register(int reg); | 305 void set_assigned_register(int reg); |
| 306 void UnsetAssignedRegister(); | 306 void UnsetAssignedRegister(); |
| 307 | 307 |
| 308 bool spilled() const { return SpilledField::decode(bits_); } | 308 bool spilled() const { return SpilledField::decode(bits_); } |
| 309 void Spill(); | 309 void Spill(); |
| 310 | 310 |
| 311 RegisterKind kind() const; | 311 RegisterKind kind() const; |
| 312 | 312 |
| 313 // Correct only for parent. | |
| 314 bool is_phi() const { return IsPhiField::decode(bits_); } | |
| 315 void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); } | |
| 316 | |
| 317 // Correct only for parent. | |
| 318 bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); } | |
| 319 void set_is_non_loop_phi(bool value) { | |
| 320 bits_ = IsNonLoopPhiField::update(bits_, value); | |
| 321 } | |
| 322 | |
| 323 // Relevant only for parent. | |
| 324 bool has_slot_use() const { return HasSlotUseField::decode(bits_); } | |
| 325 void set_has_slot_use(bool value) { | |
| 326 bits_ = HasSlotUseField::update(bits_, value); | |
| 327 } | |
| 328 | |
| 329 // Returns use position in this live range that follows both start | 313 // Returns use position in this live range that follows both start |
| 330 // and last processed use position. | 314 // and last processed use position. |
| 331 UsePosition* NextUsePosition(LifetimePosition start) const; | 315 UsePosition* NextUsePosition(LifetimePosition start) const; |
| 332 | 316 |
| 333 // Returns use position for which register is required in this live | 317 // Returns use position for which register is required in this live |
| 334 // range and which follows both start and last processed use position | 318 // range and which follows both start and last processed use position |
| 335 UsePosition* NextRegisterPosition(LifetimePosition start) const; | 319 UsePosition* NextRegisterPosition(LifetimePosition start) const; |
| 336 | 320 |
| 337 // Returns the first use position requiring stack slot, or nullptr. | 321 // Returns the first use position requiring stack slot, or nullptr. |
| 338 UsePosition* NextSlotPosition(LifetimePosition start) const; | 322 UsePosition* NextSlotPosition(LifetimePosition start) const; |
| 339 | 323 |
| 340 // Returns use position for which register is beneficial in this live | 324 // Returns use position for which register is beneficial in this live |
| 341 // range and which follows both start and last processed use position | 325 // range and which follows both start and last processed use position |
| 342 UsePosition* NextUsePositionRegisterIsBeneficial( | 326 UsePosition* NextUsePositionRegisterIsBeneficial( |
| 343 LifetimePosition start) const; | 327 LifetimePosition start) const; |
| 344 | 328 |
| 345 // Returns use position for which register is beneficial in this live | 329 // Returns use position for which register is beneficial in this live |
| 346 // range and which precedes start. | 330 // range and which precedes start. |
| 347 UsePosition* PreviousUsePositionRegisterIsBeneficial( | 331 UsePosition* PreviousUsePositionRegisterIsBeneficial( |
| 348 LifetimePosition start) const; | 332 LifetimePosition start) const; |
| 349 | 333 |
| 350 // Can this live range be spilled at this position. | 334 // Can this live range be spilled at this position. |
| 351 bool CanBeSpilled(LifetimePosition pos) const; | 335 bool CanBeSpilled(LifetimePosition pos) const; |
| 352 | 336 |
| 353 // Split this live range at the given position which must follow the start of | 337 // Splitting primitive used by both splitting and splintering members. |
| 354 // the range. | 338 // Performs the split, but does not link the resulting ranges. |
| 339 // The given position must follow the start of the range. | |
| 355 // All uses following the given position will be moved from this | 340 // All uses following the given position will be moved from this |
| 356 // live range to the result live range. | 341 // live range to the result live range. |
| 357 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); | 342 // The current range will terminate at position, while result will start from |
| 358 void Splinter(LifetimePosition start, LifetimePosition end, LiveRange* result, | 343 // position. |
| 359 Zone* zone); | 344 void DetachAt(LifetimePosition position, LiveRange* result, Zone* zone); |
| 360 void Merge(LiveRange* other, RegisterAllocationData* data); | 345 |
| 346 // Detaches at position, and then links the resulting ranges. Returns the | |
| 347 // child, which starts at position. | |
| 348 LiveRange* SplitAt(LifetimePosition position, Zone* zone); | |
| 361 | 349 |
| 362 // Returns nullptr when no register is hinted, otherwise sets register_index. | 350 // Returns nullptr when no register is hinted, otherwise sets register_index. |
| 363 UsePosition* FirstHintPosition(int* register_index) const; | 351 UsePosition* FirstHintPosition(int* register_index) const; |
| 364 UsePosition* FirstHintPosition() const { | 352 UsePosition* FirstHintPosition() const { |
| 365 int register_index; | 353 int register_index; |
| 366 return FirstHintPosition(®ister_index); | 354 return FirstHintPosition(®ister_index); |
| 367 } | 355 } |
| 368 | 356 |
| 369 UsePosition* current_hint_position() const { | 357 UsePosition* current_hint_position() const { |
| 370 DCHECK(current_hint_position_ == FirstHintPosition()); | 358 DCHECK(current_hint_position_ == FirstHintPosition()); |
| 371 return current_hint_position_; | 359 return current_hint_position_; |
| 372 } | 360 } |
| 373 | 361 |
| 374 LifetimePosition Start() const { | 362 LifetimePosition Start() const { |
| 375 DCHECK(!IsEmpty()); | 363 DCHECK(!IsEmpty()); |
| 376 return first_interval()->start(); | 364 return first_interval()->start(); |
| 377 } | 365 } |
| 378 | 366 |
| 379 LifetimePosition End() const { | 367 LifetimePosition End() const { |
| 380 DCHECK(!IsEmpty()); | 368 DCHECK(!IsEmpty()); |
| 381 return last_interval_->end(); | 369 return last_interval_->end(); |
| 382 } | 370 } |
| 383 | 371 |
| 372 bool ShouldBeAllocatedBefore(const LiveRange* other) const; | |
| 373 bool CanCover(LifetimePosition position) const; | |
| 374 bool Covers(LifetimePosition position) const; | |
| 375 LifetimePosition FirstIntersection(LiveRange* other) const; | |
| 376 | |
| 377 void Verify() const; | |
| 378 | |
| 379 void ConvertUsesToOperand(const InstructionOperand& op, | |
| 380 const InstructionOperand& spill_op); | |
| 381 void SetUseHints(int register_index); | |
| 382 void UnsetUseHints() { SetUseHints(kUnassignedRegister); } | |
| 383 | |
| 384 // Used solely by the Greedy Allocator: | |
| 385 unsigned GetSize(); | |
| 386 float weight() const { return weight_; } | |
| 387 void set_weight(float weight) { weight_ = weight; } | |
| 388 | |
| 389 static const int kInvalidSize = -1; | |
| 390 static const float kInvalidWeight; | |
| 391 static const float kMaxWeight; | |
| 392 | |
| 393 private: | |
| 394 friend class TopLevelLiveRange; | |
| 395 explicit LiveRange(int relative_id, MachineType machine_type, | |
| 396 TopLevelLiveRange* parent); | |
| 397 | |
| 398 void AppendAsChild(TopLevelLiveRange* other); | |
| 399 void UpdateParentForAllChildren(TopLevelLiveRange* new_parent); | |
| 400 | |
| 401 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } | |
| 402 | |
| 403 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; | |
| 404 void AdvanceLastProcessedMarker(UseInterval* to_start_of, | |
| 405 LifetimePosition but_not_past) const; | |
| 406 | |
| 407 typedef BitField<bool, 0, 1> SpilledField; | |
| 408 typedef BitField<int32_t, 6, 6> AssignedRegisterField; | |
| 409 typedef BitField<MachineType, 12, 15> MachineTypeField; | |
| 410 | |
| 411 int relative_id_; | |
| 412 uint32_t bits_; | |
| 413 UseInterval* last_interval_; | |
| 414 UseInterval* first_interval_; | |
| 415 UsePosition* first_pos_; | |
| 416 TopLevelLiveRange* parent_; | |
| 417 LiveRange* next_; | |
| 418 // This is used as a cache, it doesn't affect correctness. | |
| 419 mutable UseInterval* current_interval_; | |
| 420 // This is used as a cache, it doesn't affect correctness. | |
| 421 mutable UsePosition* last_processed_use_; | |
| 422 // This is used as a cache, it's invalid outside of BuildLiveRanges. | |
| 423 mutable UsePosition* current_hint_position_; | |
| 424 | |
| 425 // greedy: the number of LifetimePositions covered by this range. Used to | |
| 426 // prioritize selecting live ranges for register assignment, as well as | |
| 427 // in weight calculations. | |
| 428 int size_; | |
| 429 | |
| 430 // greedy: a metric for resolving conflicts between ranges with an assigned | |
| 431 // register and ranges that intersect them and need a register. | |
| 432 float weight_; | |
| 433 | |
| 434 DISALLOW_COPY_AND_ASSIGN(LiveRange); | |
| 435 }; | |
| 436 | |
| 437 | |
| 438 class TopLevelLiveRange final : public LiveRange { | |
| 439 public: | |
| 440 explicit TopLevelLiveRange(int vreg, MachineType machine_type); | |
| 441 int spill_start_index() const { return spill_start_index_; } | |
| 442 | |
| 443 bool IsFixed() const { return vreg_ < 0; } | |
| 444 | |
| 445 bool is_phi() const { return IsPhiField::decode(bits_); } | |
| 446 void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); } | |
| 447 | |
| 448 bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); } | |
| 449 void set_is_non_loop_phi(bool value) { | |
| 450 bits_ = IsNonLoopPhiField::update(bits_, value); | |
| 451 } | |
| 452 | |
| 453 bool has_slot_use() const { return HasSlotUseField::decode(bits_); } | |
| 454 void set_has_slot_use(bool value) { | |
| 455 bits_ = HasSlotUseField::update(bits_, value); | |
| 456 } | |
| 457 | |
| 458 // Add a new interval or a new use position to this live range. | |
| 459 void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone); | |
| 460 void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone); | |
| 461 void AddUsePosition(UsePosition* pos); | |
| 462 | |
| 463 // Shorten the most recently added interval by setting a new start. | |
| 464 void ShortenTo(LifetimePosition start); | |
| 465 | |
| 466 // Detaches between start and end, and attributes the resulting range to | |
| 467 // result. | |
| 468 // The current range is pointed to as "splintered_from". No parent/child | |
| 469 // relationship is established between this and result. | |
| 470 void Splinter(LifetimePosition start, LifetimePosition end, | |
| 471 TopLevelLiveRange* result, Zone* zone); | |
| 472 | |
| 473 // Assuming other was splintered from this range, embeds other and its | |
| 474 // children as part of the children sequence of this range. | |
| 475 void Merge(TopLevelLiveRange* other, RegisterAllocationData* data); | |
| 476 | |
| 477 // Spill range management. | |
| 478 void SetSpillRange(SpillRange* spill_range); | |
| 384 enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange }; | 479 enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange }; |
| 480 void set_spill_type(SpillType value) { | |
| 481 bits_ = SpillTypeField::update(bits_, value); | |
| 482 } | |
| 385 SpillType spill_type() const { return SpillTypeField::decode(bits_); } | 483 SpillType spill_type() const { return SpillTypeField::decode(bits_); } |
| 386 InstructionOperand* GetSpillOperand() const { | 484 InstructionOperand* GetSpillOperand() const { |
| 387 DCHECK(spill_type() == SpillType::kSpillOperand); | 485 DCHECK(spill_type() == SpillType::kSpillOperand); |
| 388 return spill_operand_; | 486 return spill_operand_; |
| 389 } | 487 } |
| 390 | 488 |
| 391 SpillRange* GetAllocatedSpillRange() const { | 489 SpillRange* GetAllocatedSpillRange() const { |
| 392 DCHECK(spill_type() != SpillType::kSpillOperand); | 490 DCHECK(spill_type() != SpillType::kSpillOperand); |
| 393 return spill_range_; | 491 return spill_range_; |
| 394 } | 492 } |
| 395 | 493 |
| 396 SpillRange* GetSpillRange() const { | 494 SpillRange* GetSpillRange() const { |
| 397 DCHECK(spill_type() == SpillType::kSpillRange); | 495 DCHECK(spill_type() == SpillType::kSpillRange); |
| 398 return spill_range_; | 496 return spill_range_; |
| 399 } | 497 } |
| 400 bool HasNoSpillType() const { | 498 bool HasNoSpillType() const { |
| 401 return spill_type() == SpillType::kNoSpillType; | 499 return spill_type() == SpillType::kNoSpillType; |
| 402 } | 500 } |
| 403 bool HasSpillOperand() const { | 501 bool HasSpillOperand() const { |
| 404 return spill_type() == SpillType::kSpillOperand; | 502 return spill_type() == SpillType::kSpillOperand; |
| 405 } | 503 } |
| 406 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } | 504 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } |
| 407 bool MayRequireSpillRange() const { | |
| 408 DCHECK(!IsChild() && !IsSplinter()); | |
| 409 return !HasSpillOperand() && spill_range_ == nullptr; | |
| 410 } | |
| 411 | 505 |
| 412 AllocatedOperand GetSpillRangeOperand() const; | 506 AllocatedOperand GetSpillRangeOperand() const; |
| 413 | 507 |
| 414 void SpillAtDefinition(Zone* zone, int gap_index, | 508 void SpillAtDefinition(Zone* zone, int gap_index, |
| 415 InstructionOperand* operand); | 509 InstructionOperand* operand); |
| 416 void SetSpillOperand(InstructionOperand* operand); | 510 void SetSpillOperand(InstructionOperand* operand); |
| 417 void SetSpillRange(SpillRange* spill_range); | 511 void SetSpillStartIndex(int start) { |
| 512 spill_start_index_ = Min(start, spill_start_index_); | |
| 513 } | |
| 514 | |
| 515 void SetSplinteredFrom(TopLevelLiveRange* splinter_parent); | |
| 418 void CommitSpillsAtDefinition(InstructionSequence* sequence, | 516 void CommitSpillsAtDefinition(InstructionSequence* sequence, |
| 419 const InstructionOperand& operand, | 517 const InstructionOperand& operand, |
| 420 bool might_be_duplicated); | 518 bool might_be_duplicated); |
| 421 // This must be applied on top level ranges. | 519 |
| 422 // If all the children of this range are spilled in deferred blocks, and if | 520 // If all the children of this range are spilled in deferred blocks, and if |
| 423 // for any non-spilled child with a use position requiring a slot, that range | 521 // for any non-spilled child with a use position requiring a slot, that range |
| 424 // is contained in a deferred block, mark the range as | 522 // is contained in a deferred block, mark the range as |
| 425 // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition, | 523 // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition, |
| 426 // and instead let the LiveRangeConnector perform the spills within the | 524 // and instead let the LiveRangeConnector perform the spills within the |
| 427 // deferred blocks. If so, we insert here spills for non-spilled ranges | 525 // deferred blocks. If so, we insert here spills for non-spilled ranges |
| 428 // with slot use positions. | 526 // with slot use positions. |
| 429 bool TryCommitSpillInDeferredBlock(InstructionSequence* code, | 527 bool TryCommitSpillInDeferredBlock(InstructionSequence* code, |
| 430 const InstructionOperand& spill_operand); | 528 const InstructionOperand& spill_operand); |
| 431 | 529 |
| 432 void SetSpillStartIndex(int start) { | 530 TopLevelLiveRange* splintered_from() const { return splintered_from_; } |
| 433 spill_start_index_ = Min(start, spill_start_index_); | 531 bool IsSplinter() const { return splintered_from_ != nullptr; } |
| 532 bool MayRequireSpillRange() const { | |
| 533 DCHECK(!IsSplinter()); | |
| 534 return !HasSpillOperand() && spill_range_ == nullptr; | |
| 434 } | 535 } |
| 536 void UpdateSpillRangePostMerge(TopLevelLiveRange* merged); | |
| 537 int vreg() const { return vreg_; } | |
| 435 | 538 |
| 436 bool ShouldBeAllocatedBefore(const LiveRange* other) const; | 539 int GetNextChildId() { return ++last_child_id_; } |
| 437 bool CanCover(LifetimePosition position) const; | 540 bool IsSpilledOnlyInDeferredBlocks() const { |
| 438 bool Covers(LifetimePosition position) const; | 541 return spilled_in_deferred_blocks_; |
| 439 LifetimePosition FirstIntersection(LiveRange* other) const; | 542 } |
| 440 | |
| 441 // Add a new interval or a new use position to this live range. | |
| 442 void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone); | |
| 443 void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone); | |
| 444 void AddUsePosition(UsePosition* pos); | |
| 445 | |
| 446 // Shorten the most recently added interval by setting a new start. | |
| 447 void ShortenTo(LifetimePosition start); | |
| 448 | |
| 449 void Verify() const; | |
| 450 | |
| 451 void ConvertUsesToOperand(const InstructionOperand& op, | |
| 452 const InstructionOperand& spill_op); | |
| 453 void SetUseHints(int register_index); | |
| 454 void UnsetUseHints() { SetUseHints(kUnassignedRegister); } | |
| 455 | 543 |
| 456 struct SpillAtDefinitionList; | 544 struct SpillAtDefinitionList; |
| 457 | 545 |
| 458 SpillAtDefinitionList* spills_at_definition() const { | 546 SpillAtDefinitionList* spills_at_definition() const { |
| 459 return spills_at_definition_; | 547 return spills_at_definition_; |
| 460 } | 548 } |
| 461 | 549 |
| 462 // Used solely by the Greedy Allocator: | |
| 463 unsigned GetSize(); | |
| 464 float weight() const { return weight_; } | |
| 465 void set_weight(float weight) { weight_ = weight; } | |
| 466 | |
| 467 bool IsSpilledOnlyInDeferredBlocks() const { | |
| 468 return spilled_in_deferred_block_; | |
| 469 } | |
| 470 | |
| 471 static const int kInvalidSize = -1; | |
| 472 static const float kInvalidWeight; | |
| 473 static const float kMaxWeight; | |
| 474 | |
| 475 LiveRange* splintered_from() const { | |
| 476 DCHECK(!IsChild()); | |
| 477 return splintered_from_; | |
| 478 } | |
| 479 bool IsSplinter() const { | |
| 480 DCHECK(!IsChild()); | |
| 481 return splintered_from_ != nullptr; | |
| 482 } | |
| 483 | |
| 484 void set_spill_type(SpillType value) { | |
| 485 bits_ = SpillTypeField::update(bits_, value); | |
| 486 } | |
| 487 | |
| 488 private: | 550 private: |
| 489 void AppendChild(LiveRange* other); | |
| 490 void UpdateParentForAllChildren(LiveRange* new_parent); | |
| 491 void UpdateSpillRangePostMerge(LiveRange* merged); | |
| 492 | |
| 493 void SetSplinteredFrom(LiveRange* splinter_parent); | |
| 494 | |
| 495 | |
| 496 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); } | |
| 497 | |
| 498 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; | |
| 499 void AdvanceLastProcessedMarker(UseInterval* to_start_of, | |
| 500 LifetimePosition but_not_past) const; | |
| 501 | |
| 502 LiveRange* GetLastChild(); | |
| 503 | |
| 504 typedef BitField<bool, 0, 1> SpilledField; | |
| 505 typedef BitField<bool, 1, 1> HasSlotUseField; | 551 typedef BitField<bool, 1, 1> HasSlotUseField; |
| 506 typedef BitField<bool, 2, 1> IsPhiField; | 552 typedef BitField<bool, 2, 1> IsPhiField; |
| 507 typedef BitField<bool, 3, 1> IsNonLoopPhiField; | 553 typedef BitField<bool, 3, 1> IsNonLoopPhiField; |
| 508 typedef BitField<SpillType, 4, 2> SpillTypeField; | 554 typedef BitField<SpillType, 4, 2> SpillTypeField; |
| 509 typedef BitField<int32_t, 6, 6> AssignedRegisterField; | |
| 510 typedef BitField<MachineType, 12, 15> MachineTypeField; | |
| 511 | 555 |
| 512 int id_; | 556 LiveRange* GetLastChild(); |
| 513 int spill_start_index_; | 557 |
| 514 uint32_t bits_; | 558 int vreg_; |
| 515 UseInterval* last_interval_; | 559 int last_child_id_; |
| 516 UseInterval* first_interval_; | 560 TopLevelLiveRange* splintered_from_; |
| 517 UsePosition* first_pos_; | |
| 518 LiveRange* parent_; | |
| 519 LiveRange* next_; | |
| 520 LiveRange* splintered_from_; | |
| 521 union { | 561 union { |
| 522 // Correct value determined by spill_type() | 562 // Correct value determined by spill_type() |
| 523 InstructionOperand* spill_operand_; | 563 InstructionOperand* spill_operand_; |
| 524 SpillRange* spill_range_; | 564 SpillRange* spill_range_; |
| 525 }; | 565 }; |
| 526 SpillAtDefinitionList* spills_at_definition_; | 566 SpillAtDefinitionList* spills_at_definition_; |
| 527 // This is used as a cache, it doesn't affect correctness. | |
| 528 mutable UseInterval* current_interval_; | |
| 529 // This is used as a cache, it doesn't affect correctness. | |
| 530 mutable UsePosition* last_processed_use_; | |
| 531 // This is used as a cache, it's invalid outside of BuildLiveRanges. | |
| 532 mutable UsePosition* current_hint_position_; | |
| 533 | |
| 534 // greedy: the number of LifetimePositions covered by this range. Used to | |
| 535 // prioritize selecting live ranges for register assignment, as well as | |
| 536 // in weight calculations. | |
| 537 int size_; | |
| 538 | |
| 539 // greedy: a metric for resolving conflicts between ranges with an assigned | |
| 540 // register and ranges that intersect them and need a register. | |
| 541 float weight_; | |
| 542 | |
| 543 // TODO(mtrofin): generalize spilling after definition, currently specialized | 567 // TODO(mtrofin): generalize spilling after definition, currently specialized |
| 544 // just for spill in a single deferred block. | 568 // just for spill in a single deferred block. |
| 545 bool spilled_in_deferred_block_; | 569 bool spilled_in_deferred_blocks_; |
| 546 DISALLOW_COPY_AND_ASSIGN(LiveRange); | 570 int spill_start_index_; |
| 571 | |
| 572 DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange); | |
| 547 }; | 573 }; |
| 548 | 574 |
| 549 | 575 |
| 550 struct PrintableLiveRange { | 576 struct PrintableLiveRange { |
| 551 const RegisterConfiguration* register_configuration_; | 577 const RegisterConfiguration* register_configuration_; |
| 552 const LiveRange* range_; | 578 const LiveRange* range_; |
| 553 }; | 579 }; |
| 554 | 580 |
| 555 | 581 |
| 556 std::ostream& operator<<(std::ostream& os, | 582 std::ostream& operator<<(std::ostream& os, |
| 557 const PrintableLiveRange& printable_range); | 583 const PrintableLiveRange& printable_range); |
| 558 | 584 |
| 559 | 585 |
| 560 class SpillRange final : public ZoneObject { | 586 class SpillRange final : public ZoneObject { |
| 561 public: | 587 public: |
| 562 static const int kUnassignedSlot = -1; | 588 static const int kUnassignedSlot = -1; |
| 563 SpillRange(LiveRange* range, Zone* zone); | 589 SpillRange(TopLevelLiveRange* range, Zone* zone); |
| 564 | 590 |
| 565 UseInterval* interval() const { return use_interval_; } | 591 UseInterval* interval() const { return use_interval_; } |
| 566 // Currently, only 4 or 8 byte slots are supported. | 592 // Currently, only 4 or 8 byte slots are supported. |
| 567 int ByteWidth() const; | 593 int ByteWidth() const; |
| 568 bool IsEmpty() const { return live_ranges_.empty(); } | 594 bool IsEmpty() const { return live_ranges_.empty(); } |
| 569 bool TryMerge(SpillRange* other); | 595 bool TryMerge(SpillRange* other); |
| 570 | 596 |
| 571 void set_assigned_slot(int index) { | 597 void set_assigned_slot(int index) { |
| 572 DCHECK_EQ(kUnassignedSlot, assigned_slot_); | 598 DCHECK_EQ(kUnassignedSlot, assigned_slot_); |
| 573 assigned_slot_ = index; | 599 assigned_slot_ = index; |
| 574 } | 600 } |
| 575 int assigned_slot() { | 601 int assigned_slot() { |
| 576 DCHECK_NE(kUnassignedSlot, assigned_slot_); | 602 DCHECK_NE(kUnassignedSlot, assigned_slot_); |
| 577 return assigned_slot_; | 603 return assigned_slot_; |
| 578 } | 604 } |
| 579 const ZoneVector<LiveRange*>& live_ranges() const { return live_ranges_; } | 605 const ZoneVector<TopLevelLiveRange*>& live_ranges() const { |
| 580 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } | 606 return live_ranges_; |
| 607 } | |
| 608 ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; } | |
| 581 int byte_width() const { return byte_width_; } | 609 int byte_width() const { return byte_width_; } |
| 582 RegisterKind kind() const { return kind_; } | 610 RegisterKind kind() const { return kind_; } |
| 583 | 611 |
| 584 private: | 612 private: |
| 585 LifetimePosition End() const { return end_position_; } | 613 LifetimePosition End() const { return end_position_; } |
| 586 bool IsIntersectingWith(SpillRange* other) const; | 614 bool IsIntersectingWith(SpillRange* other) const; |
| 587 // Merge intervals, making sure the use intervals are sorted | 615 // Merge intervals, making sure the use intervals are sorted |
| 588 void MergeDisjointIntervals(UseInterval* other); | 616 void MergeDisjointIntervals(UseInterval* other); |
| 589 | 617 |
| 590 ZoneVector<LiveRange*> live_ranges_; | 618 ZoneVector<TopLevelLiveRange*> live_ranges_; |
| 591 UseInterval* use_interval_; | 619 UseInterval* use_interval_; |
| 592 LifetimePosition end_position_; | 620 LifetimePosition end_position_; |
| 593 int assigned_slot_; | 621 int assigned_slot_; |
| 594 int byte_width_; | 622 int byte_width_; |
| 595 RegisterKind kind_; | 623 RegisterKind kind_; |
| 596 | 624 |
| 597 DISALLOW_COPY_AND_ASSIGN(SpillRange); | 625 DISALLOW_COPY_AND_ASSIGN(SpillRange); |
| 598 }; | 626 }; |
| 599 | 627 |
| 600 | 628 |
| (...skipping 29 matching lines...) Expand all Loading... | |
| 630 ReferenceMap* map; | 658 ReferenceMap* map; |
| 631 InstructionOperand* operand; | 659 InstructionOperand* operand; |
| 632 }; | 660 }; |
| 633 typedef ZoneVector<DelayedReference> DelayedReferences; | 661 typedef ZoneVector<DelayedReference> DelayedReferences; |
| 634 | 662 |
| 635 RegisterAllocationData(const RegisterConfiguration* config, | 663 RegisterAllocationData(const RegisterConfiguration* config, |
| 636 Zone* allocation_zone, Frame* frame, | 664 Zone* allocation_zone, Frame* frame, |
| 637 InstructionSequence* code, | 665 InstructionSequence* code, |
| 638 const char* debug_name = nullptr); | 666 const char* debug_name = nullptr); |
| 639 | 667 |
| 640 const ZoneVector<LiveRange*>& live_ranges() const { return live_ranges_; } | 668 const ZoneVector<TopLevelLiveRange*>& live_ranges() const { |
| 641 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } | 669 return live_ranges_; |
| 642 const ZoneVector<LiveRange*>& fixed_live_ranges() const { | 670 } |
| 671 ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; } | |
| 672 const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const { | |
| 643 return fixed_live_ranges_; | 673 return fixed_live_ranges_; |
| 644 } | 674 } |
| 645 ZoneVector<LiveRange*>& fixed_live_ranges() { return fixed_live_ranges_; } | 675 ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() { |
| 646 ZoneVector<LiveRange*>& fixed_double_live_ranges() { | 676 return fixed_live_ranges_; |
| 677 } | |
| 678 ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() { | |
| 647 return fixed_double_live_ranges_; | 679 return fixed_double_live_ranges_; |
| 648 } | 680 } |
| 649 const ZoneVector<LiveRange*>& fixed_double_live_ranges() const { | 681 const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const { |
| 650 return fixed_double_live_ranges_; | 682 return fixed_double_live_ranges_; |
| 651 } | 683 } |
| 652 ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; } | 684 ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; } |
| 653 ZoneSet<SpillRange*>& spill_ranges() { return spill_ranges_; } | 685 ZoneSet<SpillRange*>& spill_ranges() { return spill_ranges_; } |
| 654 DelayedReferences& delayed_references() { return delayed_references_; } | 686 DelayedReferences& delayed_references() { return delayed_references_; } |
| 655 InstructionSequence* code() const { return code_; } | 687 InstructionSequence* code() const { return code_; } |
| 656 // This zone is for datastructures only needed during register allocation | 688 // This zone is for datastructures only needed during register allocation |
| 657 // phases. | 689 // phases. |
| 658 Zone* allocation_zone() const { return allocation_zone_; } | 690 Zone* allocation_zone() const { return allocation_zone_; } |
| 659 // This zone is for InstructionOperands and moves that live beyond register | 691 // This zone is for InstructionOperands and moves that live beyond register |
| 660 // allocation. | 692 // allocation. |
| 661 Zone* code_zone() const { return code()->zone(); } | 693 Zone* code_zone() const { return code()->zone(); } |
| 662 Frame* frame() const { return frame_; } | 694 Frame* frame() const { return frame_; } |
| 663 const char* debug_name() const { return debug_name_; } | 695 const char* debug_name() const { return debug_name_; } |
| 664 const RegisterConfiguration* config() const { return config_; } | 696 const RegisterConfiguration* config() const { return config_; } |
| 665 | 697 |
| 666 MachineType MachineTypeFor(int virtual_register); | 698 MachineType MachineTypeFor(int virtual_register); |
| 667 | 699 |
| 668 LiveRange* LiveRangeFor(int index); | 700 TopLevelLiveRange* GetOrCreateLiveRangeFor(int index); |
| 669 // Creates a new live range. | 701 // Creates a new live range. |
| 670 LiveRange* NewLiveRange(int index, MachineType machine_type); | 702 TopLevelLiveRange* NewLiveRange(int index, MachineType machine_type); |
| 671 LiveRange* NextLiveRange(MachineType machine_type); | 703 TopLevelLiveRange* NextLiveRange(MachineType machine_type); |
| 672 LiveRange* NewChildRangeFor(LiveRange* range); | |
| 673 | 704 |
| 674 SpillRange* AssignSpillRangeToLiveRange(LiveRange* range); | 705 SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range); |
| 675 SpillRange* CreateSpillRangeForLiveRange(LiveRange* range); | 706 SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range); |
| 676 | 707 |
| 677 MoveOperands* AddGapMove(int index, Instruction::GapPosition position, | 708 MoveOperands* AddGapMove(int index, Instruction::GapPosition position, |
| 678 const InstructionOperand& from, | 709 const InstructionOperand& from, |
| 679 const InstructionOperand& to); | 710 const InstructionOperand& to); |
| 680 | 711 |
| 681 bool IsReference(int virtual_register) const { | 712 bool IsReference(TopLevelLiveRange* top_range) const { |
| 682 return code()->IsReference(virtual_register); | 713 return code()->IsReference(top_range->vreg()); |
| 683 } | 714 } |
| 684 | 715 |
| 685 bool ExistsUseWithoutDefinition(); | 716 bool ExistsUseWithoutDefinition(); |
| 686 | 717 |
| 687 void MarkAllocated(RegisterKind kind, int index); | 718 void MarkAllocated(RegisterKind kind, int index); |
| 688 | 719 |
| 689 PhiMapValue* InitializePhiMap(const InstructionBlock* block, | 720 PhiMapValue* InitializePhiMap(const InstructionBlock* block, |
| 690 PhiInstruction* phi); | 721 PhiInstruction* phi); |
| 722 PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range); | |
| 691 PhiMapValue* GetPhiMapValueFor(int virtual_register); | 723 PhiMapValue* GetPhiMapValueFor(int virtual_register); |
| 692 bool IsBlockBoundary(LifetimePosition pos) const; | 724 bool IsBlockBoundary(LifetimePosition pos) const; |
| 693 | 725 |
| 694 void Print(const InstructionSequence* instructionSequence); | 726 void Print(const InstructionSequence* instructionSequence); |
| 695 void Print(const Instruction* instruction); | 727 void Print(const Instruction* instruction); |
| 696 void Print(const LiveRange* range, bool with_children = false); | 728 void Print(const LiveRange* range, bool with_children = false); |
| 697 void Print(const InstructionOperand& op); | 729 void Print(const InstructionOperand& op); |
| 698 void Print(const MoveOperands* move); | 730 void Print(const MoveOperands* move); |
| 699 void Print(const SpillRange* spill_range); | 731 void Print(const SpillRange* spill_range); |
| 700 | 732 |
| 701 private: | 733 private: |
| 734 int GetNextLiveRangeId(); | |
| 735 | |
| 702 Zone* const allocation_zone_; | 736 Zone* const allocation_zone_; |
| 703 Frame* const frame_; | 737 Frame* const frame_; |
| 704 InstructionSequence* const code_; | 738 InstructionSequence* const code_; |
| 705 const char* const debug_name_; | 739 const char* const debug_name_; |
| 706 const RegisterConfiguration* const config_; | 740 const RegisterConfiguration* const config_; |
| 707 PhiMap phi_map_; | 741 PhiMap phi_map_; |
| 708 ZoneVector<BitVector*> live_in_sets_; | 742 ZoneVector<BitVector*> live_in_sets_; |
| 709 ZoneVector<LiveRange*> live_ranges_; | 743 ZoneVector<TopLevelLiveRange*> live_ranges_; |
| 710 ZoneVector<LiveRange*> fixed_live_ranges_; | 744 ZoneVector<TopLevelLiveRange*> fixed_live_ranges_; |
| 711 ZoneVector<LiveRange*> fixed_double_live_ranges_; | 745 ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_; |
| 712 ZoneSet<SpillRange*> spill_ranges_; | 746 ZoneSet<SpillRange*> spill_ranges_; |
| 713 DelayedReferences delayed_references_; | 747 DelayedReferences delayed_references_; |
| 714 BitVector* assigned_registers_; | 748 BitVector* assigned_registers_; |
| 715 BitVector* assigned_double_registers_; | 749 BitVector* assigned_double_registers_; |
| 716 int virtual_register_count_; | 750 int virtual_register_count_; |
| 717 | 751 |
| 718 DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData); | 752 DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData); |
| 719 }; | 753 }; |
| 720 | 754 |
| 721 | 755 |
| 722 class ConstraintBuilder final : public ZoneObject { | 756 class ConstraintBuilder final : public ZoneObject { |
| 723 public: | 757 public: |
| 724 explicit ConstraintBuilder(RegisterAllocationData* data); | 758 explicit ConstraintBuilder(RegisterAllocationData* data); |
| 725 | 759 |
| 726 // Phase 1 : insert moves to account for fixed register operands. | 760 // Phase 1 : insert moves to account for fixed register operands. |
| 727 void MeetRegisterConstraints(); | 761 void MeetRegisterConstraints(); |
| 728 | 762 |
| 729 // Phase 2: deconstruct SSA by inserting moves in successors and the headers | 763 // Phase 2: deconstruct SSA by inserting moves in successors and the headers |
| 730 // of blocks containing phis. | 764 // of blocks containing phis. |
| 731 void ResolvePhis(); | 765 void ResolvePhis(); |
| 732 | 766 |
| 733 private: | 767 private: |
| 734 RegisterAllocationData* data() const { return data_; } | 768 RegisterAllocationData* data() const { return data_; } |
| 735 InstructionSequence* code() const { return data()->code(); } | 769 InstructionSequence* code() const { return data()->code(); } |
| 736 Zone* allocation_zone() const { return data()->allocation_zone(); } | 770 Zone* allocation_zone() const { return data()->allocation_zone(); } |
| 737 | 771 |
| 738 Instruction* InstructionAt(int index) { return code()->InstructionAt(index); } | |
| 739 bool IsReference(int virtual_register) const { | |
| 740 return data()->IsReference(virtual_register); | |
| 741 } | |
| 742 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } | |
| 743 | |
| 744 InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos, | 772 InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos, |
| 745 bool is_tagged); | 773 bool is_tagged); |
| 746 void MeetRegisterConstraints(const InstructionBlock* block); | 774 void MeetRegisterConstraints(const InstructionBlock* block); |
| 747 void MeetConstraintsBefore(int index); | 775 void MeetConstraintsBefore(int index); |
| 748 void MeetConstraintsAfter(int index); | 776 void MeetConstraintsAfter(int index); |
| 749 void MeetRegisterConstraintsForLastInstructionInBlock( | 777 void MeetRegisterConstraintsForLastInstructionInBlock( |
| 750 const InstructionBlock* block); | 778 const InstructionBlock* block); |
| 751 void ResolvePhis(const InstructionBlock* block); | 779 void ResolvePhis(const InstructionBlock* block); |
| 752 | 780 |
| 753 RegisterAllocationData* const data_; | 781 RegisterAllocationData* const data_; |
| (...skipping 14 matching lines...) Expand all Loading... | |
| 768 private: | 796 private: |
| 769 RegisterAllocationData* data() const { return data_; } | 797 RegisterAllocationData* data() const { return data_; } |
| 770 InstructionSequence* code() const { return data()->code(); } | 798 InstructionSequence* code() const { return data()->code(); } |
| 771 Zone* allocation_zone() const { return data()->allocation_zone(); } | 799 Zone* allocation_zone() const { return data()->allocation_zone(); } |
| 772 Zone* code_zone() const { return code()->zone(); } | 800 Zone* code_zone() const { return code()->zone(); } |
| 773 const RegisterConfiguration* config() const { return data()->config(); } | 801 const RegisterConfiguration* config() const { return data()->config(); } |
| 774 ZoneVector<BitVector*>& live_in_sets() const { | 802 ZoneVector<BitVector*>& live_in_sets() const { |
| 775 return data()->live_in_sets(); | 803 return data()->live_in_sets(); |
| 776 } | 804 } |
| 777 | 805 |
| 778 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } | |
| 779 | |
| 780 void Verify() const; | 806 void Verify() const; |
| 781 | 807 |
| 782 // Liveness analysis support. | 808 // Liveness analysis support. |
| 783 void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out); | 809 void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out); |
| 784 void ProcessInstructions(const InstructionBlock* block, BitVector* live); | 810 void ProcessInstructions(const InstructionBlock* block, BitVector* live); |
| 785 void ProcessPhis(const InstructionBlock* block, BitVector* live); | 811 void ProcessPhis(const InstructionBlock* block, BitVector* live); |
| 786 void ProcessLoopHeader(const InstructionBlock* block, BitVector* live); | 812 void ProcessLoopHeader(const InstructionBlock* block, BitVector* live); |
| 787 | 813 |
| 788 static int FixedLiveRangeID(int index) { return -index - 1; } | 814 static int FixedLiveRangeID(int index) { return -index - 1; } |
| 789 int FixedDoubleLiveRangeID(int index); | 815 int FixedDoubleLiveRangeID(int index); |
| 790 LiveRange* FixedLiveRangeFor(int index); | 816 TopLevelLiveRange* FixedLiveRangeFor(int index); |
| 791 LiveRange* FixedDoubleLiveRangeFor(int index); | 817 TopLevelLiveRange* FixedDoubleLiveRangeFor(int index); |
| 792 | 818 |
| 793 void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos); | 819 void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos); |
| 794 void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos); | 820 void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos); |
| 795 | 821 |
| 796 UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand, | 822 UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand, |
| 797 void* hint, UsePositionHintType hint_type); | 823 void* hint, UsePositionHintType hint_type); |
| 798 UsePosition* NewUsePosition(LifetimePosition pos) { | 824 UsePosition* NewUsePosition(LifetimePosition pos) { |
| 799 return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone); | 825 return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone); |
| 800 } | 826 } |
| 801 LiveRange* LiveRangeFor(InstructionOperand* operand); | 827 TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand); |
| 802 // Helper methods for building intervals. | 828 // Helper methods for building intervals. |
| 803 UsePosition* Define(LifetimePosition position, InstructionOperand* operand, | 829 UsePosition* Define(LifetimePosition position, InstructionOperand* operand, |
| 804 void* hint, UsePositionHintType hint_type); | 830 void* hint, UsePositionHintType hint_type); |
| 805 void Define(LifetimePosition position, InstructionOperand* operand) { | 831 void Define(LifetimePosition position, InstructionOperand* operand) { |
| 806 Define(position, operand, nullptr, UsePositionHintType::kNone); | 832 Define(position, operand, nullptr, UsePositionHintType::kNone); |
| 807 } | 833 } |
| 808 UsePosition* Use(LifetimePosition block_start, LifetimePosition position, | 834 UsePosition* Use(LifetimePosition block_start, LifetimePosition position, |
| 809 InstructionOperand* operand, void* hint, | 835 InstructionOperand* operand, void* hint, |
| 810 UsePositionHintType hint_type); | 836 UsePositionHintType hint_type); |
| 811 void Use(LifetimePosition block_start, LifetimePosition position, | 837 void Use(LifetimePosition block_start, LifetimePosition position, |
| (...skipping 13 matching lines...) Expand all Loading... | |
| 825 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); | 851 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); |
| 826 | 852 |
| 827 protected: | 853 protected: |
| 828 RegisterAllocationData* data() const { return data_; } | 854 RegisterAllocationData* data() const { return data_; } |
| 829 InstructionSequence* code() const { return data()->code(); } | 855 InstructionSequence* code() const { return data()->code(); } |
| 830 RegisterKind mode() const { return mode_; } | 856 RegisterKind mode() const { return mode_; } |
| 831 int num_registers() const { return num_registers_; } | 857 int num_registers() const { return num_registers_; } |
| 832 | 858 |
| 833 Zone* allocation_zone() const { return data()->allocation_zone(); } | 859 Zone* allocation_zone() const { return data()->allocation_zone(); } |
| 834 | 860 |
| 835 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); } | |
| 836 | |
| 837 // Split the given range at the given position. | 861 // Split the given range at the given position. |
| 838 // If range starts at or after the given position then the | 862 // If range starts at or after the given position then the |
| 839 // original range is returned. | 863 // original range is returned. |
| 840 // Otherwise returns the live range that starts at pos and contains | 864 // Otherwise returns the live range that starts at pos and contains |
| 841 // all uses from the original range that follow pos. Uses at pos will | 865 // all uses from the original range that follow pos. Uses at pos will |
| 842 // still be owned by the original range after splitting. | 866 // still be owned by the original range after splitting. |
| 843 LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos); | 867 LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos); |
| 844 | 868 |
| 845 // Split the given range in a position from the interval [start, end]. | 869 // Split the given range in a position from the interval [start, end]. |
| 846 LiveRange* SplitBetween(LiveRange* range, LifetimePosition start, | 870 LiveRange* SplitBetween(LiveRange* range, LifetimePosition start, |
| 847 LifetimePosition end); | 871 LifetimePosition end); |
| 848 | 872 |
| 849 // Find a lifetime position in the interval [start, end] which | 873 // Find a lifetime position in the interval [start, end] which |
| 850 // is optimal for splitting: it is either header of the outermost | 874 // is optimal for splitting: it is either header of the outermost |
| 851 // loop covered by this interval or the latest possible position. | 875 // loop covered by this interval or the latest possible position. |
| 852 LifetimePosition FindOptimalSplitPos(LifetimePosition start, | 876 LifetimePosition FindOptimalSplitPos(LifetimePosition start, |
| 853 LifetimePosition end); | 877 LifetimePosition end); |
| 854 | 878 |
| 855 void Spill(LiveRange* range); | 879 void Spill(LiveRange* range); |
| 856 | 880 |
| 857 // If we are trying to spill a range inside the loop try to | 881 // If we are trying to spill a range inside the loop try to |
| 858 // hoist spill position out to the point just before the loop. | 882 // hoist spill position out to the point just before the loop. |
| 859 LifetimePosition FindOptimalSpillingPos(LiveRange* range, | 883 LifetimePosition FindOptimalSpillingPos(LiveRange* range, |
| 860 LifetimePosition pos); | 884 LifetimePosition pos); |
| 861 | 885 |
| 862 const ZoneVector<LiveRange*>& GetFixedRegisters() const; | 886 const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const; |
| 863 const char* RegisterName(int allocation_index) const; | 887 const char* RegisterName(int allocation_index) const; |
| 864 | 888 |
| 865 private: | 889 private: |
| 866 RegisterAllocationData* const data_; | 890 RegisterAllocationData* const data_; |
| 867 const RegisterKind mode_; | 891 const RegisterKind mode_; |
| 868 const int num_registers_; | 892 const int num_registers_; |
| 869 | 893 |
| 870 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); | 894 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); |
| 871 }; | 895 }; |
| 872 | 896 |
| (...skipping 23 matching lines...) Expand all Loading... | |
| 896 void AddToUnhandledSorted(LiveRange* range); | 920 void AddToUnhandledSorted(LiveRange* range); |
| 897 void AddToUnhandledUnsorted(LiveRange* range); | 921 void AddToUnhandledUnsorted(LiveRange* range); |
| 898 void SortUnhandled(); | 922 void SortUnhandled(); |
| 899 bool UnhandledIsSorted(); | 923 bool UnhandledIsSorted(); |
| 900 void ActiveToHandled(LiveRange* range); | 924 void ActiveToHandled(LiveRange* range); |
| 901 void ActiveToInactive(LiveRange* range); | 925 void ActiveToInactive(LiveRange* range); |
| 902 void InactiveToHandled(LiveRange* range); | 926 void InactiveToHandled(LiveRange* range); |
| 903 void InactiveToActive(LiveRange* range); | 927 void InactiveToActive(LiveRange* range); |
| 904 | 928 |
| 905 // Helper methods for allocating registers. | 929 // Helper methods for allocating registers. |
| 906 bool TryReuseSpillForPhi(LiveRange* range); | 930 bool TryReuseSpillForPhi(TopLevelLiveRange* range); |
| 907 bool TryAllocateFreeReg(LiveRange* range); | 931 bool TryAllocateFreeReg(LiveRange* range); |
| 908 void AllocateBlockedReg(LiveRange* range); | 932 void AllocateBlockedReg(LiveRange* range); |
| 909 | 933 |
| 910 // Spill the given life range after position pos. | 934 // Spill the given life range after position pos. |
| 911 void SpillAfter(LiveRange* range, LifetimePosition pos); | 935 void SpillAfter(LiveRange* range, LifetimePosition pos); |
| 912 | 936 |
| 913 // Spill the given life range after position [start] and up to position [end]. | 937 // Spill the given life range after position [start] and up to position [end]. |
| 914 void SpillBetween(LiveRange* range, LifetimePosition start, | 938 void SpillBetween(LiveRange* range, LifetimePosition start, |
| 915 LifetimePosition end); | 939 LifetimePosition end); |
| 916 | 940 |
| (...skipping 103 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1020 RegisterAllocationData* const data_; | 1044 RegisterAllocationData* const data_; |
| 1021 | 1045 |
| 1022 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); | 1046 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); |
| 1023 }; | 1047 }; |
| 1024 | 1048 |
| 1025 } // namespace compiler | 1049 } // namespace compiler |
| 1026 } // namespace internal | 1050 } // namespace internal |
| 1027 } // namespace v8 | 1051 } // namespace v8 |
| 1028 | 1052 |
| 1029 #endif // V8_REGISTER_ALLOCATOR_H_ | 1053 #endif // V8_REGISTER_ALLOCATOR_H_ |
| OLD | NEW |