| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_REGISTER_ALLOCATOR_H_ | 5 #ifndef V8_REGISTER_ALLOCATOR_H_ |
| 6 #define V8_REGISTER_ALLOCATOR_H_ | 6 #define V8_REGISTER_ALLOCATOR_H_ |
| 7 | 7 |
| 8 #include "src/compiler/instruction.h" | 8 #include "src/compiler/instruction.h" |
| 9 #include "src/zone-containers.h" | 9 #include "src/zone-containers.h" |
| 10 | 10 |
| (...skipping 179 matching lines...) | |
| 190 LiveRange* next() const { return next_; } | 190 LiveRange* next() const { return next_; } |
| 191 bool IsChild() const { return parent() != NULL; } | 191 bool IsChild() const { return parent() != NULL; } |
| 192 int id() const { return id_; } | 192 int id() const { return id_; } |
| 193 bool IsFixed() const { return id_ < 0; } | 193 bool IsFixed() const { return id_ < 0; } |
| 194 bool IsEmpty() const { return first_interval() == NULL; } | 194 bool IsEmpty() const { return first_interval() == NULL; } |
| 195 InstructionOperand* CreateAssignedOperand(Zone* zone) const; | 195 InstructionOperand* CreateAssignedOperand(Zone* zone) const; |
| 196 int assigned_register() const { return assigned_register_; } | 196 int assigned_register() const { return assigned_register_; } |
| 197 int spill_start_index() const { return spill_start_index_; } | 197 int spill_start_index() const { return spill_start_index_; } |
| 198 void set_assigned_register(int reg, Zone* zone); | 198 void set_assigned_register(int reg, Zone* zone); |
| 199 void MakeSpilled(Zone* zone); | 199 void MakeSpilled(Zone* zone); |
| 200 bool is_phi() const { return is_phi_; } | |
| 201 void set_is_phi(bool is_phi) { is_phi_ = is_phi; } | |
| 202 bool is_non_loop_phi() const { return is_non_loop_phi_; } | |
| 203 void set_is_non_loop_phi(bool is_non_loop_phi) { | |
| 204 is_non_loop_phi_ = is_non_loop_phi; | |
| 205 } | |
| 206 | 200 |
| 207 // Returns use position in this live range that follows both start | 201 // Returns use position in this live range that follows both start |
| 208 // and last processed use position. | 202 // and last processed use position. |
| 209 // Modifies internal state of live range! | 203 // Modifies internal state of live range! |
| 210 UsePosition* NextUsePosition(LifetimePosition start); | 204 UsePosition* NextUsePosition(LifetimePosition start); |
| 211 | 205 |
| 212 // Returns use position for which register is required in this live | 206 // Returns use position for which register is required in this live |
| 213 // range and which follows both start and last processed use position | 207 // range and which follows both start and last processed use position |
| 214 // Modifies internal state of live range! | 208 // Modifies internal state of live range! |
| 215 UsePosition* NextRegisterPosition(LifetimePosition start); | 209 UsePosition* NextRegisterPosition(LifetimePosition start); |
| (...skipping 72 matching lines...) | |
| 288 #endif | 282 #endif |
| 289 | 283 |
| 290 private: | 284 private: |
| 291 void ConvertOperands(Zone* zone); | 285 void ConvertOperands(Zone* zone); |
| 292 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; | 286 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const; |
| 293 void AdvanceLastProcessedMarker(UseInterval* to_start_of, | 287 void AdvanceLastProcessedMarker(UseInterval* to_start_of, |
| 294 LifetimePosition but_not_past) const; | 288 LifetimePosition but_not_past) const; |
| 295 | 289 |
| 296 int id_; | 290 int id_; |
| 297 bool spilled_; | 291 bool spilled_; |
| 298 bool is_phi_; | |
| 299 bool is_non_loop_phi_; | |
| 300 RegisterKind kind_; | 292 RegisterKind kind_; |
| 301 int assigned_register_; | 293 int assigned_register_; |
| 302 UseInterval* last_interval_; | 294 UseInterval* last_interval_; |
| 303 UseInterval* first_interval_; | 295 UseInterval* first_interval_; |
| 304 UsePosition* first_pos_; | 296 UsePosition* first_pos_; |
| 305 LiveRange* parent_; | 297 LiveRange* parent_; |
| 306 LiveRange* next_; | 298 LiveRange* next_; |
| 307 // This is used as a cache, it doesn't affect correctness. | 299 // This is used as a cache, it doesn't affect correctness. |
| 308 mutable UseInterval* current_interval_; | 300 mutable UseInterval* current_interval_; |
| 309 UsePosition* last_processed_use_; | 301 UsePosition* last_processed_use_; |
| (...skipping 24 matching lines...) | |
| 334 const ZoneVector<LiveRange*>& fixed_double_live_ranges() const { | 326 const ZoneVector<LiveRange*>& fixed_double_live_ranges() const { |
| 335 return fixed_double_live_ranges_; | 327 return fixed_double_live_ranges_; |
| 336 } | 328 } |
| 337 InstructionSequence* code() const { return code_; } | 329 InstructionSequence* code() const { return code_; } |
| 338 // This zone is for datastructures only needed during register allocation. | 330 // This zone is for datastructures only needed during register allocation. |
| 339 Zone* local_zone() const { return local_zone_; } | 331 Zone* local_zone() const { return local_zone_; } |
| 340 | 332 |
| 341 // Phase 1 : insert moves to account for fixed register operands. | 333 // Phase 1 : insert moves to account for fixed register operands. |
| 342 void MeetRegisterConstraints(); | 334 void MeetRegisterConstraints(); |
| 343 | 335 |
| 344 // Phase 2: deconstruct SSA by inserting moves in successors and the headers | 336 // Phase 2: compute liveness of all virtual registers. |
| 345 // of blocks containing phis. | |
| 346 void ResolvePhis(); | |
| 347 | |
| 348 // Phase 3: compute liveness of all virtual registers. | |
| 349 void BuildLiveRanges(); | 337 void BuildLiveRanges(); |
| 350 bool ExistsUseWithoutDefinition(); | 338 bool ExistsUseWithoutDefinition(); |
| 351 | 339 |
| 352 // Phase 4: compute register assignments. | 340 // Phase 3: compute register assignments. |
| 353 void AllocateGeneralRegisters(); | 341 void AllocateGeneralRegisters(); |
| 354 void AllocateDoubleRegisters(); | 342 void AllocateDoubleRegisters(); |
| 355 | 343 |
| 356 // Phase 5: compute values for pointer maps. | 344 // Phase 4: compute values for pointer maps. |
| 357 void PopulatePointerMaps(); // TODO(titzer): rename to PopulateReferenceMaps. | 345 void PopulatePointerMaps(); // TODO(titzer): rename to PopulateReferenceMaps. |
| 358 | 346 |
| 359 // Phase 6: reconnect split ranges with moves. | 347 // Phase 5: reconnect split ranges with moves. |
| 360 void ConnectRanges(); | 348 void ConnectRanges(); |
| 361 | 349 |
| 362 // Phase 7: insert moves to connect ranges across basic blocks. | 350 // Phase 6: insert moves to connect ranges across basic blocks. |
| 363 void ResolveControlFlow(); | 351 void ResolveControlFlow(); |
| 364 | 352 |
| 365 private: | 353 private: |
| 366 int GetVirtualRegister() { | 354 int GetVirtualRegister() { |
| 367 int vreg = code()->NextVirtualRegister(); | 355 int vreg = code()->NextVirtualRegister(); |
| 368 if (vreg >= UnallocatedOperand::kMaxVirtualRegisters) { | 356 if (vreg >= UnallocatedOperand::kMaxVirtualRegisters) { |
| 369 allocation_ok_ = false; | 357 allocation_ok_ = false; |
| 370 // Maintain the invariant that we return something below the maximum. | 358 // Maintain the invariant that we return something below the maximum. |
| 371 return 0; | 359 return 0; |
| 372 } | 360 } |
| (...skipping 27 matching lines...) | |
| 400 BitVector* ComputeLiveOut(const InstructionBlock* block); | 388 BitVector* ComputeLiveOut(const InstructionBlock* block); |
| 401 void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out); | 389 void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out); |
| 402 bool IsOutputRegisterOf(Instruction* instr, int index); | 390 bool IsOutputRegisterOf(Instruction* instr, int index); |
| 403 bool IsOutputDoubleRegisterOf(Instruction* instr, int index); | 391 bool IsOutputDoubleRegisterOf(Instruction* instr, int index); |
| 404 void ProcessInstructions(const InstructionBlock* block, BitVector* live); | 392 void ProcessInstructions(const InstructionBlock* block, BitVector* live); |
| 405 void MeetRegisterConstraints(const InstructionBlock* block); | 393 void MeetRegisterConstraints(const InstructionBlock* block); |
| 406 void MeetConstraintsBetween(Instruction* first, Instruction* second, | 394 void MeetConstraintsBetween(Instruction* first, Instruction* second, |
| 407 int gap_index); | 395 int gap_index); |
| 408 void MeetRegisterConstraintsForLastInstructionInBlock( | 396 void MeetRegisterConstraintsForLastInstructionInBlock( |
| 409 const InstructionBlock* block); | 397 const InstructionBlock* block); |
| 410 void ResolvePhis(const InstructionBlock* block); | 398 void ProcessPhis(const InstructionBlock* block); |
| 411 | 399 |
| 412 // Helper methods for building intervals. | 400 // Helper methods for building intervals. |
| 413 InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos, | 401 InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos, |
| 414 bool is_tagged); | 402 bool is_tagged); |
| 415 LiveRange* LiveRangeFor(InstructionOperand* operand); | 403 LiveRange* LiveRangeFor(InstructionOperand* operand); |
| 416 void Define(LifetimePosition position, InstructionOperand* operand, | 404 void Define(LifetimePosition position, InstructionOperand* operand, |
| 417 InstructionOperand* hint); | 405 InstructionOperand* hint); |
| 418 void Use(LifetimePosition block_start, LifetimePosition position, | 406 void Use(LifetimePosition block_start, LifetimePosition position, |
| 419 InstructionOperand* operand, InstructionOperand* hint); | 407 InstructionOperand* operand, InstructionOperand* hint); |
| 420 void AddConstraintsGapMove(int index, InstructionOperand* from, | 408 void AddConstraintsGapMove(int index, InstructionOperand* from, |
| (...skipping 54 matching lines...) | |
| 475 // If we are trying to spill a range inside the loop try to | 463 // If we are trying to spill a range inside the loop try to |
| 476 // hoist spill position out to the point just before the loop. | 464 // hoist spill position out to the point just before the loop. |
| 477 LifetimePosition FindOptimalSpillingPos(LiveRange* range, | 465 LifetimePosition FindOptimalSpillingPos(LiveRange* range, |
| 478 LifetimePosition pos); | 466 LifetimePosition pos); |
| 479 | 467 |
| 480 void Spill(LiveRange* range); | 468 void Spill(LiveRange* range); |
| 481 bool IsBlockBoundary(LifetimePosition pos); | 469 bool IsBlockBoundary(LifetimePosition pos); |
| 482 | 470 |
| 483 // Helper methods for resolving control flow. | 471 // Helper methods for resolving control flow. |
| 484 void ResolveControlFlow(const InstructionBlock* block, | 472 void ResolveControlFlow(const InstructionBlock* block, |
| 485 const LiveRange* cur_cover, | 473 InstructionOperand* cur_op, |
| 486 const InstructionBlock* pred, | 474 const InstructionBlock* pred, |
| 487 const LiveRange* pred_cover); | 475 InstructionOperand* pred_op); |
| 488 | 476 |
| 489 void SetLiveRangeAssignedRegister(LiveRange* range, int reg); | 477 void SetLiveRangeAssignedRegister(LiveRange* range, int reg); |
| 490 | 478 |
| 491 // Return parallel move that should be used to connect ranges split at the | 479 // Return parallel move that should be used to connect ranges split at the |
| 492 // given position. | 480 // given position. |
| 493 ParallelMove* GetConnectingParallelMove(LifetimePosition pos); | 481 ParallelMove* GetConnectingParallelMove(LifetimePosition pos); |
| 494 | 482 |
| 495 // Return the block which contains the given lifetime position. | 483 // Return the block which contains the given lifetime position. |
| 496 const InstructionBlock* GetInstructionBlock(LifetimePosition pos); | 484 const InstructionBlock* GetInstructionBlock(LifetimePosition pos); |
| 497 | 485 |
| (...skipping 50 matching lines...) | |
| 548 #endif | 536 #endif |
| 549 | 537 |
| 550 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); | 538 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); |
| 551 }; | 539 }; |
| 552 | 540 |
| 553 } | 541 } |
| 554 } | 542 } |
| 555 } // namespace v8::internal::compiler | 543 } // namespace v8::internal::compiler |
| 556 | 544 |
| 557 #endif // V8_REGISTER_ALLOCATOR_H_ | 545 #endif // V8_REGISTER_ALLOCATOR_H_ |
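
The phase comments in the new version above (MeetRegisterConstraints through ResolveControlFlow) describe the allocator's pipeline after the SSA-deconstruction phase (ResolvePhis) was folded out. Below is a minimal sketch of how a caller might drive the renumbered phases in order; the driver function itself is hypothetical and not part of this CL, and the success check between phases is only implied by the header's `allocation_ok_` flag (its accessor is not visible in this excerpt).

```cpp
// Hypothetical driver, assuming only the public phase methods declared in
// the header above. Real callers would presumably bail out when allocation
// fails (see the allocation_ok_ flag); that check is omitted here.
#include "src/compiler/register-allocator.h"

namespace v8 {
namespace internal {
namespace compiler {

void RunAllocationPhases(RegisterAllocator* allocator) {
  allocator->MeetRegisterConstraints();   // Phase 1: fixed register operands.
  allocator->BuildLiveRanges();           // Phase 2: liveness of virtual registers.
  allocator->AllocateGeneralRegisters();  // Phase 3: register assignments...
  allocator->AllocateDoubleRegisters();   //          ...for both register kinds.
  allocator->PopulatePointerMaps();       // Phase 4: values for pointer maps.
  allocator->ConnectRanges();             // Phase 5: moves for split ranges.
  allocator->ResolveControlFlow();        // Phase 6: moves across basic blocks.
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
```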