| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/compiler/register-allocator.h" |
| 6 | 6 |
| 7 #include "src/compiler/linkage.h" |
| 7 #include "src/hydrogen.h" | 8 #include "src/hydrogen.h" |
| 8 #include "src/lithium-allocator-inl.h" | |
| 9 #include "src/string-stream.h" | 9 #include "src/string-stream.h" |
| 10 | 10 |
| 11 #if V8_TARGET_ARCH_IA32 | |
| 12 #include "src/ia32/lithium-ia32.h" // NOLINT | |
| 13 #elif V8_TARGET_ARCH_X64 | |
| 14 #include "src/x64/lithium-x64.h" // NOLINT | |
| 15 #elif V8_TARGET_ARCH_ARM64 | |
| 16 #include "src/arm64/lithium-arm64.h" // NOLINT | |
| 17 #elif V8_TARGET_ARCH_ARM | |
| 18 #include "src/arm/lithium-arm.h" // NOLINT | |
| 19 #elif V8_TARGET_ARCH_MIPS | |
| 20 #include "src/mips/lithium-mips.h" // NOLINT | |
| 21 #elif V8_TARGET_ARCH_MIPS64 | |
| 22 #include "src/mips64/lithium-mips64.h" // NOLINT | |
| 23 #elif V8_TARGET_ARCH_X87 | |
| 24 #include "src/x87/lithium-x87.h" // NOLINT | |
| 25 #else | |
| 26 #error "Unknown architecture." | |
| 27 #endif | |
| 28 | |
| 29 namespace v8 { | 11 namespace v8 { |
| 30 namespace internal { | 12 namespace internal { |
| 13 namespace compiler { |
| 31 | 14 |
| 32 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { | 15 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
| 33 return a.Value() < b.Value() ? a : b; | 16 return a.Value() < b.Value() ? a : b; |
| 34 } | 17 } |
| 35 | 18 |
| 36 | 19 |
| 37 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { | 20 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { |
| 38 return a.Value() > b.Value() ? a : b; | 21 return a.Value() > b.Value() ? a : b; |
| 39 } | 22 } |
| 40 | 23 |
| 41 | 24 |
| 42 UsePosition::UsePosition(LifetimePosition pos, | 25 UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand, |
| 43 LOperand* operand, | 26 InstructionOperand* hint) |
| 44 LOperand* hint) | |
| 45 : operand_(operand), | 27 : operand_(operand), |
| 46 hint_(hint), | 28 hint_(hint), |
| 47 pos_(pos), | 29 pos_(pos), |
| 48 next_(NULL), | 30 next_(NULL), |
| 49 requires_reg_(false), | 31 requires_reg_(false), |
| 50 register_beneficial_(true) { | 32 register_beneficial_(true) { |
| 51 if (operand_ != NULL && operand_->IsUnallocated()) { | 33 if (operand_ != NULL && operand_->IsUnallocated()) { |
| 52 LUnallocated* unalloc = LUnallocated::cast(operand_); | 34 const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_); |
| 53 requires_reg_ = unalloc->HasRegisterPolicy() || | 35 requires_reg_ = unalloc->HasRegisterPolicy(); |
| 54 unalloc->HasDoubleRegisterPolicy(); | |
| 55 register_beneficial_ = !unalloc->HasAnyPolicy(); | 36 register_beneficial_ = !unalloc->HasAnyPolicy(); |
| 56 } | 37 } |
| 57 ASSERT(pos_.IsValid()); | 38 ASSERT(pos_.IsValid()); |
| 58 } | 39 } |
| 59 | 40 |
| 60 | 41 |
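
The two flags computed in this constructor encode a three-level preference for each use: a register is required, merely beneficial, or not wanted at all (note the NEW side folds the double-register case into `HasRegisterPolicy()`). A minimal standalone sketch of that mapping, with an illustrative `Policy` enum collapsing V8's richer policy set, not the actual `UnallocatedOperand` API:

```cpp
#include <cassert>

// Illustrative three-way collapse of an unallocated operand's policy.
enum class Policy { kAny, kNone, kRegister };

struct UseFlags {
  bool requires_reg;         // must be in a register at this position
  bool register_beneficial;  // a register helps, but is not mandatory
};

// Mirrors the constructor above: a register policy *requires* a
// register; any policy other than "any" makes a register *beneficial*.
UseFlags ClassifyUse(Policy p) {
  UseFlags f;
  f.requires_reg = (p == Policy::kRegister);
  f.register_beneficial = (p != Policy::kAny);
  return f;
}

int main() {
  assert(ClassifyUse(Policy::kRegister).requires_reg);
  assert(!ClassifyUse(Policy::kAny).register_beneficial);
  assert(!ClassifyUse(Policy::kNone).requires_reg &&
         ClassifyUse(Policy::kNone).register_beneficial);
  return 0;
}
```
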
| 61 bool UsePosition::HasHint() const { | 42 bool UsePosition::HasHint() const { |
| 62 return hint_ != NULL && !hint_->IsUnallocated(); | 43 return hint_ != NULL && !hint_->IsUnallocated(); |
| 63 } | 44 } |
| 64 | 45 |
| (...skipping 43 matching lines...) |
| 108 return false; | 89 return false; |
| 109 } | 90 } |
| 110 | 91 |
| 111 | 92 |
| 112 #endif | 93 #endif |
| 113 | 94 |
| 114 | 95 |
| 115 LiveRange::LiveRange(int id, Zone* zone) | 96 LiveRange::LiveRange(int id, Zone* zone) |
| 116 : id_(id), | 97 : id_(id), |
| 117 spilled_(false), | 98 spilled_(false), |
| 99 is_phi_(false), |
| 100 is_non_loop_phi_(false), |
| 118 kind_(UNALLOCATED_REGISTERS), | 101 kind_(UNALLOCATED_REGISTERS), |
| 119 assigned_register_(kInvalidAssignment), | 102 assigned_register_(kInvalidAssignment), |
| 120 last_interval_(NULL), | 103 last_interval_(NULL), |
| 121 first_interval_(NULL), | 104 first_interval_(NULL), |
| 122 first_pos_(NULL), | 105 first_pos_(NULL), |
| 123 parent_(NULL), | 106 parent_(NULL), |
| 124 next_(NULL), | 107 next_(NULL), |
| 125 current_interval_(NULL), | 108 current_interval_(NULL), |
| 126 last_processed_use_(NULL), | 109 last_processed_use_(NULL), |
| 127 current_hint_operand_(NULL), | 110 current_hint_operand_(NULL), |
| 128 spill_operand_(new(zone) LOperand()), | 111 spill_operand_(new (zone) InstructionOperand()), |
| 129 spill_start_index_(kMaxInt) { } | 112 spill_start_index_(kMaxInt) {} |
| 130 | 113 |
| 131 | 114 |
| 132 void LiveRange::set_assigned_register(int reg, Zone* zone) { | 115 void LiveRange::set_assigned_register(int reg, Zone* zone) { |
| 133 ASSERT(!HasRegisterAssigned() && !IsSpilled()); | 116 ASSERT(!HasRegisterAssigned() && !IsSpilled()); |
| 134 assigned_register_ = reg; | 117 assigned_register_ = reg; |
| 135 ConvertOperands(zone); | 118 ConvertOperands(zone); |
| 136 } | 119 } |
| 137 | 120 |
| 138 | 121 |
| 139 void LiveRange::MakeSpilled(Zone* zone) { | 122 void LiveRange::MakeSpilled(Zone* zone) { |
| 140 ASSERT(!IsSpilled()); | 123 ASSERT(!IsSpilled()); |
| 141 ASSERT(TopLevel()->HasAllocatedSpillOperand()); | 124 ASSERT(TopLevel()->HasAllocatedSpillOperand()); |
| 142 spilled_ = true; | 125 spilled_ = true; |
| 143 assigned_register_ = kInvalidAssignment; | 126 assigned_register_ = kInvalidAssignment; |
| 144 ConvertOperands(zone); | 127 ConvertOperands(zone); |
| 145 } | 128 } |
| 146 | 129 |
| 147 | 130 |
| 148 bool LiveRange::HasAllocatedSpillOperand() const { | 131 bool LiveRange::HasAllocatedSpillOperand() const { |
| 149 ASSERT(spill_operand_ != NULL); | 132 ASSERT(spill_operand_ != NULL); |
| 150 return !spill_operand_->IsIgnored(); | 133 return !spill_operand_->IsIgnored(); |
| 151 } | 134 } |
| 152 | 135 |
| 153 | 136 |
| 154 void LiveRange::SetSpillOperand(LOperand* operand) { | 137 void LiveRange::SetSpillOperand(InstructionOperand* operand) { |
| 155 ASSERT(!operand->IsUnallocated()); | 138 ASSERT(!operand->IsUnallocated()); |
| 156 ASSERT(spill_operand_ != NULL); | 139 ASSERT(spill_operand_ != NULL); |
| 157 ASSERT(spill_operand_->IsIgnored()); | 140 ASSERT(spill_operand_->IsIgnored()); |
| 158 spill_operand_->ConvertTo(operand->kind(), operand->index()); | 141 spill_operand_->ConvertTo(operand->kind(), operand->index()); |
| 159 } | 142 } |
| 160 | 143 |
| 161 | 144 |
| 162 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { | 145 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { |
| 163 UsePosition* use_pos = last_processed_use_; | 146 UsePosition* use_pos = last_processed_use_; |
| 164 if (use_pos == NULL) use_pos = first_pos(); | 147 if (use_pos == NULL) use_pos = first_pos(); |
| (...skipping 39 matching lines...) |
| 204 bool LiveRange::CanBeSpilled(LifetimePosition pos) { | 187 bool LiveRange::CanBeSpilled(LifetimePosition pos) { |
| 205 // We cannot spill a live range that has a use requiring a register | 188 // We cannot spill a live range that has a use requiring a register |
| 206 // at the current or the immediate next position. | 189 // at the current or the immediate next position. |
| 207 UsePosition* use_pos = NextRegisterPosition(pos); | 190 UsePosition* use_pos = NextRegisterPosition(pos); |
| 208 if (use_pos == NULL) return true; | 191 if (use_pos == NULL) return true; |
| 209 return | 192 return |
| 210 use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value(); | 193 use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value(); |
| 211 } | 194 } |
| 212 | 195 |
| 213 | 196 |
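
The comparison above leans on `LifetimePosition` packing two half-positions per instruction (an even value for the instruction's start, the following odd value for its end), so `pos.NextInstruction().InstructionEnd()` is the last half-position of the next instruction. A toy model of that encoding, assuming the two-slots-per-instruction layout:

```cpp
#include <cassert>

// Toy LifetimePosition: two half-positions per instruction.
// value = index * 2 is the instruction's start, value + 1 its end.
struct Pos {
  int value;
  static Pos FromInstructionIndex(int i) { return Pos{i * 2}; }
  Pos InstructionEnd() const { return Pos{(value & ~1) + 1}; }
  Pos NextInstruction() const { return Pos{(value & ~1) + 2}; }
};

// A range can be spilled at `pos` only if its next register-requiring
// use lies strictly beyond the end of the *next* instruction.
bool CanBeSpilledAt(Pos pos, Pos next_register_use) {
  return next_register_use.value >
         pos.NextInstruction().InstructionEnd().value;
}

int main() {
  Pos pos = Pos::FromInstructionIndex(4);                      // value 8
  assert(pos.NextInstruction().InstructionEnd().value == 11);
  assert(!CanBeSpilledAt(pos, Pos::FromInstructionIndex(5)));  // use at 10
  assert(CanBeSpilledAt(pos, Pos::FromInstructionIndex(6)));   // use at 12
  return 0;
}
```
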
| 214 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) { | 197 InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) { |
| 215 LOperand* op = NULL; | 198 InstructionOperand* op = NULL; |
| 216 if (HasRegisterAssigned()) { | 199 if (HasRegisterAssigned()) { |
| 217 ASSERT(!IsSpilled()); | 200 ASSERT(!IsSpilled()); |
| 218 switch (Kind()) { | 201 switch (Kind()) { |
| 219 case GENERAL_REGISTERS: | 202 case GENERAL_REGISTERS: |
| 220 op = LRegister::Create(assigned_register(), zone); | 203 op = RegisterOperand::Create(assigned_register(), zone); |
| 221 break; | 204 break; |
| 222 case DOUBLE_REGISTERS: | 205 case DOUBLE_REGISTERS: |
| 223 op = LDoubleRegister::Create(assigned_register(), zone); | 206 op = DoubleRegisterOperand::Create(assigned_register(), zone); |
| 224 break; | 207 break; |
| 225 default: | 208 default: |
| 226 UNREACHABLE(); | 209 UNREACHABLE(); |
| 227 } | 210 } |
| 228 } else if (IsSpilled()) { | 211 } else if (IsSpilled()) { |
| 229 ASSERT(!HasRegisterAssigned()); | 212 ASSERT(!HasRegisterAssigned()); |
| 230 op = TopLevel()->GetSpillOperand(); | 213 op = TopLevel()->GetSpillOperand(); |
| 231 ASSERT(!op->IsUnallocated()); | 214 ASSERT(!op->IsUnallocated()); |
| 232 } else { | 215 } else { |
| 233 LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE); | 216 UnallocatedOperand* unalloc = |
| 217 new (zone) UnallocatedOperand(UnallocatedOperand::NONE); |
| 234 unalloc->set_virtual_register(id_); | 218 unalloc->set_virtual_register(id_); |
| 235 op = unalloc; | 219 op = unalloc; |
| 236 } | 220 } |
| 237 return op; | 221 return op; |
| 238 } | 222 } |
| 239 | 223 |
| 240 | 224 |
| 241 UseInterval* LiveRange::FirstSearchIntervalForPosition( | 225 UseInterval* LiveRange::FirstSearchIntervalForPosition( |
| 242 LifetimePosition position) const { | 226 LifetimePosition position) const { |
| 243 if (current_interval_ == NULL) return first_interval_; | 227 if (current_interval_ == NULL) return first_interval_; |
| (...skipping 118 matching lines...) |
| 362 if (pos == NULL) return false; | 346 if (pos == NULL) return false; |
| 363 UsePosition* other_pos = other->first_pos(); | 347 UsePosition* other_pos = other->first_pos(); |
| 364 if (other_pos == NULL) return true; | 348 if (other_pos == NULL) return true; |
| 365 return pos->pos().Value() < other_pos->pos().Value(); | 349 return pos->pos().Value() < other_pos->pos().Value(); |
| 366 } | 350 } |
| 367 return start.Value() < other_start.Value(); | 351 return start.Value() < other_start.Value(); |
| 368 } | 352 } |
| 369 | 353 |
| 370 | 354 |
| 371 void LiveRange::ShortenTo(LifetimePosition start) { | 355 void LiveRange::ShortenTo(LifetimePosition start) { |
| 372 LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value()); | 356 RegisterAllocator::TraceAlloc("Shorten live range %d to [%d\n", |
| 357 id_, start.Value()); |
| 373 ASSERT(first_interval_ != NULL); | 358 ASSERT(first_interval_ != NULL); |
| 374 ASSERT(first_interval_->start().Value() <= start.Value()); | 359 ASSERT(first_interval_->start().Value() <= start.Value()); |
| 375 ASSERT(start.Value() < first_interval_->end().Value()); | 360 ASSERT(start.Value() < first_interval_->end().Value()); |
| 376 first_interval_->set_start(start); | 361 first_interval_->set_start(start); |
| 377 } | 362 } |
| 378 | 363 |
| 379 | 364 |
| 380 void LiveRange::EnsureInterval(LifetimePosition start, | 365 void LiveRange::EnsureInterval(LifetimePosition start, |
| 381 LifetimePosition end, | 366 LifetimePosition end, |
| 382 Zone* zone) { | 367 Zone* zone) { |
| 383 LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n", | 368 RegisterAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n", |
| 384 id_, | 369 id_, |
| 385 start.Value(), | 370 start.Value(), |
| 386 end.Value()); | 371 end.Value()); |
| 387 LifetimePosition new_end = end; | 372 LifetimePosition new_end = end; |
| 388 while (first_interval_ != NULL && | 373 while (first_interval_ != NULL && |
| 389 first_interval_->start().Value() <= end.Value()) { | 374 first_interval_->start().Value() <= end.Value()) { |
| 390 if (first_interval_->end().Value() > end.Value()) { | 375 if (first_interval_->end().Value() > end.Value()) { |
| 391 new_end = first_interval_->end(); | 376 new_end = first_interval_->end(); |
| 392 } | 377 } |
| 393 first_interval_ = first_interval_->next(); | 378 first_interval_ = first_interval_->next(); |
| 394 } | 379 } |
| 395 | 380 |
| 396 UseInterval* new_interval = new(zone) UseInterval(start, new_end); | 381 UseInterval* new_interval = new(zone) UseInterval(start, new_end); |
| 397 new_interval->next_ = first_interval_; | 382 new_interval->next_ = first_interval_; |
| 398 first_interval_ = new_interval; | 383 first_interval_ = new_interval; |
| 399 if (new_interval->next() == NULL) { | 384 if (new_interval->next() == NULL) { |
| 400 last_interval_ = new_interval; | 385 last_interval_ = new_interval; |
| 401 } | 386 } |
| 402 } | 387 } |
| 403 | 388 |
| 404 | 389 |
| 405 void LiveRange::AddUseInterval(LifetimePosition start, | 390 void LiveRange::AddUseInterval(LifetimePosition start, |
| 406 LifetimePosition end, | 391 LifetimePosition end, |
| 407 Zone* zone) { | 392 Zone* zone) { |
| 408 LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n", | 393 RegisterAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n", |
| 409 id_, | 394 id_, |
| 410 start.Value(), | 395 start.Value(), |
| 411 end.Value()); | 396 end.Value()); |
| 412 if (first_interval_ == NULL) { | 397 if (first_interval_ == NULL) { |
| 413 UseInterval* interval = new(zone) UseInterval(start, end); | 398 UseInterval* interval = new(zone) UseInterval(start, end); |
| 414 first_interval_ = interval; | 399 first_interval_ = interval; |
| 415 last_interval_ = interval; | 400 last_interval_ = interval; |
| 416 } else { | 401 } else { |
| 417 if (end.Value() == first_interval_->start().Value()) { | 402 if (end.Value() == first_interval_->start().Value()) { |
| 418 first_interval_->set_start(start); | 403 first_interval_->set_start(start); |
| 419 } else if (end.Value() < first_interval_->start().Value()) { | 404 } else if (end.Value() < first_interval_->start().Value()) { |
| 420 UseInterval* interval = new(zone) UseInterval(start, end); | 405 UseInterval* interval = new(zone) UseInterval(start, end); |
| 421 interval->set_next(first_interval_); | 406 interval->set_next(first_interval_); |
| 422 first_interval_ = interval; | 407 first_interval_ = interval; |
| 423 } else { | 408 } else { |
| 424 // The order of instruction processing (see ProcessInstructions) guarantees | 409 // The order of instruction processing (see ProcessInstructions) guarantees |
| 425 // that each new use interval either precedes or intersects with | 410 // that each new use interval either precedes or intersects with |
| 426 // the last added interval. | 411 // the last added interval. |
| 427 ASSERT(start.Value() < first_interval_->end().Value()); | 412 ASSERT(start.Value() < first_interval_->end().Value()); |
| 428 first_interval_->start_ = Min(start, first_interval_->start_); | 413 first_interval_->start_ = Min(start, first_interval_->start_); |
| 429 first_interval_->end_ = Max(end, first_interval_->end_); | 414 first_interval_->end_ = Max(end, first_interval_->end_); |
| 430 } | 415 } |
| 431 } | 416 } |
| 432 } | 417 } |
| 433 | 418 |
| 434 | 419 |
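
Because ProcessInstructions walks each block backwards, intervals reach AddUseInterval in non-increasing order, so the method only ever prepends to, coalesces with, or widens the head interval. A simplified, self-contained illustration of that invariant (a toy singly linked `Interval`, not the real `UseInterval` class; leaks memory for brevity):

```cpp
#include <cassert>
#include <cstdio>

struct Interval {
  int start, end;  // half-open [start, end)
  Interval* next;
};

// Simplified LiveRange::AddUseInterval: intervals arrive in
// non-increasing order, so only the head of the list is touched.
void AddUseInterval(Interval*& first, int start, int end) {
  if (first == NULL) {
    first = new Interval{start, end, NULL};
  } else if (end == first->start) {
    first->start = start;                     // coalesce with the head
  } else if (end < first->start) {
    first = new Interval{start, end, first};  // prepend a disjoint interval
  } else {
    assert(start < first->end);               // must intersect the head
    if (start < first->start) first->start = start;
    if (end > first->end) first->end = end;
  }
}

int main() {
  Interval* first = NULL;
  AddUseInterval(first, 20, 30);  // [20,30)
  AddUseInterval(first, 10, 20);  // coalesces -> [10,30)
  AddUseInterval(first, 2, 6);    // prepends  -> [2,6) [10,30)
  assert(first->start == 2 && first->next->start == 10);
  std::printf("[%d,%d) [%d,%d)\n", first->start, first->end,
              first->next->start, first->next->end);
  return 0;
}
```
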
| 435 void LiveRange::AddUsePosition(LifetimePosition pos, | 420 void LiveRange::AddUsePosition(LifetimePosition pos, |
| 436 LOperand* operand, | 421 InstructionOperand* operand, |
| 437 LOperand* hint, | 422 InstructionOperand* hint, Zone* zone) { |
| 438 Zone* zone) { | 423 RegisterAllocator::TraceAlloc("Add to live range %d use position %d\n", |
| 439 LAllocator::TraceAlloc("Add to live range %d use position %d\n", | 424 id_, |
| 440 id_, | 425 pos.Value()); |
| 441 pos.Value()); | |
| 442 UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint); | 426 UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint); |
| 443 UsePosition* prev_hint = NULL; | 427 UsePosition* prev_hint = NULL; |
| 444 UsePosition* prev = NULL; | 428 UsePosition* prev = NULL; |
| 445 UsePosition* current = first_pos_; | 429 UsePosition* current = first_pos_; |
| 446 while (current != NULL && current->pos().Value() < pos.Value()) { | 430 while (current != NULL && current->pos().Value() < pos.Value()) { |
| 447 prev_hint = current->HasHint() ? current : prev_hint; | 431 prev_hint = current->HasHint() ? current : prev_hint; |
| 448 prev = current; | 432 prev = current; |
| 449 current = current->next(); | 433 current = current->next(); |
| 450 } | 434 } |
| 451 | 435 |
| 452 if (prev == NULL) { | 436 if (prev == NULL) { |
| 453 use_pos->set_next(first_pos_); | 437 use_pos->set_next(first_pos_); |
| 454 first_pos_ = use_pos; | 438 first_pos_ = use_pos; |
| 455 } else { | 439 } else { |
| 456 use_pos->next_ = prev->next_; | 440 use_pos->next_ = prev->next_; |
| 457 prev->next_ = use_pos; | 441 prev->next_ = use_pos; |
| 458 } | 442 } |
| 459 | 443 |
| 460 if (prev_hint == NULL && use_pos->HasHint()) { | 444 if (prev_hint == NULL && use_pos->HasHint()) { |
| 461 current_hint_operand_ = hint; | 445 current_hint_operand_ = hint; |
| 462 } | 446 } |
| 463 } | 447 } |
| 464 | 448 |
| 465 | 449 |
| 466 void LiveRange::ConvertOperands(Zone* zone) { | 450 void LiveRange::ConvertOperands(Zone* zone) { |
| 467 LOperand* op = CreateAssignedOperand(zone); | 451 InstructionOperand* op = CreateAssignedOperand(zone); |
| 468 UsePosition* use_pos = first_pos(); | 452 UsePosition* use_pos = first_pos(); |
| 469 while (use_pos != NULL) { | 453 while (use_pos != NULL) { |
| 470 ASSERT(Start().Value() <= use_pos->pos().Value() && | 454 ASSERT(Start().Value() <= use_pos->pos().Value() && |
| 471 use_pos->pos().Value() <= End().Value()); | 455 use_pos->pos().Value() <= End().Value()); |
| 472 | 456 |
| 473 if (use_pos->HasOperand()) { | 457 if (use_pos->HasOperand()) { |
| 474 ASSERT(op->IsRegister() || op->IsDoubleRegister() || | 458 ASSERT(op->IsRegister() || op->IsDoubleRegister() || |
| 475 !use_pos->RequiresRegister()); | 459 !use_pos->RequiresRegister()); |
| 476 use_pos->operand()->ConvertTo(op->kind(), op->index()); | 460 use_pos->operand()->ConvertTo(op->kind(), op->index()); |
| 477 } | 461 } |
| (...skipping 42 matching lines...) |
| 520 if (a == NULL || a->start().Value() > other->End().Value()) break; | 504 if (a == NULL || a->start().Value() > other->End().Value()) break; |
| 521 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); | 505 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); |
| 522 } else { | 506 } else { |
| 523 b = b->next(); | 507 b = b->next(); |
| 524 } | 508 } |
| 525 } | 509 } |
| 526 return LifetimePosition::Invalid(); | 510 return LifetimePosition::Invalid(); |
| 527 } | 511 } |
| 528 | 512 |
| 529 | 513 |
| 530 LAllocator::LAllocator(int num_values, HGraph* graph) | 514 RegisterAllocator::RegisterAllocator(InstructionSequence* code) |
| 531 : zone_(graph->isolate()), | 515 : zone_(code->isolate()), |
| 532 chunk_(NULL), | 516 code_(code), |
| 533 live_in_sets_(graph->blocks()->length(), zone()), | 517 live_in_sets_(code->BasicBlockCount(), zone()), |
| 534 live_ranges_(num_values * 2, zone()), | 518 live_ranges_(code->VirtualRegisterCount() * 2, zone()), |
| 535 fixed_live_ranges_(NULL), | 519 fixed_live_ranges_(NULL), |
| 536 fixed_double_live_ranges_(NULL), | 520 fixed_double_live_ranges_(NULL), |
| 537 unhandled_live_ranges_(num_values * 2, zone()), | 521 unhandled_live_ranges_(code->VirtualRegisterCount() * 2, zone()), |
| 538 active_live_ranges_(8, zone()), | 522 active_live_ranges_(8, zone()), |
| 539 inactive_live_ranges_(8, zone()), | 523 inactive_live_ranges_(8, zone()), |
| 540 reusable_slots_(8, zone()), | 524 reusable_slots_(8, zone()), |
| 541 next_virtual_register_(num_values), | |
| 542 first_artificial_register_(num_values), | |
| 543 mode_(UNALLOCATED_REGISTERS), | 525 mode_(UNALLOCATED_REGISTERS), |
| 544 num_registers_(-1), | 526 num_registers_(-1), |
| 545 graph_(graph), | 527 allocation_ok_(true) {} |
| 546 has_osr_entry_(false), | |
| 547 allocation_ok_(true) { } | |
| 548 | 528 |
| 549 | 529 |
| 550 void LAllocator::InitializeLivenessAnalysis() { | 530 void RegisterAllocator::InitializeLivenessAnalysis() { |
| 551 // Initialize the live_in sets for each block to NULL. | 531 // Initialize the live_in sets for each block to NULL. |
| 552 int block_count = graph_->blocks()->length(); | 532 int block_count = code()->BasicBlockCount(); |
| 553 live_in_sets_.Initialize(block_count, zone()); | 533 live_in_sets_.Initialize(block_count, zone()); |
| 554 live_in_sets_.AddBlock(NULL, block_count, zone()); | 534 live_in_sets_.AddBlock(NULL, block_count, zone()); |
| 555 } | 535 } |
| 556 | 536 |
| 557 | 537 |
| 558 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) { | 538 BitVector* RegisterAllocator::ComputeLiveOut(BasicBlock* block) { |
| 559 // Compute live out for the given block, except not including backward | 539 // Compute live out for the given block, except not including backward |
| 560 // successor edges. | 540 // successor edges. |
| 561 BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone()); | 541 BitVector* live_out = |
| 542 new (zone()) BitVector(code()->VirtualRegisterCount(), zone()); |
| 562 | 543 |
| 563 // Process all successor blocks. | 544 // Process all successor blocks. |
| 564 for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) { | 545 BasicBlock::Successors successors = block->successors(); |
| 546 for (BasicBlock::Successors::iterator i = successors.begin(); |
| 547 i != successors.end(); ++i) { |
| 565 // Add values live on entry to the successor. Note the successor's | 548 // Add values live on entry to the successor. Note the successor's |
| 566 // live_in will not be computed yet for backwards edges. | 549 // live_in will not be computed yet for backwards edges. |
| 567 HBasicBlock* successor = it.Current(); | 550 BasicBlock* successor = *i; |
| 568 BitVector* live_in = live_in_sets_[successor->block_id()]; | 551 BitVector* live_in = live_in_sets_[successor->rpo_number_]; |
| 569 if (live_in != NULL) live_out->Union(*live_in); | 552 if (live_in != NULL) live_out->Union(*live_in); |
| 570 | 553 |
| 571 // All phi input operands corresponding to this successor edge are live | 554 // All phi input operands corresponding to this successor edge are live |
| 572 // out from this block. | 555 // out from this block. |
| 573 int index = successor->PredecessorIndexOf(block); | 556 int index = successor->PredecessorIndexOf(block); |
| 574 const ZoneList<HPhi*>* phis = successor->phis(); | 557 ASSERT(index >= 0); |
| 575 for (int i = 0; i < phis->length(); ++i) { | 558 ASSERT(index < static_cast<int>(successor->PredecessorCount())); |
| 576 HPhi* phi = phis->at(i); | 559 for (BasicBlock::const_iterator j = successor->begin(); |
| 577 if (!phi->OperandAt(index)->IsConstant()) { | 560 j != successor->end(); ++j) { |
| 578 live_out->Add(phi->OperandAt(index)->id()); | 561 Node* phi = *j; |
| 579 } | 562 if (phi->opcode() != IrOpcode::kPhi) continue; |
| 563 Node* input = phi->InputAt(index); |
| 564 live_out->Add(input->id()); |
| 580 } | 565 } |
| 581 } | 566 } |
| 582 | 567 |
| 583 return live_out; | 568 return live_out; |
| 584 } | 569 } |
| 585 | 570 |
| 586 | 571 |
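
This is the standard backward-dataflow step: a value is live out of a block if it is live into some successor, or if it feeds a phi in that successor along this edge; the live_in of a backward-edge successor is simply not computed yet and contributes nothing. A minimal sketch with `std::set` standing in for `BitVector` (the `Block` and `Phi` structures here are illustrative, not the compiler's):

```cpp
#include <set>
#include <vector>

struct Phi {
  int id;                   // virtual register the phi defines
  std::vector<int> inputs;  // one input vreg per predecessor, in edge order
};

struct Block {
  std::set<int> live_in;  // already computed for forward edges
  std::vector<Block*> successors;
  std::vector<Block*> predecessors;
  std::vector<Phi> phis;
};

static int PredecessorIndexOf(const Block* succ, const Block* pred) {
  for (size_t i = 0; i < succ->predecessors.size(); ++i) {
    if (succ->predecessors[i] == pred) return static_cast<int>(i);
  }
  return -1;
}

// live_out(B) = union over successors S of live_in(S), plus the phi
// inputs in S that flow along the edge B -> S.
static std::set<int> ComputeLiveOut(const Block* block) {
  std::set<int> live_out;
  for (Block* succ : block->successors) {
    live_out.insert(succ->live_in.begin(), succ->live_in.end());
    int index = PredecessorIndexOf(succ, block);
    if (index < 0) continue;
    for (const Phi& phi : succ->phis) live_out.insert(phi.inputs[index]);
  }
  return live_out;
}

int main() {
  Block b, s;
  b.successors.push_back(&s);
  s.predecessors.push_back(&b);
  s.live_in.insert(7);
  s.phis.push_back(Phi{9, {3}});  // phi v9 takes v3 along the edge b -> s
  std::set<int> out = ComputeLiveOut(&b);
  return (out.count(7) == 1 && out.count(3) == 1) ? 0 : 1;
}
```
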
| 587 void LAllocator::AddInitialIntervals(HBasicBlock* block, | 572 void RegisterAllocator::AddInitialIntervals(BasicBlock* block, |
| 588 BitVector* live_out) { | 573 BitVector* live_out) { |
| 589 // Add an interval that includes the entire block to the live range for | 574 // Add an interval that includes the entire block to the live range for |
| 590 // each live_out value. | 575 // each live_out value. |
| 591 LifetimePosition start = LifetimePosition::FromInstructionIndex( | 576 LifetimePosition start = LifetimePosition::FromInstructionIndex( |
| 592 block->first_instruction_index()); | 577 block->first_instruction_index()); |
| 593 LifetimePosition end = LifetimePosition::FromInstructionIndex( | 578 LifetimePosition end = LifetimePosition::FromInstructionIndex( |
| 594 block->last_instruction_index()).NextInstruction(); | 579 block->last_instruction_index()).NextInstruction(); |
| 595 BitVector::Iterator iterator(live_out); | 580 BitVector::Iterator iterator(live_out); |
| 596 while (!iterator.Done()) { | 581 while (!iterator.Done()) { |
| 597 int operand_index = iterator.Current(); | 582 int operand_index = iterator.Current(); |
| 598 LiveRange* range = LiveRangeFor(operand_index); | 583 LiveRange* range = LiveRangeFor(operand_index); |
| 599 range->AddUseInterval(start, end, zone()); | 584 range->AddUseInterval(start, end, zone()); |
| 600 iterator.Advance(); | 585 iterator.Advance(); |
| 601 } | 586 } |
| 602 } | 587 } |
| 603 | 588 |
| 604 | 589 |
| 605 int LAllocator::FixedDoubleLiveRangeID(int index) { | 590 int RegisterAllocator::FixedDoubleLiveRangeID(int index) { |
| 606 return -index - 1 - Register::kMaxNumAllocatableRegisters; | 591 return -index - 1 - Register::kMaxNumAllocatableRegisters; |
| 607 } | 592 } |
| 608 | 593 |
| 609 | 594 |
| 610 LOperand* LAllocator::AllocateFixed(LUnallocated* operand, | 595 InstructionOperand* RegisterAllocator::AllocateFixed( |
| 611 int pos, | 596 UnallocatedOperand* operand, int pos, bool is_tagged) { |
| 612 bool is_tagged) { | |
| 613 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register()); | 597 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register()); |
| 614 ASSERT(operand->HasFixedPolicy()); | 598 ASSERT(operand->HasFixedPolicy()); |
| 615 if (operand->HasFixedSlotPolicy()) { | 599 if (operand->HasFixedSlotPolicy()) { |
| 616 operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index()); | 600 operand->ConvertTo(InstructionOperand::STACK_SLOT, |
| 601 operand->fixed_slot_index()); |
| 617 } else if (operand->HasFixedRegisterPolicy()) { | 602 } else if (operand->HasFixedRegisterPolicy()) { |
| 618 int reg_index = operand->fixed_register_index(); | 603 int reg_index = operand->fixed_register_index(); |
| 619 operand->ConvertTo(LOperand::REGISTER, reg_index); | 604 operand->ConvertTo(InstructionOperand::REGISTER, reg_index); |
| 620 } else if (operand->HasFixedDoubleRegisterPolicy()) { | 605 } else if (operand->HasFixedDoubleRegisterPolicy()) { |
| 621 int reg_index = operand->fixed_register_index(); | 606 int reg_index = operand->fixed_register_index(); |
| 622 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index); | 607 operand->ConvertTo(InstructionOperand::DOUBLE_REGISTER, reg_index); |
| 623 } else { | 608 } else { |
| 624 UNREACHABLE(); | 609 UNREACHABLE(); |
| 625 } | 610 } |
| 626 if (is_tagged) { | 611 if (is_tagged) { |
| 627 TraceAlloc("Fixed reg is tagged at %d\n", pos); | 612 TraceAlloc("Fixed reg is tagged at %d\n", pos); |
| 628 LInstruction* instr = InstructionAt(pos); | 613 Instruction* instr = InstructionAt(pos); |
| 629 if (instr->HasPointerMap()) { | 614 if (instr->HasPointerMap()) { |
| 630 instr->pointer_map()->RecordPointer(operand, chunk()->zone()); | 615 instr->pointer_map()->RecordPointer(operand, code_zone()); |
| 631 } | 616 } |
| 632 } | 617 } |
| 633 return operand; | 618 return operand; |
| 634 } | 619 } |
| 635 | 620 |
| 636 | 621 |
| 637 LiveRange* LAllocator::FixedLiveRangeFor(int index) { | 622 LiveRange* RegisterAllocator::FixedLiveRangeFor(int index) { |
| 638 ASSERT(index < Register::kMaxNumAllocatableRegisters); | 623 ASSERT(index < Register::kMaxNumAllocatableRegisters); |
| 639 LiveRange* result = fixed_live_ranges_[index]; | 624 LiveRange* result = fixed_live_ranges_[index]; |
| 640 if (result == NULL) { | 625 if (result == NULL) { |
| 641 result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone()); | 626 // TODO(titzer): add a utility method to allocate a new LiveRange: |
| 627 // The LiveRange object itself can go in this zone, but the |
| 628 // InstructionOperand needs |
| 629 // to go in the code zone, since it may survive register allocation. |
| 630 result = new(zone()) LiveRange(FixedLiveRangeID(index), code_zone()); |
| 642 ASSERT(result->IsFixed()); | 631 ASSERT(result->IsFixed()); |
| 643 result->kind_ = GENERAL_REGISTERS; | 632 result->kind_ = GENERAL_REGISTERS; |
| 644 SetLiveRangeAssignedRegister(result, index); | 633 SetLiveRangeAssignedRegister(result, index); |
| 645 fixed_live_ranges_[index] = result; | 634 fixed_live_ranges_[index] = result; |
| 646 } | 635 } |
| 647 return result; | 636 return result; |
| 648 } | 637 } |
| 649 | 638 |
| 650 | 639 |
| 651 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) { | 640 LiveRange* RegisterAllocator::FixedDoubleLiveRangeFor(int index) { |
| 652 ASSERT(index < DoubleRegister::NumAllocatableRegisters()); | 641 ASSERT(index < DoubleRegister::NumAllocatableRegisters()); |
| 653 LiveRange* result = fixed_double_live_ranges_[index]; | 642 LiveRange* result = fixed_double_live_ranges_[index]; |
| 654 if (result == NULL) { | 643 if (result == NULL) { |
| 655 result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index), | 644 result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index), code_zone()); |
| 656 chunk()->zone()); | |
| 657 ASSERT(result->IsFixed()); | 645 ASSERT(result->IsFixed()); |
| 658 result->kind_ = DOUBLE_REGISTERS; | 646 result->kind_ = DOUBLE_REGISTERS; |
| 659 SetLiveRangeAssignedRegister(result, index); | 647 SetLiveRangeAssignedRegister(result, index); |
| 660 fixed_double_live_ranges_[index] = result; | 648 fixed_double_live_ranges_[index] = result; |
| 661 } | 649 } |
| 662 return result; | 650 return result; |
| 663 } | 651 } |
| 664 | 652 |
| 665 | 653 |
| 666 LiveRange* LAllocator::LiveRangeFor(int index) { | 654 LiveRange* RegisterAllocator::LiveRangeFor(int index) { |
| 667 if (index >= live_ranges_.length()) { | 655 if (index >= live_ranges_.length()) { |
| 668 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone()); | 656 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone()); |
| 669 } | 657 } |
| 670 LiveRange* result = live_ranges_[index]; | 658 LiveRange* result = live_ranges_[index]; |
| 671 if (result == NULL) { | 659 if (result == NULL) { |
| 672 result = new(zone()) LiveRange(index, chunk()->zone()); | 660 result = new(zone()) LiveRange(index, code_zone()); |
| 673 live_ranges_[index] = result; | 661 live_ranges_[index] = result; |
| 674 } | 662 } |
| 675 return result; | 663 return result; |
| 676 } | 664 } |
| 677 | 665 |
| 678 | 666 |
| 679 LGap* LAllocator::GetLastGap(HBasicBlock* block) { | 667 GapInstruction* RegisterAllocator::GetLastGap(BasicBlock* block) { |
| 680 int last_instruction = block->last_instruction_index(); | 668 int last_instruction = block->last_instruction_index(); |
| 681 int index = chunk_->NearestGapPos(last_instruction); | 669 return code()->GapAt(last_instruction - 1); |
| 682 return GapAt(index); | |
| 683 } | 670 } |
| 684 | 671 |
| 685 | 672 |
| 686 HPhi* LAllocator::LookupPhi(LOperand* operand) const { | 673 LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) { |
| 687 if (!operand->IsUnallocated()) return NULL; | |
| 688 int index = LUnallocated::cast(operand)->virtual_register(); | |
| 689 HValue* instr = graph_->LookupValue(index); | |
| 690 if (instr != NULL && instr->IsPhi()) { | |
| 691 return HPhi::cast(instr); | |
| 692 } | |
| 693 return NULL; | |
| 694 } | |
| 695 | |
| 696 | |
| 697 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) { | |
| 698 if (operand->IsUnallocated()) { | 674 if (operand->IsUnallocated()) { |
| 699 return LiveRangeFor(LUnallocated::cast(operand)->virtual_register()); | 675 return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); |
| 700 } else if (operand->IsRegister()) { | 676 } else if (operand->IsRegister()) { |
| 701 return FixedLiveRangeFor(operand->index()); | 677 return FixedLiveRangeFor(operand->index()); |
| 702 } else if (operand->IsDoubleRegister()) { | 678 } else if (operand->IsDoubleRegister()) { |
| 703 return FixedDoubleLiveRangeFor(operand->index()); | 679 return FixedDoubleLiveRangeFor(operand->index()); |
| 704 } else { | 680 } else { |
| 705 return NULL; | 681 return NULL; |
| 706 } | 682 } |
| 707 } | 683 } |
| 708 | 684 |
| 709 | 685 |
| 710 void LAllocator::Define(LifetimePosition position, | 686 void RegisterAllocator::Define(LifetimePosition position, |
| 711 LOperand* operand, | 687 InstructionOperand* operand, |
| 712 LOperand* hint) { | 688 InstructionOperand* hint) { |
| 713 LiveRange* range = LiveRangeFor(operand); | 689 LiveRange* range = LiveRangeFor(operand); |
| 714 if (range == NULL) return; | 690 if (range == NULL) return; |
| 715 | 691 |
| 716 if (range->IsEmpty() || range->Start().Value() > position.Value()) { | 692 if (range->IsEmpty() || range->Start().Value() > position.Value()) { |
| 717 // This can happen if there is a definition without a use. | 693 // This can happen if there is a definition without a use. |
| 718 range->AddUseInterval(position, position.NextInstruction(), zone()); | 694 range->AddUseInterval(position, position.NextInstruction(), zone()); |
| 719 range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone()); | 695 range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone()); |
| 720 } else { | 696 } else { |
| 721 range->ShortenTo(position); | 697 range->ShortenTo(position); |
| 722 } | 698 } |
| 723 | 699 |
| 724 if (operand->IsUnallocated()) { | 700 if (operand->IsUnallocated()) { |
| 725 LUnallocated* unalloc_operand = LUnallocated::cast(operand); | 701 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); |
| 726 range->AddUsePosition(position, unalloc_operand, hint, zone()); | 702 range->AddUsePosition(position, unalloc_operand, hint, zone()); |
| 727 } | 703 } |
| 728 } | 704 } |
| 729 | 705 |
| 730 | 706 |
| 731 void LAllocator::Use(LifetimePosition block_start, | 707 void RegisterAllocator::Use(LifetimePosition block_start, |
| 732 LifetimePosition position, | 708 LifetimePosition position, |
| 733 LOperand* operand, | 709 InstructionOperand* operand, |
| 734 LOperand* hint) { | 710 InstructionOperand* hint) { |
| 735 LiveRange* range = LiveRangeFor(operand); | 711 LiveRange* range = LiveRangeFor(operand); |
| 736 if (range == NULL) return; | 712 if (range == NULL) return; |
| 737 if (operand->IsUnallocated()) { | 713 if (operand->IsUnallocated()) { |
| 738 LUnallocated* unalloc_operand = LUnallocated::cast(operand); | 714 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); |
| 739 range->AddUsePosition(position, unalloc_operand, hint, zone()); | 715 range->AddUsePosition(position, unalloc_operand, hint, zone()); |
| 740 } | 716 } |
| 741 range->AddUseInterval(block_start, position, zone()); | 717 range->AddUseInterval(block_start, position, zone()); |
| 742 } | 718 } |
| 743 | 719 |
| 744 | 720 |
| 745 void LAllocator::AddConstraintsGapMove(int index, | 721 void RegisterAllocator::AddConstraintsGapMove(int index, |
| 746 LOperand* from, | 722 InstructionOperand* from, |
| 747 LOperand* to) { | 723 InstructionOperand* to) { |
| 748 LGap* gap = GapAt(index); | 724 GapInstruction* gap = code()->GapAt(index); |
| 749 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, | 725 ParallelMove* move = |
| 750 chunk()->zone()); | 726 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); |
| 751 if (from->IsUnallocated()) { | 727 if (from->IsUnallocated()) { |
| 752 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); | 728 const ZoneList<MoveOperands>* move_operands = move->move_operands(); |
| 753 for (int i = 0; i < move_operands->length(); ++i) { | 729 for (int i = 0; i < move_operands->length(); ++i) { |
| 754 LMoveOperands cur = move_operands->at(i); | 730 MoveOperands cur = move_operands->at(i); |
| 755 LOperand* cur_to = cur.destination(); | 731 InstructionOperand* cur_to = cur.destination(); |
| 756 if (cur_to->IsUnallocated()) { | 732 if (cur_to->IsUnallocated()) { |
| 757 if (LUnallocated::cast(cur_to)->virtual_register() == | 733 if (UnallocatedOperand::cast(cur_to)->virtual_register() == |
| 758 LUnallocated::cast(from)->virtual_register()) { | 734 UnallocatedOperand::cast(from)->virtual_register()) { |
| 759 move->AddMove(cur.source(), to, chunk()->zone()); | 735 move->AddMove(cur.source(), to, code_zone()); |
| 760 return; | 736 return; |
| 761 } | 737 } |
| 762 } | 738 } |
| 763 } | 739 } |
| 764 } | 740 } |
| 765 move->AddMove(from, to, chunk()->zone()); | 741 move->AddMove(from, to, code_zone()); |
| 766 } | 742 } |
| 767 | 743 |
| 768 | 744 |
| 769 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) { | 745 void RegisterAllocator::MeetRegisterConstraints(BasicBlock* block) { |
| 770 int start = block->first_instruction_index(); | 746 int start = block->first_instruction_index(); |
| 771 int end = block->last_instruction_index(); | 747 int end = block->last_instruction_index(); |
| 772 if (start == -1) return; | 748 ASSERT_NE(-1, start); |
| 773 for (int i = start; i <= end; ++i) { | 749 for (int i = start; i <= end; ++i) { |
| 774 if (IsGapAt(i)) { | 750 if (code()->IsGapAt(i)) { |
| 775 LInstruction* instr = NULL; | 751 Instruction* instr = NULL; |
| 776 LInstruction* prev_instr = NULL; | 752 Instruction* prev_instr = NULL; |
| 777 if (i < end) instr = InstructionAt(i + 1); | 753 if (i < end) instr = InstructionAt(i + 1); |
| 778 if (i > start) prev_instr = InstructionAt(i - 1); | 754 if (i > start) prev_instr = InstructionAt(i - 1); |
| 779 MeetConstraintsBetween(prev_instr, instr, i); | 755 MeetConstraintsBetween(prev_instr, instr, i); |
| 780 if (!AllocationOk()) return; | 756 if (!AllocationOk()) return; |
| 781 } | 757 } |
| 782 } | 758 } |
| 783 } | 759 } |
| 784 | 760 |
| 785 | 761 |
| 786 void LAllocator::MeetConstraintsBetween(LInstruction* first, | 762 void RegisterAllocator::MeetConstraintsBetween(Instruction* first, |
| 787 LInstruction* second, | 763 Instruction* second, |
| 788 int gap_index) { | 764 int gap_index) { |
| 789 // Handle fixed temporaries. | |
| 790 if (first != NULL) { | 765 if (first != NULL) { |
| 791 for (TempIterator it(first); !it.Done(); it.Advance()) { | 766 // Handle fixed temporaries. |
| 792 LUnallocated* temp = LUnallocated::cast(it.Current()); | 767 for (size_t i = 0; i < first->TempCount(); i++) { |
| 768 UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i)); |
| 793 if (temp->HasFixedPolicy()) { | 769 if (temp->HasFixedPolicy()) { |
| 794 AllocateFixed(temp, gap_index - 1, false); | 770 AllocateFixed(temp, gap_index - 1, false); |
| 795 } | 771 } |
| 796 } | 772 } |
| 797 } | 773 |
| 798 | 774 // Handle constant/fixed output operands. |
| 799 // Handle fixed output operand. | 775 for (size_t i = 0; i < first->OutputCount(); i++) { |
| 800 if (first != NULL && first->Output() != NULL) { | 776 InstructionOperand* output = first->OutputAt(i); |
| 801 LUnallocated* first_output = LUnallocated::cast(first->Output()); | 777 if (output->IsConstant()) { |
| 802 LiveRange* range = LiveRangeFor(first_output->virtual_register()); | 778 int output_vreg = output->index(); |
| 803 bool assigned = false; | 779 LiveRange* range = LiveRangeFor(output_vreg); |
| 804 if (first_output->HasFixedPolicy()) { | |
| 805 LUnallocated* output_copy = first_output->CopyUnconstrained( | |
| 806 chunk()->zone()); | |
| 807 bool is_tagged = HasTaggedValue(first_output->virtual_register()); | |
| 808 AllocateFixed(first_output, gap_index, is_tagged); | |
| 809 | |
| 810 // This value is produced on the stack, we never need to spill it. | |
| 811 if (first_output->IsStackSlot()) { | |
| 812 range->SetSpillOperand(first_output); | |
| 813 range->SetSpillStartIndex(gap_index - 1); | 780 range->SetSpillStartIndex(gap_index - 1); |
| 814 assigned = true; | 781 range->SetSpillOperand(output); |
| 815 } | 782 } else { |
| 816 chunk_->AddGapMove(gap_index, first_output, output_copy); | 783 UnallocatedOperand* first_output = UnallocatedOperand::cast(output); |
| 817 } | 784 LiveRange* range = LiveRangeFor(first_output->virtual_register()); |
| 818 | 785 bool assigned = false; |
| 819 if (!assigned) { | 786 if (first_output->HasFixedPolicy()) { |
| 820 range->SetSpillStartIndex(gap_index); | 787 UnallocatedOperand* output_copy = |
| 821 | 788 first_output->CopyUnconstrained(code_zone()); |
| 822 // This move to the spill operand is not a real use. Liveness analysis | 789 bool is_tagged = HasTaggedValue(first_output->virtual_register()); |
| 823 // and splitting of live ranges do not account for it. | 790 AllocateFixed(first_output, gap_index, is_tagged); |
| 824 // Thus it should be inserted at a lifetime position corresponding to | 791 |
| 825 // the instruction end. | 792 // This value is produced on the stack, we never need to spill it. |
| 826 LGap* gap = GapAt(gap_index); | 793 if (first_output->IsStackSlot()) { |
| 827 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, | 794 range->SetSpillOperand(first_output); |
| 828 chunk()->zone()); | 795 range->SetSpillStartIndex(gap_index - 1); |
| 829 move->AddMove(first_output, range->GetSpillOperand(), | 796 assigned = true; |
| 830 chunk()->zone()); | 797 } |
| 831 } | 798 code()->AddGapMove(gap_index, first_output, output_copy); |
| 832 } | 799 } |
| 833 | 800 |
| 834 // Handle fixed input operands of second instruction. | 801 if (!assigned) { |
| 802 range->SetSpillStartIndex(gap_index); |
| 803 |
| 804 // This move to the spill operand is not a real use. Liveness analysis |
| 805 // and splitting of live ranges do not account for it. |
| 806 // Thus it should be inserted at a lifetime position corresponding to |
| 807 // the instruction end. |
| 808 GapInstruction* gap = code()->GapAt(gap_index); |
| 809 ParallelMove* move = |
| 810 gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone()); |
| 811 move->AddMove(first_output, range->GetSpillOperand(), code_zone()); |
| 812 } |
| 813 } |
| 814 } |
| 815 } |
| 816 |
| 835 if (second != NULL) { | 817 if (second != NULL) { |
| 836 for (UseIterator it(second); !it.Done(); it.Advance()) { | 818 // Handle fixed input operands of second instruction. |
| 837 LUnallocated* cur_input = LUnallocated::cast(it.Current()); | 819 for (size_t i = 0; i < second->InputCount(); i++) { |
| 820 InstructionOperand* input = second->InputAt(i); |
| 821 if (input->IsImmediate()) continue; // Ignore immediates. |
| 822 UnallocatedOperand* cur_input = UnallocatedOperand::cast(input); |
| 838 if (cur_input->HasFixedPolicy()) { | 823 if (cur_input->HasFixedPolicy()) { |
| 839 LUnallocated* input_copy = cur_input->CopyUnconstrained( | 824 UnallocatedOperand* input_copy = |
| 840 chunk()->zone()); | 825 cur_input->CopyUnconstrained(code_zone()); |
| 841 bool is_tagged = HasTaggedValue(cur_input->virtual_register()); | 826 bool is_tagged = HasTaggedValue(cur_input->virtual_register()); |
| 842 AllocateFixed(cur_input, gap_index + 1, is_tagged); | 827 AllocateFixed(cur_input, gap_index + 1, is_tagged); |
| 843 AddConstraintsGapMove(gap_index, input_copy, cur_input); | 828 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 844 } else if (cur_input->HasWritableRegisterPolicy()) { | 829 } |
| 845 // The live range of writable input registers always goes until the end | 830 } |
| 846 // of the instruction. | 831 |
| 847 ASSERT(!cur_input->IsUsedAtStart()); | 832 // Handle "output same as input" for second instruction. |
| 848 | 833 for (size_t i = 0; i < second->OutputCount(); i++) { |
| 849 LUnallocated* input_copy = cur_input->CopyUnconstrained( | 834 InstructionOperand* output = second->OutputAt(i); |
| 850 chunk()->zone()); | 835 if (!output->IsUnallocated()) continue; |
| 851 int vreg = GetVirtualRegister(); | 836 UnallocatedOperand* second_output = UnallocatedOperand::cast(output); |
| 852 if (!AllocationOk()) return; | 837 if (second_output->HasSameAsInputPolicy()) { |
| 853 cur_input->set_virtual_register(vreg); | 838 ASSERT(second->OutputCount() == 1); // Only valid for one output. |
| 854 | 839 UnallocatedOperand* cur_input = |
| 855 if (RequiredRegisterKind(input_copy->virtual_register()) == | 840 UnallocatedOperand::cast(second->InputAt(0)); |
| 856 DOUBLE_REGISTERS) { | 841 int output_vreg = second_output->virtual_register(); |
| 857 double_artificial_registers_.Add( | 842 int input_vreg = cur_input->virtual_register(); |
| 858 cur_input->virtual_register() - first_artificial_register_, | 843 |
| 859 zone()); | 844 UnallocatedOperand* input_copy = |
| 860 } | 845 cur_input->CopyUnconstrained(code_zone()); |
| 861 | 846 cur_input->set_virtual_register(second_output->virtual_register()); |
| 862 AddConstraintsGapMove(gap_index, input_copy, cur_input); | 847 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 863 } | 848 |
| 864 } | 849 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { |
| 865 } | 850 int index = gap_index + 1; |
| 866 | 851 Instruction* instr = InstructionAt(index); |
| 867 // Handle "output same as input" for second instruction. | 852 if (instr->HasPointerMap()) { |
| 868 if (second != NULL && second->Output() != NULL) { | 853 instr->pointer_map()->RecordPointer(input_copy, code_zone()); |
| 869 LUnallocated* second_output = LUnallocated::cast(second->Output()); | 854 } |
| 870 if (second_output->HasSameAsInputPolicy()) { | 855 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { |
| 871 LUnallocated* cur_input = LUnallocated::cast(second->FirstInput()); | 856 // The input is assumed to immediately have a tagged representation, |
| 872 int output_vreg = second_output->virtual_register(); | 857 // before the pointer map can be used. I.e. the pointer map at the |
| 873 int input_vreg = cur_input->virtual_register(); | 858 // instruction will include the output operand (whose value at the |
| 874 | 859 // beginning of the instruction is equal to the input operand). If |
| 875 LUnallocated* input_copy = cur_input->CopyUnconstrained( | 860 // this is not desired, then the pointer map at this instruction needs |
| 876 chunk()->zone()); | 861 // to be adjusted manually. |
| 877 cur_input->set_virtual_register(second_output->virtual_register()); | 862 } |
| 878 AddConstraintsGapMove(gap_index, input_copy, cur_input); | 863 } |
| 879 | 864 } |
| 880 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { | 865 } |
| 881 int index = gap_index + 1; | 866 } |
| 882 LInstruction* instr = InstructionAt(index); | 867 |
| 883 if (instr->HasPointerMap()) { | 868 |
| 884 instr->pointer_map()->RecordPointer(input_copy, chunk()->zone()); | 869 bool RegisterAllocator::IsOutputRegisterOf( |
| 885 } | 870 Instruction* instr, int index) { |
| 886 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { | 871 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 887 // The input is assumed to immediately have a tagged representation, | 872 InstructionOperand* output = instr->OutputAt(i); |
| 888 // before the pointer map can be used. I.e. the pointer map at the | 873 if (output->IsRegister() && output->index() == index) return true; |
| 889 // instruction will include the output operand (whose value at the | 874 } |
| 890 // beginning of the instruction is equal to the input operand). If | 875 return false; |
| 891 // this is not desired, then the pointer map at this instruction needs | 876 } |
| 892 // to be adjusted manually. | 877 |
| 893 } | 878 |
| 894 } | 879 bool RegisterAllocator::IsOutputDoubleRegisterOf( |
| 895 } | 880 Instruction* instr, int index) { |
| 896 } | 881 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 897 | 882 InstructionOperand* output = instr->OutputAt(i); |
| 898 | 883 if (output->IsDoubleRegister() && output->index() == index) return true; |
| 899 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) { | 884 } |
| 885 return false; |
| 886 } |
| 887 |
| 888 |
| 889 void RegisterAllocator::ProcessInstructions(BasicBlock* block, |
| 890 BitVector* live) { |
| 900 int block_start = block->first_instruction_index(); | 891 int block_start = block->first_instruction_index(); |
| 901 int index = block->last_instruction_index(); | |
| 902 | 892 |
| 903 LifetimePosition block_start_position = | 893 LifetimePosition block_start_position = |
| 904 LifetimePosition::FromInstructionIndex(block_start); | 894 LifetimePosition::FromInstructionIndex(block_start); |
| 905 | 895 |
| 906 while (index >= block_start) { | 896 for (int index = block->last_instruction_index(); |
| 897 index >= block_start; |
| 898 index--) { |
| 907 LifetimePosition curr_position = | 899 LifetimePosition curr_position = |
| 908 LifetimePosition::FromInstructionIndex(index); | 900 LifetimePosition::FromInstructionIndex(index); |
| 909 | 901 |
| 910 if (IsGapAt(index)) { | 902 Instruction* instr = InstructionAt(index); |
| 911 // We have a gap at this position. | 903 ASSERT(instr != NULL); |
| 912 LGap* gap = GapAt(index); | 904 if (instr->IsGapMoves()) { |
| 913 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, | 905 // Process the moves of the gap instruction, making their sources live. |
| 914 chunk()->zone()); | 906 GapInstruction* gap = code()->GapAt(index); |
| 915 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); | 907 |
| 908 // TODO(titzer): no need to create the parallel move if it doesn't exist. |
| 909 ParallelMove* move = |
| 910 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); |
| 911 const ZoneList<MoveOperands>* move_operands = move->move_operands(); |
| 916 for (int i = 0; i < move_operands->length(); ++i) { | 912 for (int i = 0; i < move_operands->length(); ++i) { |
| 917 LMoveOperands* cur = &move_operands->at(i); | 913 MoveOperands* cur = &move_operands->at(i); |
| 918 if (cur->IsIgnored()) continue; | 914 if (cur->IsIgnored()) continue; |
| 919 LOperand* from = cur->source(); | 915 InstructionOperand* from = cur->source(); |
| 920 LOperand* to = cur->destination(); | 916 InstructionOperand* to = cur->destination(); |
| 921 HPhi* phi = LookupPhi(to); | 917 InstructionOperand* hint = to; |
| 922 LOperand* hint = to; | 918 if (to->IsUnallocated()) { |
| 923 if (phi != NULL) { | 919 int to_vreg = UnallocatedOperand::cast(to)->virtual_register(); |
| 924 // This is a phi resolving move. | 920 LiveRange* to_range = LiveRangeFor(to_vreg); |
| 925 if (!phi->block()->IsLoopHeader()) { | 921 if (to_range->is_phi()) { |
| 926 hint = LiveRangeFor(phi->id())->current_hint_operand(); | 922 if (to_range->is_non_loop_phi()) { |
| 927 } | 923 hint = to_range->current_hint_operand(); |
| 928 } else { | 924 } |
| 929 if (to->IsUnallocated()) { | 925 } else { |
| 930 if (live->Contains(LUnallocated::cast(to)->virtual_register())) { | 926 if (live->Contains(to_vreg)) { |
| 931 Define(curr_position, to, from); | 927 Define(curr_position, to, from); |
| 932 live->Remove(LUnallocated::cast(to)->virtual_register()); | 928 live->Remove(to_vreg); |
| 933 } else { | 929 } else { |
| 934 cur->Eliminate(); | 930 cur->Eliminate(); |
| 935 continue; | 931 continue; |
| 936 } | 932 } |
| 937 } else { | 933 } |
| 938 Define(curr_position, to, from); | 934 } else { |
| 939 } | 935 Define(curr_position, to, from); |
| 940 } | 936 } |
| 941 Use(block_start_position, curr_position, from, hint); | 937 Use(block_start_position, curr_position, from, hint); |
| 942 if (from->IsUnallocated()) { | 938 if (from->IsUnallocated()) { |
| 943 live->Add(LUnallocated::cast(from)->virtual_register()); | 939 live->Add(UnallocatedOperand::cast(from)->virtual_register()); |
| 944 } | 940 } |
| 945 } | 941 } |
| 946 } else { | 942 } else { |
| 947 ASSERT(!IsGapAt(index)); | 943 // Process output, inputs, and temps of this non-gap instruction. |
| 948 LInstruction* instr = InstructionAt(index); | 944 for (size_t i = 0; i < instr->OutputCount(); i++) { |
| 949 | 945 InstructionOperand* output = instr->OutputAt(i); |
| 950 if (instr != NULL) { | 946 if (output->IsUnallocated()) { |
| 951 LOperand* output = instr->Output(); | 947 int out_vreg = UnallocatedOperand::cast(output)->virtual_register(); |
| 952 if (output != NULL) { | 948 live->Remove(out_vreg); |
| 953 if (output->IsUnallocated()) { | 949 } else if (output->IsConstant()) { |
| 954 live->Remove(LUnallocated::cast(output)->virtual_register()); | 950 int out_vreg = output->index(); |
| 955 } | 951 live->Remove(out_vreg); |
| 956 Define(curr_position, output, NULL); | 952 } |
| 957 } | 953 Define(curr_position, output, NULL); |
| 958 | 954 } |
| 959 if (instr->ClobbersRegisters()) { | 955 |
| 960 for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) { | 956 if (instr->ClobbersRegisters()) { |
| 961 if (output == NULL || !output->IsRegister() || | 957 for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) { |
| 962 output->index() != i) { | 958 if (!IsOutputRegisterOf(instr, i)) { |
| 963 LiveRange* range = FixedLiveRangeFor(i); | 959 LiveRange* range = FixedLiveRangeFor(i); |
| 964 range->AddUseInterval(curr_position, | 960 range->AddUseInterval(curr_position, |
| 965 curr_position.InstructionEnd(), | 961 curr_position.InstructionEnd(), |
| 966 zone()); | 962 zone()); |
| 963 } |
| 964 } |
| 965 } |
| 966 |
| 967 if (instr->ClobbersDoubleRegisters()) { |
| 968 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) { |
| 969 if (!IsOutputDoubleRegisterOf(instr, i)) { |
| 970 LiveRange* range = FixedDoubleLiveRangeFor(i); |
| 971 range->AddUseInterval(curr_position, |
| 972 curr_position.InstructionEnd(), |
| 973 zone()); |
| 974 } |
| 975 } |
| 976 } |
| 977 |
| 978 for (size_t i = 0; i < instr->InputCount(); i++) { |
| 979 InstructionOperand* input = instr->InputAt(i); |
| 980 if (input->IsImmediate()) continue; // Ignore immediates. |
| 981 LifetimePosition use_pos; |
| 982 if (input->IsUnallocated() && |
| 983 UnallocatedOperand::cast(input)->IsUsedAtStart()) { |
| 984 use_pos = curr_position; |
| 985 } else { |
| 986 use_pos = curr_position.InstructionEnd(); |
| 987 } |
| 988 |
| 989 Use(block_start_position, use_pos, input, NULL); |
| 990 if (input->IsUnallocated()) { |
| 991 live->Add(UnallocatedOperand::cast(input)->virtual_register()); |
| 992 } |
| 993 } |
| 994 |
| 995 for (size_t i = 0; i < instr->TempCount(); i++) { |
| 996 InstructionOperand* temp = instr->TempAt(i); |
| 997 if (instr->ClobbersTemps()) { |
| 998 if (temp->IsRegister()) continue; |
| 999 if (temp->IsUnallocated()) { |
| 1000 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp); |
| 1001 if (temp_unalloc->HasFixedPolicy()) { |
| 1002 continue; |
| 967 } | 1003 } |
| 968 } | 1004 } |
| 969 } | 1005 } |
| 970 | 1006 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL); |
| 971 if (instr->ClobbersDoubleRegisters(isolate())) { | 1007 Define(curr_position, temp, NULL); |
| 972 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) { | 1008 } |
| 973 if (output == NULL || !output->IsDoubleRegister() || | 1009 } |
| 974 output->index() != i) { | 1010 } |
| 975 LiveRange* range = FixedDoubleLiveRangeFor(i); | 1011 } |
| 976 range->AddUseInterval(curr_position, | 1012 |
| 977 curr_position.InstructionEnd(), | 1013 |
| 978 zone()); | 1014 void RegisterAllocator::ResolvePhis(BasicBlock* block) { |
| 979 } | 1015 for (BasicBlock::const_iterator i = block->begin(); |
| 980 } | 1016 i != block->end(); ++i) { |
| 981 } | 1017 Node* phi = *i; |
| 982 | 1018 if (phi->opcode() != IrOpcode::kPhi) continue; |
| 983 for (UseIterator it(instr); !it.Done(); it.Advance()) { | 1019 |
| 984 LOperand* input = it.Current(); | 1020 UnallocatedOperand* phi_operand = |
| 985 | 1021 new (code_zone()) UnallocatedOperand(UnallocatedOperand::NONE); |
| 986 LifetimePosition use_pos; | |
| 987 if (input->IsUnallocated() && | |
| 988 LUnallocated::cast(input)->IsUsedAtStart()) { | |
| 989 use_pos = curr_position; | |
| 990 } else { | |
| 991 use_pos = curr_position.InstructionEnd(); | |
| 992 } | |
| 993 | |
| 994 Use(block_start_position, use_pos, input, NULL); | |
| 995 if (input->IsUnallocated()) { | |
| 996 live->Add(LUnallocated::cast(input)->virtual_register()); | |
| 997 } | |
| 998 } | |
| 999 | |
| 1000 for (TempIterator it(instr); !it.Done(); it.Advance()) { | |
| 1001 LOperand* temp = it.Current(); | |
| 1002 if (instr->ClobbersTemps()) { | |
| 1003 if (temp->IsRegister()) continue; | |
| 1004 if (temp->IsUnallocated()) { | |
| 1005 LUnallocated* temp_unalloc = LUnallocated::cast(temp); | |
| 1006 if (temp_unalloc->HasFixedPolicy()) { | |
| 1007 continue; | |
| 1008 } | |
| 1009 } | |
| 1010 } | |
| 1011 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL); | |
| 1012 Define(curr_position, temp, NULL); | |
| 1013 | |
| 1014 if (temp->IsUnallocated()) { | |
| 1015 LUnallocated* temp_unalloc = LUnallocated::cast(temp); | |
| 1016 if (temp_unalloc->HasDoubleRegisterPolicy()) { | |
| 1017 double_artificial_registers_.Add( | |
| 1018 temp_unalloc->virtual_register() - first_artificial_register_, | |
| 1019 zone()); | |
| 1020 } | |
| 1021 } | |
| 1022 } | |
| 1023 } | |
| 1024 } | |
| 1025 | |
| 1026 index = index - 1; | |
| 1027 } | |
| 1028 } | |
| 1029 | |
| 1030 | |
| 1031 void LAllocator::ResolvePhis(HBasicBlock* block) { | |
| 1032 const ZoneList<HPhi*>* phis = block->phis(); | |
| 1033 for (int i = 0; i < phis->length(); ++i) { | |
| 1034 HPhi* phi = phis->at(i); | |
| 1035 LUnallocated* phi_operand = | |
| 1036 new(chunk()->zone()) LUnallocated(LUnallocated::NONE); | |
| 1037 phi_operand->set_virtual_register(phi->id()); | 1022 phi_operand->set_virtual_register(phi->id()); |
| 1038 for (int j = 0; j < phi->OperandCount(); ++j) { | 1023 |
| 1039 HValue* op = phi->OperandAt(j); | 1024 int j = 0; |
| 1040 LOperand* operand = NULL; | 1025 Node::Inputs inputs = phi->inputs(); |
| 1041 if (op->IsConstant() && op->EmitAtUses()) { | 1026 for (Node::Inputs::iterator iter(inputs.begin()); |
| 1042 HConstant* constant = HConstant::cast(op); | 1027 iter != inputs.end(); ++iter, ++j) { |
| 1043 operand = chunk_->DefineConstantOperand(constant); | 1028 Node* op = *iter; |
| 1044 } else { | 1029 // TODO(mstarzinger): Use a ValueInputIterator instead. |
| 1045 ASSERT(!op->EmitAtUses()); | 1030 if (j >= block->PredecessorCount()) continue; |
| 1046 LUnallocated* unalloc = | 1031 UnallocatedOperand* operand = |
| 1047 new(chunk()->zone()) LUnallocated(LUnallocated::ANY); | 1032 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY); |
| 1048 unalloc->set_virtual_register(op->id()); | 1033 operand->set_virtual_register(op->id()); |
| 1049 operand = unalloc; | 1034 BasicBlock* cur_block = block->PredecessorAt(j); |
| 1050 } | |
| 1051 HBasicBlock* cur_block = block->predecessors()->at(j); | |
| 1052 // The gap move must be added without any special processing, as in | 1035 // The gap move must be added without any special processing, as in |
| 1053 // AddConstraintsGapMove. | 1036 // AddConstraintsGapMove. |
| 1054 chunk_->AddGapMove(cur_block->last_instruction_index() - 1, | 1037 code()->AddGapMove(cur_block->last_instruction_index() - 1, |
| 1055 operand, | 1038 operand, |
| 1056 phi_operand); | 1039 phi_operand); |
| 1057 | 1040 |
| 1058 // We are going to insert a move before the branch instruction. | 1041 Instruction* branch = InstructionAt(cur_block->last_instruction_index()); |
| 1059 // Some branch instructions (e.g. loops' back edges) | 1042 ASSERT(!branch->HasPointerMap()); |
| 1060 // can potentially cause a GC so they have a pointer map. | 1043 USE(branch); |
| 1061 // By inserting a move we essentially create a copy of a | |
| 1062 // value which is invisible to PopulatePointerMaps(), because we store | |
| 1063 // it into a location different from the operand of a live range | |
| 1064 // covering a branch instruction. | |
| 1065 // Thus we need to manually record a pointer. | |
| 1066 LInstruction* branch = | |
| 1067 InstructionAt(cur_block->last_instruction_index()); | |
| 1068 if (branch->HasPointerMap()) { | |
| 1069 if (phi->representation().IsTagged() && !phi->type().IsSmi()) { | |
| 1070 branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone()); | |
| 1071 } else if (!phi->representation().IsDouble()) { | |
| 1072 branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone()); | |
| 1073 } | |
| 1074 } | |
| 1075 } | 1044 } |
| 1076 | 1045 |
| 1077 LiveRange* live_range = LiveRangeFor(phi->id()); | 1046 LiveRange* live_range = LiveRangeFor(phi->id()); |
| 1078 LLabel* label = chunk_->GetLabel(phi->block()->block_id()); | 1047 BlockStartInstruction* block_start = code()->GetBlockStart(block); |
| 1079 label->GetOrCreateParallelMove(LGap::START, chunk()->zone())-> | 1048 block_start->GetOrCreateParallelMove(GapInstruction::START, code_zone()) |
| 1080 AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone()); | 1049 ->AddMove(phi_operand, live_range->GetSpillOperand(), code_zone()); |
| 1081 live_range->SetSpillStartIndex(phi->block()->first_instruction_index()); | 1050 live_range->SetSpillStartIndex(block->first_instruction_index()); |
| 1082 } | 1051 |
| 1083 } | 1052 // We use the phi-ness of some nodes in some later heuristics. |
| 1084 | 1053 live_range->set_is_phi(true); |
| 1085 | 1054 if (!block->IsLoopHeader()) { |
| 1086 bool LAllocator::Allocate(LChunk* chunk) { | 1055 live_range->set_is_non_loop_phi(true); |
| 1087 ASSERT(chunk_ == NULL); | 1056 } |
| 1088 chunk_ = static_cast<LPlatformChunk*>(chunk); | 1057 } |
| 1089 assigned_registers_ = | 1058 } |
| 1090 new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(), | 1059 |
| 1091 chunk->zone()); | 1060 |
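Note: the phi resolution above lowers each phi to plain data moves: one gap move per predecessor edge (phi input -> phi operand) at the predecessor's end, plus a move from the phi operand into the range's spill slot at block start. A minimal standalone sketch of the per-edge part, using toy types rather than V8's Instruction/GapInstruction classes:

    #include <cstdio>
    #include <vector>

    // Toy stand-ins (not V8 types): a virtual-register move and a block whose
    // last gap collects the moves inserted on its outgoing edge.
    struct Move { int src_vreg; int dst_vreg; };
    struct Block { std::vector<Move> end_moves; };

    // For 'dst = phi(inputs...)', queue input->dst in each predecessor's gap.
    void ResolvePhiSketch(int dst_vreg, const std::vector<int>& inputs,
                          const std::vector<Block*>& predecessors) {
      for (size_t j = 0; j < inputs.size(); ++j) {
        Move m = { inputs[j], dst_vreg };
        predecessors[j]->end_moves.push_back(m);
      }
    }

    int main() {
      Block p0, p1;
      std::vector<Block*> preds;
      preds.push_back(&p0);
      preds.push_back(&p1);
      std::vector<int> inputs;
      inputs.push_back(2);  // v2 flows in from p0
      inputs.push_back(3);  // v3 flows in from p1
      ResolvePhiSketch(5, inputs, preds);  // v5 = phi(v2, v3)
      std::printf("p0 gap: v%d -> v%d\n",
                  p0.end_moves[0].src_vreg, p0.end_moves[0].dst_vreg);
      return 0;
    }

In the real pass the destination is an UnallocatedOperand carrying the phi's virtual register, so later phases treat these moves like any other.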
| 1092 assigned_double_registers_ = | 1061 bool RegisterAllocator::Allocate() { |
| 1093 new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(), | 1062 assigned_registers_ = new(code_zone()) |
| 1094 chunk->zone()); | 1063 BitVector(Register::NumAllocatableRegisters(), code_zone()); |
| 1064 assigned_double_registers_ = new(code_zone()) |
| 1065 BitVector(DoubleRegister::NumAllocatableRegisters(), code_zone()); |
| 1095 MeetRegisterConstraints(); | 1066 MeetRegisterConstraints(); |
| 1096 if (!AllocationOk()) return false; | 1067 if (!AllocationOk()) return false; |
| 1097 ResolvePhis(); | 1068 ResolvePhis(); |
| 1098 BuildLiveRanges(); | 1069 BuildLiveRanges(); |
| 1099 AllocateGeneralRegisters(); | 1070 AllocateGeneralRegisters(); |
| 1100 if (!AllocationOk()) return false; | 1071 if (!AllocationOk()) return false; |
| 1101 AllocateDoubleRegisters(); | 1072 AllocateDoubleRegisters(); |
| 1102 if (!AllocationOk()) return false; | 1073 if (!AllocationOk()) return false; |
| 1103 PopulatePointerMaps(); | 1074 PopulatePointerMaps(); |
| 1104 ConnectRanges(); | 1075 ConnectRanges(); |
| 1105 ResolveControlFlow(); | 1076 ResolveControlFlow(); |
| 1077 code()->frame()->SetAllocatedRegisters(assigned_registers_); |
| 1078 code()->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_); |
| 1106 return true; | 1079 return true; |
| 1107 } | 1080 } |
| 1108 | 1081 |
| 1109 | 1082 |
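Note: Allocate() runs its phases in a fixed order and gives up as soon as AllocationOk() reports a failure, leaving the caller to fall back to another code path. The same early-out shape in isolation (hypothetical phase names, not V8 API):

    #include <functional>
    #include <vector>

    // Hypothetical phase driver mirroring Allocate()'s bail-out checks: each
    // phase returns false to signal an allocation failure.
    bool RunAllocationPipeline(const std::vector<std::function<bool()> >& phases) {
      for (size_t i = 0; i < phases.size(); ++i) {
        if (!phases[i]()) return false;  // bail out; caller picks a fallback
      }
      return true;
    }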
| 1110 void LAllocator::MeetRegisterConstraints() { | 1083 void RegisterAllocator::MeetRegisterConstraints() { |
| 1111 LAllocatorPhase phase("L_Register constraints", this); | 1084 RegisterAllocatorPhase phase("L_Register constraints", this); |
| 1112 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); | 1085 for (int i = 0; i < code()->BasicBlockCount(); ++i) { |
| 1113 for (int i = 0; i < blocks->length(); ++i) { | 1086 MeetRegisterConstraints(code()->BlockAt(i)); |
| 1114 HBasicBlock* block = blocks->at(i); | |
| 1115 MeetRegisterConstraints(block); | |
| 1116 if (!AllocationOk()) return; | 1087 if (!AllocationOk()) return; |
| 1117 } | 1088 } |
| 1118 } | 1089 } |
| 1119 | 1090 |
| 1120 | 1091 |
| 1121 void LAllocator::ResolvePhis() { | 1092 void RegisterAllocator::ResolvePhis() { |
| 1122 LAllocatorPhase phase("L_Resolve phis", this); | 1093 RegisterAllocatorPhase phase("L_Resolve phis", this); |
| 1123 | 1094 |
| 1124 // Process the blocks in reverse order. | 1095 // Process the blocks in reverse order. |
| 1125 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); | 1096 for (int i = code()->BasicBlockCount() - 1; i >= 0; --i) { |
| 1126 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { | 1097 ResolvePhis(code()->BlockAt(i)); |
| 1127 HBasicBlock* block = blocks->at(block_id); | |
| 1128 ResolvePhis(block); | |
| 1129 } | 1098 } |
| 1130 } | 1099 } |
| 1131 | 1100 |
| 1132 | 1101 |
| 1133 void LAllocator::ResolveControlFlow(LiveRange* range, | 1102 void RegisterAllocator::ResolveControlFlow(LiveRange* range, |
| 1134 HBasicBlock* block, | 1103 BasicBlock* block, |
| 1135 HBasicBlock* pred) { | 1104 BasicBlock* pred) { |
| 1136 LifetimePosition pred_end = | 1105 LifetimePosition pred_end = |
| 1137 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); | 1106 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); |
| 1138 LifetimePosition cur_start = | 1107 LifetimePosition cur_start = |
| 1139 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); | 1108 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); |
| 1140 LiveRange* pred_cover = NULL; | 1109 LiveRange* pred_cover = NULL; |
| 1141 LiveRange* cur_cover = NULL; | 1110 LiveRange* cur_cover = NULL; |
| 1142 LiveRange* cur_range = range; | 1111 LiveRange* cur_range = range; |
| 1143 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { | 1112 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { |
| 1144 if (cur_range->CanCover(cur_start)) { | 1113 if (cur_range->CanCover(cur_start)) { |
| 1145 ASSERT(cur_cover == NULL); | 1114 ASSERT(cur_cover == NULL); |
| 1146 cur_cover = cur_range; | 1115 cur_cover = cur_range; |
| 1147 } | 1116 } |
| 1148 if (cur_range->CanCover(pred_end)) { | 1117 if (cur_range->CanCover(pred_end)) { |
| 1149 ASSERT(pred_cover == NULL); | 1118 ASSERT(pred_cover == NULL); |
| 1150 pred_cover = cur_range; | 1119 pred_cover = cur_range; |
| 1151 } | 1120 } |
| 1152 cur_range = cur_range->next(); | 1121 cur_range = cur_range->next(); |
| 1153 } | 1122 } |
| 1154 | 1123 |
| 1155 if (cur_cover->IsSpilled()) return; | 1124 if (cur_cover->IsSpilled()) return; |
| 1156 ASSERT(pred_cover != NULL && cur_cover != NULL); | 1125 ASSERT(pred_cover != NULL && cur_cover != NULL); |
| 1157 if (pred_cover != cur_cover) { | 1126 if (pred_cover != cur_cover) { |
| 1158 LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone()); | 1127 InstructionOperand* pred_op = |
| 1159 LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone()); | 1128 pred_cover->CreateAssignedOperand(code_zone()); |
| 1129 InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone()); |
| 1160 if (!pred_op->Equals(cur_op)) { | 1130 if (!pred_op->Equals(cur_op)) { |
| 1161 LGap* gap = NULL; | 1131 GapInstruction* gap = NULL; |
| 1162 if (block->predecessors()->length() == 1) { | 1132 if (block->PredecessorCount() == 1) { |
| 1163 gap = GapAt(block->first_instruction_index()); | 1133 gap = code()->GapAt(block->first_instruction_index()); |
| 1164 } else { | 1134 } else { |
| 1165 ASSERT(pred->end()->SecondSuccessor() == NULL); | 1135 ASSERT(pred->SuccessorCount() == 1); |
| 1166 gap = GetLastGap(pred); | 1136 gap = GetLastGap(pred); |
| 1167 | 1137 |
| 1168 // We are going to insert a move before the branch instruction. | 1138 Instruction* branch = InstructionAt(pred->last_instruction_index()); |
| 1169 // Some branch instructions (e.g. loops' back edges) | 1139 ASSERT(!branch->HasPointerMap()); |
| 1170 // can potentially cause a GC so they have a pointer map. | 1140 USE(branch); |
| 1171 // By inserting a move we essentially create a copy of a | |
| 1172 // value which is invisible to PopulatePointerMaps(), because we store | |
| 1173 // it into a location different from the operand of a live range | |
| 1174 // covering a branch instruction. | |
| 1175 // Thus we need to manually record a pointer. | |
| 1176 LInstruction* branch = InstructionAt(pred->last_instruction_index()); | |
| 1177 if (branch->HasPointerMap()) { | |
| 1178 if (HasTaggedValue(range->id())) { | |
| 1179 branch->pointer_map()->RecordPointer(cur_op, chunk()->zone()); | |
| 1180 } else if (!cur_op->IsDoubleStackSlot() && | |
| 1181 !cur_op->IsDoubleRegister()) { | |
| 1182 branch->pointer_map()->RemovePointer(cur_op); | |
| 1183 } | |
| 1184 } | |
| 1185 } | 1141 } |
| 1186 gap->GetOrCreateParallelMove( | 1142 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()) |
| 1187 LGap::START, chunk()->zone())->AddMove(pred_op, cur_op, | 1143 ->AddMove(pred_op, cur_op, code_zone()); |
| 1188 chunk()->zone()); | |
| 1189 } | 1144 } |
| 1190 } | 1145 } |
| 1191 } | 1146 } |
| 1192 | 1147 |
| 1193 | 1148 |
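Note: when a value lives in different locations at the two ends of a control-flow edge, ResolveControlFlow places the fix-up move at the start of the successor if that block has a single predecessor, otherwise at the end of the predecessor, which then must have a single successor or the edge would be critical. A toy edge-classification sketch under those assumptions:

    #include <cassert>

    // Toy block carrying only the edge counts that matter here (hypothetical).
    struct ToyBlock { int predecessor_count; int successor_count; };

    enum MoveSite { SUCCESSOR_START, PREDECESSOR_END };

    // Where can the resolving move for edge pred->succ be inserted?
    MoveSite ClassifyEdge(const ToyBlock& pred, const ToyBlock& succ) {
      if (succ.predecessor_count == 1) return SUCCESSOR_START;
      // Otherwise the predecessor must not branch; a critical edge (multiple
      // predecessors and multiple successors) would have to be split first.
      assert(pred.successor_count == 1);
      return PREDECESSOR_END;
    }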
| 1194 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) { | 1149 ParallelMove* RegisterAllocator::GetConnectingParallelMove( |
| 1150 LifetimePosition pos) { |
| 1195 int index = pos.InstructionIndex(); | 1151 int index = pos.InstructionIndex(); |
| 1196 if (IsGapAt(index)) { | 1152 if (code()->IsGapAt(index)) { |
| 1197 LGap* gap = GapAt(index); | 1153 GapInstruction* gap = code()->GapAt(index); |
| 1198 return gap->GetOrCreateParallelMove( | 1154 return gap->GetOrCreateParallelMove( |
| 1199 pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone()); | 1155 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END, |
| 1156 code_zone()); |
| 1200 } | 1157 } |
| 1201 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); | 1158 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); |
| 1202 return GapAt(gap_pos)->GetOrCreateParallelMove( | 1159 return code()->GapAt(gap_pos)->GetOrCreateParallelMove( |
| 1203 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone()); | 1160 (gap_pos < index) ? GapInstruction::AFTER : GapInstruction::BEFORE, |
| 1161 code_zone()); |
| 1204 } | 1162 } |
| 1205 | 1163 |
| 1206 | 1164 |
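Note: GetConnectingParallelMove maps a lifetime position to one of four move slots: the gap at the index itself (START/END) when the index is a gap, or the neighbouring gap's AFTER/BEFORE slot otherwise. A self-contained sketch of that mapping; the even-index gap layout is an assumption for illustration only:

    // Illustrative gap slots, named after GapInstruction's inner positions.
    enum GapSlot { BEFORE, START, END, AFTER };

    struct Slot { int gap_index; GapSlot pos; };

    // Assumption for this sketch only: gaps sit at even instruction indices.
    static bool IsGapAtSketch(int index) { return (index & 1) == 0; }

    Slot ConnectingSlot(int index, bool is_instruction_start) {
      if (IsGapAtSketch(index)) {
        Slot s = { index, is_instruction_start ? START : END };
        return s;
      }
      int gap = is_instruction_start ? index - 1 : index + 1;
      Slot s = { gap, gap < index ? AFTER : BEFORE };
      return s;
    }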
| 1207 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) { | 1165 BasicBlock* RegisterAllocator::GetBlock(LifetimePosition pos) { |
| 1208 LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex())); | 1166 return code()->GetBasicBlock(pos.InstructionIndex()); |
| 1209 return gap->block(); | |
| 1210 } | 1167 } |
| 1211 | 1168 |
| 1212 | 1169 |
| 1213 void LAllocator::ConnectRanges() { | 1170 void RegisterAllocator::ConnectRanges() { |
| 1214 LAllocatorPhase phase("L_Connect ranges", this); | 1171 RegisterAllocatorPhase phase("L_Connect ranges", this); |
| 1215 for (int i = 0; i < live_ranges()->length(); ++i) { | 1172 for (int i = 0; i < live_ranges()->length(); ++i) { |
| 1216 LiveRange* first_range = live_ranges()->at(i); | 1173 LiveRange* first_range = live_ranges()->at(i); |
| 1217 if (first_range == NULL || first_range->parent() != NULL) continue; | 1174 if (first_range == NULL || first_range->parent() != NULL) continue; |
| 1218 | 1175 |
| 1219 LiveRange* second_range = first_range->next(); | 1176 LiveRange* second_range = first_range->next(); |
| 1220 while (second_range != NULL) { | 1177 while (second_range != NULL) { |
| 1221 LifetimePosition pos = second_range->Start(); | 1178 LifetimePosition pos = second_range->Start(); |
| 1222 | 1179 |
| 1223 if (!second_range->IsSpilled()) { | 1180 if (!second_range->IsSpilled()) { |
| 1224 // Add gap move if the two live ranges touch and there is no block | 1181 // Add gap move if the two live ranges touch and there is no block |
| 1225 // boundary. | 1182 // boundary. |
| 1226 if (first_range->End().Value() == pos.Value()) { | 1183 if (first_range->End().Value() == pos.Value()) { |
| 1227 bool should_insert = true; | 1184 bool should_insert = true; |
| 1228 if (IsBlockBoundary(pos)) { | 1185 if (IsBlockBoundary(pos)) { |
| 1229 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos)); | 1186 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos)); |
| 1230 } | 1187 } |
| 1231 if (should_insert) { | 1188 if (should_insert) { |
| 1232 LParallelMove* move = GetConnectingParallelMove(pos); | 1189 ParallelMove* move = GetConnectingParallelMove(pos); |
| 1233 LOperand* prev_operand = first_range->CreateAssignedOperand( | 1190 InstructionOperand* prev_operand = |
| 1234 chunk()->zone()); | 1191 first_range->CreateAssignedOperand(code_zone()); |
| 1235 LOperand* cur_operand = second_range->CreateAssignedOperand( | 1192 InstructionOperand* cur_operand = |
| 1236 chunk()->zone()); | 1193 second_range->CreateAssignedOperand(code_zone()); |
| 1237 move->AddMove(prev_operand, cur_operand, | 1194 move->AddMove(prev_operand, cur_operand, |
| 1238 chunk()->zone()); | 1195 code_zone()); |
| 1239 } | 1196 } |
| 1240 } | 1197 } |
| 1241 } | 1198 } |
| 1242 | 1199 |
| 1243 first_range = second_range; | 1200 first_range = second_range; |
| 1244 second_range = second_range->next(); | 1201 second_range = second_range->next(); |
| 1245 } | 1202 } |
| 1246 } | 1203 } |
| 1247 } | 1204 } |
| 1248 | 1205 |
| 1249 | 1206 |
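Note: ConnectRanges stitches split siblings back together: whenever a child range starts exactly where its predecessor ends, and no block boundary intervenes, a move between their assigned locations is queued in the connecting gap. A minimal walk over a toy split chain:

    #include <cstdio>

    // Toy split child: half-open [start, end) plus an assigned location id.
    // (Hypothetical stand-in for LiveRange children and their operands.)
    struct Child { int start; int end; int location; Child* next; };

    void ConnectSketch(Child* first) {
      for (Child* second = first->next; second != NULL;
           first = second, second = second->next) {
        // The ranges touch: the value must move between the two locations.
        if (first->end == second->start && first->location != second->location) {
          std::printf("gap move at %d: loc%d -> loc%d\n",
                      second->start, first->location, second->location);
        }
      }
    }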
| 1250 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const { | 1207 bool RegisterAllocator::CanEagerlyResolveControlFlow(BasicBlock* block) const { |
| 1251 if (block->predecessors()->length() != 1) return false; | 1208 if (block->PredecessorCount() != 1) return false; |
| 1252 return block->predecessors()->first()->block_id() == block->block_id() - 1; | 1209 return block->PredecessorAt(0)->rpo_number_ == block->rpo_number_ - 1; |
| 1253 } | 1210 } |
| 1254 | 1211 |
| 1255 | 1212 |
| 1256 void LAllocator::ResolveControlFlow() { | 1213 void RegisterAllocator::ResolveControlFlow() { |
| 1257 LAllocatorPhase phase("L_Resolve control flow", this); | 1214 RegisterAllocatorPhase phase("L_Resolve control flow", this); |
| 1258 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); | 1215 for (int block_id = 1; block_id < code()->BasicBlockCount(); ++block_id) { |
| 1259 for (int block_id = 1; block_id < blocks->length(); ++block_id) { | 1216 BasicBlock* block = code()->BlockAt(block_id); |
| 1260 HBasicBlock* block = blocks->at(block_id); | |
| 1261 if (CanEagerlyResolveControlFlow(block)) continue; | 1217 if (CanEagerlyResolveControlFlow(block)) continue; |
| 1262 BitVector* live = live_in_sets_[block->block_id()]; | 1218 BitVector* live = live_in_sets_[block->rpo_number_]; |
| 1263 BitVector::Iterator iterator(live); | 1219 BitVector::Iterator iterator(live); |
| 1264 while (!iterator.Done()) { | 1220 while (!iterator.Done()) { |
| 1265 int operand_index = iterator.Current(); | 1221 int operand_index = iterator.Current(); |
| 1266 for (int i = 0; i < block->predecessors()->length(); ++i) { | 1222 BasicBlock::Predecessors predecessors = block->predecessors(); |
| 1267 HBasicBlock* cur = block->predecessors()->at(i); | 1223 for (BasicBlock::Predecessors::iterator i = predecessors.begin(); |
| 1224 i != predecessors.end(); ++i) { |
| 1225 BasicBlock* cur = *i; |
| 1268 LiveRange* cur_range = LiveRangeFor(operand_index); | 1226 LiveRange* cur_range = LiveRangeFor(operand_index); |
| 1269 ResolveControlFlow(cur_range, block, cur); | 1227 ResolveControlFlow(cur_range, block, cur); |
| 1270 } | 1228 } |
| 1271 iterator.Advance(); | 1229 iterator.Advance(); |
| 1272 } | 1230 } |
| 1273 } | 1231 } |
| 1274 } | 1232 } |
| 1275 | 1233 |
| 1276 | 1234 |
| 1277 void LAllocator::BuildLiveRanges() { | 1235 void RegisterAllocator::BuildLiveRanges() { |
| 1278 LAllocatorPhase phase("L_Build live ranges", this); | 1236 RegisterAllocatorPhase phase("L_Build live ranges", this); |
| 1279 InitializeLivenessAnalysis(); | 1237 InitializeLivenessAnalysis(); |
| 1280 // Process the blocks in reverse order. | 1238 // Process the blocks in reverse order. |
| 1281 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); | 1239 for (int block_id = code()->BasicBlockCount() - 1; |
| 1282 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { | 1240 block_id >= 0; --block_id) { |
| 1283 HBasicBlock* block = blocks->at(block_id); | 1241 BasicBlock* block = code()->BlockAt(block_id); |
| 1284 BitVector* live = ComputeLiveOut(block); | 1242 BitVector* live = ComputeLiveOut(block); |
| 1285 // Initially consider all live_out values live for the entire block. We | 1243 // Initially consider all live_out values live for the entire block. We |
| 1286 // will shorten these intervals if necessary. | 1244 // will shorten these intervals if necessary. |
| 1287 AddInitialIntervals(block, live); | 1245 AddInitialIntervals(block, live); |
| 1288 | 1246 |
| 1289 // Process the instructions in reverse order, generating and killing | 1247 // Process the instructions in reverse order, generating and killing |
| 1290 // live values. | 1248 // live values. |
| 1291 ProcessInstructions(block, live); | 1249 ProcessInstructions(block, live); |
| 1292 // All phi output operands are killed by this block. | 1250 // All phi output operands are killed by this block. |
| 1293 const ZoneList<HPhi*>* phis = block->phis(); | 1251 for (BasicBlock::const_iterator i = block->begin(); |
| 1294 for (int i = 0; i < phis->length(); ++i) { | 1252 i != block->end(); ++i) { |
| 1253 Node* phi = *i; |
| 1254 if (phi->opcode() != IrOpcode::kPhi) continue; |
| 1255 |
| 1295 // The live range interval already ends at the first instruction of the | 1256 // The live range interval already ends at the first instruction of the |
| 1296 // block. | 1257 // block. |
| 1297 HPhi* phi = phis->at(i); | |
| 1298 live->Remove(phi->id()); | 1258 live->Remove(phi->id()); |
| 1299 | 1259 |
| 1300 LOperand* hint = NULL; | 1260 InstructionOperand* hint = NULL; |
| 1301 LOperand* phi_operand = NULL; | 1261 InstructionOperand* phi_operand = NULL; |
| 1302 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0)); | 1262 GapInstruction* gap = GetLastGap(block->PredecessorAt(0)); |
| 1303 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, | 1263 |
| 1304 chunk()->zone()); | 1264 // TODO(titzer): no need to create the parallel move if it doesn't exist. |
| 1265 ParallelMove* move = |
| 1266 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone()); |
| 1305 for (int j = 0; j < move->move_operands()->length(); ++j) { | 1267 for (int j = 0; j < move->move_operands()->length(); ++j) { |
| 1306 LOperand* to = move->move_operands()->at(j).destination(); | 1268 InstructionOperand* to = move->move_operands()->at(j).destination(); |
| 1307 if (to->IsUnallocated() && | 1269 if (to->IsUnallocated() && |
| 1308 LUnallocated::cast(to)->virtual_register() == phi->id()) { | 1270 UnallocatedOperand::cast(to)->virtual_register() == phi->id()) { |
| 1309 hint = move->move_operands()->at(j).source(); | 1271 hint = move->move_operands()->at(j).source(); |
| 1310 phi_operand = to; | 1272 phi_operand = to; |
| 1311 break; | 1273 break; |
| 1312 } | 1274 } |
| 1313 } | 1275 } |
| 1314 ASSERT(hint != NULL); | 1276 ASSERT(hint != NULL); |
| 1315 | 1277 |
| 1316 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( | 1278 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( |
| 1317 block->first_instruction_index()); | 1279 block->first_instruction_index()); |
| 1318 Define(block_start, phi_operand, hint); | 1280 Define(block_start, phi_operand, hint); |
| 1319 } | 1281 } |
| 1320 | 1282 |
| 1321 // Now live is live_in for this block, excluding values that are live | 1283 // Now live is live_in for this block, excluding values that are live |
| 1322 // out on backward successor edges. | 1284 // out on backward successor edges. |
| 1323 live_in_sets_[block_id] = live; | 1285 live_in_sets_[block_id] = live; |
| 1324 | 1286 |
| 1325 // If this block is a loop header go back and patch up the necessary | |
| 1326 // predecessor blocks. | |
| 1327 if (block->IsLoopHeader()) { | 1287 if (block->IsLoopHeader()) { |
| 1328 // TODO(kmillikin): Need to be able to get the last block of the loop | 1288 // Add a live range stretching from the first loop instruction to the last |
| 1329 // in the loop information. Add a live range stretching from the first | 1289 // for each value live on entry to the header. |
| 1330 // loop instruction to the last for each value live on entry to the | |
| 1331 // header. | |
| 1332 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge(); | |
| 1333 BitVector::Iterator iterator(live); | 1290 BitVector::Iterator iterator(live); |
| 1334 LifetimePosition start = LifetimePosition::FromInstructionIndex( | 1291 LifetimePosition start = LifetimePosition::FromInstructionIndex( |
| 1335 block->first_instruction_index()); | 1292 block->first_instruction_index()); |
| 1293 int end_index = |
| 1294 code()->BlockAt(block->loop_end_)->last_instruction_index(); |
| 1336 LifetimePosition end = LifetimePosition::FromInstructionIndex( | 1295 LifetimePosition end = LifetimePosition::FromInstructionIndex( |
| 1337 back_edge->last_instruction_index()).NextInstruction(); | 1296 end_index).NextInstruction(); |
| 1338 while (!iterator.Done()) { | 1297 while (!iterator.Done()) { |
| 1339 int operand_index = iterator.Current(); | 1298 int operand_index = iterator.Current(); |
| 1340 LiveRange* range = LiveRangeFor(operand_index); | 1299 LiveRange* range = LiveRangeFor(operand_index); |
| 1341 range->EnsureInterval(start, end, zone()); | 1300 range->EnsureInterval(start, end, zone()); |
| 1342 iterator.Advance(); | 1301 iterator.Advance(); |
| 1343 } | 1302 } |
| 1344 | 1303 |
| 1345 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) { | 1304 // Insert all values into the live in sets of all blocks in the loop. |
| 1305 for (int i = block->rpo_number_ + 1; i < block->loop_end_; ++i) { |
| 1346 live_in_sets_[i]->Union(*live); | 1306 live_in_sets_[i]->Union(*live); |
| 1347 } | 1307 } |
| 1348 } | 1308 } |
| 1349 | 1309 |
| 1350 #ifdef DEBUG | 1310 #ifdef DEBUG |
| 1351 if (block_id == 0) { | 1311 if (block_id == 0) { |
| 1352 BitVector::Iterator iterator(live); | 1312 BitVector::Iterator iterator(live); |
| 1353 bool found = false; | 1313 bool found = false; |
| 1354 while (!iterator.Done()) { | 1314 while (!iterator.Done()) { |
| 1355 found = true; | 1315 found = true; |
| 1356 int operand_index = iterator.Current(); | 1316 int operand_index = iterator.Current(); |
| 1357 if (chunk_->info()->IsStub()) { | 1317 PrintF("Register allocator error: live v%d reached first block.\n", |
| 1358 CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey(); | 1318 operand_index); |
| 1359 PrintF("Function: %s\n", CodeStub::MajorName(major_key, false)); | 1319 LiveRange* range = LiveRangeFor(operand_index); |
| 1320 PrintF(" (first use is at %d)\n", range->first_pos()->pos().Value()); |
| 1321 CompilationInfo* info = code()->linkage()->info(); |
| 1322 if (info->IsStub()) { |
| 1323 if (info->code_stub() == NULL) { |
| 1324 PrintF("\n"); |
| 1325 } else { |
| 1326 CodeStub::Major major_key = info->code_stub()->MajorKey(); |
| 1327 PrintF(" (function: %s)\n", CodeStub::MajorName(major_key, false)); |
| 1328 } |
| 1360 } else { | 1329 } else { |
| 1361 ASSERT(chunk_->info()->IsOptimizing()); | 1330 ASSERT(info->IsOptimizing()); |
| 1362 AllowHandleDereference allow_deref; | 1331 AllowHandleDereference allow_deref; |
| 1363 PrintF("Function: %s\n", | 1332 PrintF(" (function: %s)\n", |
| 1364 chunk_->info()->function()->debug_name()->ToCString().get()); | 1333 info->function()->debug_name()->ToCString().get()); |
| 1365 } | 1334 } |
| 1366 PrintF("Value %d used before first definition!\n", operand_index); | |
| 1367 LiveRange* range = LiveRangeFor(operand_index); | |
| 1368 PrintF("First use is at %d\n", range->first_pos()->pos().Value()); | |
| 1369 iterator.Advance(); | 1335 iterator.Advance(); |
| 1370 } | 1336 } |
| 1371 ASSERT(!found); | 1337 ASSERT(!found); |
| 1372 } | 1338 } |
| 1373 #endif | 1339 #endif |
| 1374 } | 1340 } |
| 1375 | 1341 |
| 1376 for (int i = 0; i < live_ranges_.length(); ++i) { | 1342 for (int i = 0; i < live_ranges_.length(); ++i) { |
| 1377 if (live_ranges_[i] != NULL) { | 1343 if (live_ranges_[i] != NULL) { |
| 1378 live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id()); | 1344 live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id()); |
| 1345 |
| 1346 // TODO(bmeurer): This is a horrible hack to make sure that for constant |
| 1347 // live ranges, every use requires the constant to be in a register. |
| 1348 // Without this hack, all uses with "any" policy would get the constant |
| 1349 // operand assigned. |
| 1350 LiveRange* range = live_ranges_[i]; |
| 1351 if (range->HasAllocatedSpillOperand() && |
| 1352 range->GetSpillOperand()->IsConstant()) { |
| 1353 for (UsePosition* pos = range->first_pos(); pos != NULL; |
| 1354 pos = pos->next_) { |
| 1355 pos->register_beneficial_ = true; |
| 1356 pos->requires_reg_ = true; |
| 1357 } |
| 1358 } |
| 1379 } | 1359 } |
| 1380 } | 1360 } |
| 1381 } | 1361 } |
| 1382 | 1362 |
| 1383 | 1363 |
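Note: BuildLiveRanges is a classic backward liveness pass: start from the union of the successors' live-in sets, walk the instructions in reverse so definitions kill and uses revive, then, for loop headers, keep everything live at the header alive across the whole loop body. The dataflow core on toy sets:

    #include <set>

    typedef std::set<int> LiveSet;  // virtual register numbers

    struct ToyInstr { std::set<int> defs; std::set<int> uses; };

    // One backward dataflow step: live-out of an instruction -> live-in.
    LiveSet StepBackward(LiveSet live, const ToyInstr& instr) {
      for (std::set<int>::const_iterator d = instr.defs.begin();
           d != instr.defs.end(); ++d) live.erase(*d);    // definitions kill
      for (std::set<int>::const_iterator u = instr.uses.begin();
           u != instr.uses.end(); ++u) live.insert(*u);   // uses revive
      return live;
    }

The loop-header branch above then widens each surviving value's interval to the loop end via EnsureInterval and unions the set into every block of the loop.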
| 1384 bool LAllocator::SafePointsAreInOrder() const { | 1364 bool RegisterAllocator::SafePointsAreInOrder() const { |
| 1385 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); | |
| 1386 int safe_point = 0; | 1365 int safe_point = 0; |
| 1387 for (int i = 0; i < pointer_maps->length(); ++i) { | 1366 const PointerMapDeque* pointer_maps = code()->pointer_maps(); |
| 1388 LPointerMap* map = pointer_maps->at(i); | 1367 for (PointerMapDeque::const_iterator it = pointer_maps->begin(); |
| 1389 if (safe_point > map->lithium_position()) return false; | 1368 it != pointer_maps->end(); ++it) { |
| 1390 safe_point = map->lithium_position(); | 1369 PointerMap* map = *it; |
| 1370 if (safe_point > map->instruction_position()) return false; |
| 1371 safe_point = map->instruction_position(); |
| 1391 } | 1372 } |
| 1392 return true; | 1373 return true; |
| 1393 } | 1374 } |
| 1394 | 1375 |
| 1395 | 1376 |
| 1396 void LAllocator::PopulatePointerMaps() { | 1377 void RegisterAllocator::PopulatePointerMaps() { |
| 1397 LAllocatorPhase phase("L_Populate pointer maps", this); | 1378 RegisterAllocatorPhase phase("L_Populate pointer maps", this); |
| 1398 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); | |
| 1399 | 1379 |
| 1400 ASSERT(SafePointsAreInOrder()); | 1380 ASSERT(SafePointsAreInOrder()); |
| 1401 | 1381 |
| 1402 // Iterate over all safe point positions and record a pointer | 1382 // Iterate over all safe point positions and record a pointer |
| 1403 // for all spilled live ranges at this point. | 1383 // for all spilled live ranges at this point. |
| 1404 int first_safe_point_index = 0; | |
| 1405 int last_range_start = 0; | 1384 int last_range_start = 0; |
| 1385 const PointerMapDeque* pointer_maps = code()->pointer_maps(); |
| 1386 PointerMapDeque::const_iterator first_it = pointer_maps->begin(); |
| 1406 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) { | 1387 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) { |
| 1407 LiveRange* range = live_ranges()->at(range_idx); | 1388 LiveRange* range = live_ranges()->at(range_idx); |
| 1408 if (range == NULL) continue; | 1389 if (range == NULL) continue; |
| 1409 // Iterate over the first parts of multi-part live ranges. | 1390 // Iterate over the first parts of multi-part live ranges. |
| 1410 if (range->parent() != NULL) continue; | 1391 if (range->parent() != NULL) continue; |
| 1411 // Skip non-pointer values. | 1392 // Skip non-reference values. |
| 1412 if (!HasTaggedValue(range->id())) continue; | 1393 if (!HasTaggedValue(range->id())) continue; |
| 1413 // Skip empty live ranges. | 1394 // Skip empty live ranges. |
| 1414 if (range->IsEmpty()) continue; | 1395 if (range->IsEmpty()) continue; |
| 1415 | 1396 |
| 1416 // Find the extent of the range and its children. | 1397 // Find the extent of the range and its children. |
| 1417 int start = range->Start().InstructionIndex(); | 1398 int start = range->Start().InstructionIndex(); |
| 1418 int end = 0; | 1399 int end = 0; |
| 1419 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) { | 1400 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) { |
| 1420 LifetimePosition this_end = cur->End(); | 1401 LifetimePosition this_end = cur->End(); |
| 1421 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex(); | 1402 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex(); |
| 1422 ASSERT(cur->Start().InstructionIndex() >= start); | 1403 ASSERT(cur->Start().InstructionIndex() >= start); |
| 1423 } | 1404 } |
| 1424 | 1405 |
| 1425 // Most of the ranges are in order, but not all. Keep an eye on when | 1406 // Most of the ranges are in order, but not all. Keep an eye on when they |
| 1426 // they step backwards and reset the first_safe_point_index so we don't | 1407 // step backwards and reset the first_it so we don't miss any safe points. |
| 1427 // miss any safe points. | 1408 if (start < last_range_start) first_it = pointer_maps->begin(); |
| 1428 if (start < last_range_start) { | |
| 1429 first_safe_point_index = 0; | |
| 1430 } | |
| 1431 last_range_start = start; | 1409 last_range_start = start; |
| 1432 | 1410 |
| 1433 // Step across all the safe points that are before the start of this range, | 1411 // Step across all the safe points that are before the start of this range, |
| 1434 // recording how far we step in order to save doing this for the next range. | 1412 // recording how far we step in order to save doing this for the next range. |
| 1435 while (first_safe_point_index < pointer_maps->length()) { | 1413 for (; first_it != pointer_maps->end(); ++first_it) { |
| 1436 LPointerMap* map = pointer_maps->at(first_safe_point_index); | 1414 PointerMap* map = *first_it; |
| 1437 int safe_point = map->lithium_position(); | 1415 if (map->instruction_position() >= start) break; |
| 1438 if (safe_point >= start) break; | |
| 1439 first_safe_point_index++; | |
| 1440 } | 1416 } |
| 1441 | 1417 |
| 1442 // Step through the safe points to see whether they are in the range. | 1418 // Step through the safe points to see whether they are in the range. |
| 1443 for (int safe_point_index = first_safe_point_index; | 1419 for (PointerMapDeque::const_iterator it = first_it; |
| 1444 safe_point_index < pointer_maps->length(); | 1420 it != pointer_maps->end(); ++it) { |
| 1445 ++safe_point_index) { | 1421 PointerMap* map = *it; |
| 1446 LPointerMap* map = pointer_maps->at(safe_point_index); | 1422 int safe_point = map->instruction_position(); |
| 1447 int safe_point = map->lithium_position(); | |
| 1448 | 1423 |
| 1449 // The safe points are sorted so we can stop searching here. | 1424 // The safe points are sorted so we can stop searching here. |
| 1450 if (safe_point - 1 > end) break; | 1425 if (safe_point - 1 > end) break; |
| 1451 | 1426 |
| 1452 // Advance to the next active range that covers the current | 1427 // Advance to the next active range that covers the current |
| 1453 // safe point position. | 1428 // safe point position. |
| 1454 LifetimePosition safe_point_pos = | 1429 LifetimePosition safe_point_pos = |
| 1455 LifetimePosition::FromInstructionIndex(safe_point); | 1430 LifetimePosition::FromInstructionIndex(safe_point); |
| 1456 LiveRange* cur = range; | 1431 LiveRange* cur = range; |
| 1457 while (cur != NULL && !cur->Covers(safe_point_pos)) { | 1432 while (cur != NULL && !cur->Covers(safe_point_pos)) { |
| 1458 cur = cur->next(); | 1433 cur = cur->next(); |
| 1459 } | 1434 } |
| 1460 if (cur == NULL) continue; | 1435 if (cur == NULL) continue; |
| 1461 | 1436 |
| 1462 // Check if the live range is spilled and the safe point is after | 1437 // Check if the live range is spilled and the safe point is after |
| 1463 // the spill position. | 1438 // the spill position. |
| 1464 if (range->HasAllocatedSpillOperand() && | 1439 if (range->HasAllocatedSpillOperand() && |
| 1465 safe_point >= range->spill_start_index()) { | 1440 safe_point >= range->spill_start_index() && |
| 1441 !range->GetSpillOperand()->IsConstant()) { |
| 1466 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n", | 1442 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n", |
| 1467 range->id(), range->spill_start_index(), safe_point); | 1443 range->id(), range->spill_start_index(), safe_point); |
| 1468 map->RecordPointer(range->GetSpillOperand(), chunk()->zone()); | 1444 map->RecordPointer(range->GetSpillOperand(), code_zone()); |
| 1469 } | 1445 } |
| 1470 | 1446 |
| 1471 if (!cur->IsSpilled()) { | 1447 if (!cur->IsSpilled()) { |
| 1472 TraceAlloc("Pointer in register for range %d (start at %d) " | 1448 TraceAlloc("Pointer in register for range %d (start at %d) " |
| 1473 "at safe point %d\n", | 1449 "at safe point %d\n", |
| 1474 cur->id(), cur->Start().Value(), safe_point); | 1450 cur->id(), cur->Start().Value(), safe_point); |
| 1475 LOperand* operand = cur->CreateAssignedOperand(chunk()->zone()); | 1451 InstructionOperand* operand = cur->CreateAssignedOperand(code_zone()); |
| 1476 ASSERT(!operand->IsStackSlot()); | 1452 ASSERT(!operand->IsStackSlot()); |
| 1477 map->RecordPointer(operand, chunk()->zone()); | 1453 map->RecordPointer(operand, code_zone()); |
| 1478 } | 1454 } |
| 1479 } | 1455 } |
| 1480 } | 1456 } |
| 1481 } | 1457 } |
| 1482 | 1458 |
| 1483 | 1459 |
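Note: the pointer-map walk relies on both lists being mostly sorted: a cursor into the safe points advances past maps that end before the current range starts and is rewound only when the ranges step backwards. The cursor discipline in isolation, over plain sorted vectors:

    #include <utility>
    #include <vector>

    // Toy walk over sorted safe-point positions: for each range [start, end],
    // visit the safe points it covers, resuming the cursor (first_it) from
    // where the previous range left it, rewinding only on a backward step.
    void VisitSafePoints(const std::vector<int>& safe_points,  // sorted ascending
                         const std::vector<std::pair<int, int> >& ranges) {
      size_t first_it = 0;
      int last_range_start = 0;
      for (size_t r = 0; r < ranges.size(); ++r) {
        int start = ranges[r].first, end = ranges[r].second;
        if (start < last_range_start) first_it = 0;  // rare out-of-order range
        last_range_start = start;
        while (first_it < safe_points.size() && safe_points[first_it] < start) {
          ++first_it;  // skip safe points before this range
        }
        for (size_t it = first_it; it < safe_points.size(); ++it) {
          if (safe_points[it] - 1 > end) break;  // sorted, so we can stop here
          // ... record a pointer for this (range, safe point) pair ...
        }
      }
    }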
| 1484 void LAllocator::AllocateGeneralRegisters() { | 1460 void RegisterAllocator::AllocateGeneralRegisters() { |
| 1485 LAllocatorPhase phase("L_Allocate general registers", this); | 1461 RegisterAllocatorPhase phase("L_Allocate general registers", this); |
| 1486 num_registers_ = Register::NumAllocatableRegisters(); | 1462 num_registers_ = Register::NumAllocatableRegisters(); |
| 1487 mode_ = GENERAL_REGISTERS; | 1463 mode_ = GENERAL_REGISTERS; |
| 1488 AllocateRegisters(); | 1464 AllocateRegisters(); |
| 1489 } | 1465 } |
| 1490 | 1466 |
| 1491 | 1467 |
| 1492 void LAllocator::AllocateDoubleRegisters() { | 1468 void RegisterAllocator::AllocateDoubleRegisters() { |
| 1493 LAllocatorPhase phase("L_Allocate double registers", this); | 1469 RegisterAllocatorPhase phase("L_Allocate double registers", this); |
| 1494 num_registers_ = DoubleRegister::NumAllocatableRegisters(); | 1470 num_registers_ = DoubleRegister::NumAllocatableRegisters(); |
| 1495 mode_ = DOUBLE_REGISTERS; | 1471 mode_ = DOUBLE_REGISTERS; |
| 1496 AllocateRegisters(); | 1472 AllocateRegisters(); |
| 1497 } | 1473 } |
| 1498 | 1474 |
| 1499 | 1475 |
| 1500 void LAllocator::AllocateRegisters() { | 1476 void RegisterAllocator::AllocateRegisters() { |
| 1501 ASSERT(unhandled_live_ranges_.is_empty()); | 1477 ASSERT(unhandled_live_ranges_.is_empty()); |
| 1502 | 1478 |
| 1503 for (int i = 0; i < live_ranges_.length(); ++i) { | 1479 for (int i = 0; i < live_ranges_.length(); ++i) { |
| 1504 if (live_ranges_[i] != NULL) { | 1480 if (live_ranges_[i] != NULL) { |
| 1505 if (live_ranges_[i]->Kind() == mode_) { | 1481 if (live_ranges_[i]->Kind() == mode_) { |
| 1506 AddToUnhandledUnsorted(live_ranges_[i]); | 1482 AddToUnhandledUnsorted(live_ranges_[i]); |
| 1507 } | 1483 } |
| 1508 } | 1484 } |
| 1509 } | 1485 } |
| 1510 SortUnhandled(); | 1486 SortUnhandled(); |
| (...skipping 28 matching lines...) |
| 1539 #ifdef DEBUG | 1515 #ifdef DEBUG |
| 1540 allocation_finger_ = position; | 1516 allocation_finger_ = position; |
| 1541 #endif | 1517 #endif |
| 1542 TraceAlloc("Processing interval %d start=%d\n", | 1518 TraceAlloc("Processing interval %d start=%d\n", |
| 1543 current->id(), | 1519 current->id(), |
| 1544 position.Value()); | 1520 position.Value()); |
| 1545 | 1521 |
| 1546 if (current->HasAllocatedSpillOperand()) { | 1522 if (current->HasAllocatedSpillOperand()) { |
| 1547 TraceAlloc("Live range %d already has a spill operand\n", current->id()); | 1523 TraceAlloc("Live range %d already has a spill operand\n", current->id()); |
| 1548 LifetimePosition next_pos = position; | 1524 LifetimePosition next_pos = position; |
| 1549 if (IsGapAt(next_pos.InstructionIndex())) { | 1525 if (code()->IsGapAt(next_pos.InstructionIndex())) { |
| 1550 next_pos = next_pos.NextInstruction(); | 1526 next_pos = next_pos.NextInstruction(); |
| 1551 } | 1527 } |
| 1552 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); | 1528 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); |
| 1553 // If the range already has a spill operand and it doesn't need a | 1529 // If the range already has a spill operand and it doesn't need a |
| 1554 // register immediately, split it and spill the first part of the range. | 1530 // register immediately, split it and spill the first part of the range. |
| 1555 if (pos == NULL) { | 1531 if (pos == NULL) { |
| 1556 Spill(current); | 1532 Spill(current); |
| 1557 continue; | 1533 continue; |
| 1558 } else if (pos->pos().Value() > | 1534 } else if (pos->pos().Value() > |
| 1559 current->Start().NextInstruction().Value()) { | 1535 current->Start().NextInstruction().Value()) { |
| (...skipping 40 matching lines...) |
| 1600 AddToActive(current); | 1576 AddToActive(current); |
| 1601 } | 1577 } |
| 1602 } | 1578 } |
| 1603 | 1579 |
| 1604 reusable_slots_.Rewind(0); | 1580 reusable_slots_.Rewind(0); |
| 1605 active_live_ranges_.Rewind(0); | 1581 active_live_ranges_.Rewind(0); |
| 1606 inactive_live_ranges_.Rewind(0); | 1582 inactive_live_ranges_.Rewind(0); |
| 1607 } | 1583 } |
| 1608 | 1584 |
| 1609 | 1585 |
| 1610 const char* LAllocator::RegisterName(int allocation_index) { | 1586 const char* RegisterAllocator::RegisterName(int allocation_index) { |
| 1611 if (mode_ == GENERAL_REGISTERS) { | 1587 if (mode_ == GENERAL_REGISTERS) { |
| 1612 return Register::AllocationIndexToString(allocation_index); | 1588 return Register::AllocationIndexToString(allocation_index); |
| 1613 } else { | 1589 } else { |
| 1614 return DoubleRegister::AllocationIndexToString(allocation_index); | 1590 return DoubleRegister::AllocationIndexToString(allocation_index); |
| 1615 } | 1591 } |
| 1616 } | 1592 } |
| 1617 | 1593 |
| 1618 | 1594 |
| 1619 void LAllocator::TraceAlloc(const char* msg, ...) { | 1595 void RegisterAllocator::TraceAlloc(const char* msg, ...) { |
| 1620 if (FLAG_trace_alloc) { | 1596 if (FLAG_trace_alloc) { |
| 1621 va_list arguments; | 1597 va_list arguments; |
| 1622 va_start(arguments, msg); | 1598 va_start(arguments, msg); |
| 1623 base::OS::VPrint(msg, arguments); | 1599 base::OS::VPrint(msg, arguments); |
| 1624 va_end(arguments); | 1600 va_end(arguments); |
| 1625 } | 1601 } |
| 1626 } | 1602 } |
| 1627 | 1603 |
| 1628 | 1604 |
| 1629 bool LAllocator::HasTaggedValue(int virtual_register) const { | 1605 bool RegisterAllocator::HasTaggedValue(int virtual_register) const { |
| 1630 HValue* value = graph_->LookupValue(virtual_register); | 1606 return code()->IsReference(virtual_register); |
| 1631 if (value == NULL) return false; | |
| 1632 return value->representation().IsTagged() && !value->type().IsSmi(); | |
| 1633 } | 1607 } |
| 1634 | 1608 |
| 1635 | 1609 |
| 1636 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const { | 1610 RegisterKind RegisterAllocator::RequiredRegisterKind( |
| 1637 if (virtual_register < first_artificial_register_) { | 1611 int virtual_register) const { |
| 1638 HValue* value = graph_->LookupValue(virtual_register); | 1612 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS |
| 1639 if (value != NULL && value->representation().IsDouble()) { | 1613 : GENERAL_REGISTERS; |
| 1640 return DOUBLE_REGISTERS; | |
| 1641 } | |
| 1642 } else if (double_artificial_registers_.Contains( | |
| 1643 virtual_register - first_artificial_register_)) { | |
| 1644 return DOUBLE_REGISTERS; | |
| 1645 } | |
| 1646 | |
| 1647 return GENERAL_REGISTERS; | |
| 1648 } | 1614 } |
| 1649 | 1615 |
| 1650 | 1616 |
| 1651 void LAllocator::AddToActive(LiveRange* range) { | 1617 void RegisterAllocator::AddToActive(LiveRange* range) { |
| 1652 TraceAlloc("Add live range %d to active\n", range->id()); | 1618 TraceAlloc("Add live range %d to active\n", range->id()); |
| 1653 active_live_ranges_.Add(range, zone()); | 1619 active_live_ranges_.Add(range, zone()); |
| 1654 } | 1620 } |
| 1655 | 1621 |
| 1656 | 1622 |
| 1657 void LAllocator::AddToInactive(LiveRange* range) { | 1623 void RegisterAllocator::AddToInactive(LiveRange* range) { |
| 1658 TraceAlloc("Add live range %d to inactive\n", range->id()); | 1624 TraceAlloc("Add live range %d to inactive\n", range->id()); |
| 1659 inactive_live_ranges_.Add(range, zone()); | 1625 inactive_live_ranges_.Add(range, zone()); |
| 1660 } | 1626 } |
| 1661 | 1627 |
| 1662 | 1628 |
| 1663 void LAllocator::AddToUnhandledSorted(LiveRange* range) { | 1629 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) { |
| 1664 if (range == NULL || range->IsEmpty()) return; | 1630 if (range == NULL || range->IsEmpty()) return; |
| 1665 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled()); | 1631 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 1666 ASSERT(allocation_finger_.Value() <= range->Start().Value()); | 1632 ASSERT(allocation_finger_.Value() <= range->Start().Value()); |
| 1667 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) { | 1633 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) { |
| 1668 LiveRange* cur_range = unhandled_live_ranges_.at(i); | 1634 LiveRange* cur_range = unhandled_live_ranges_.at(i); |
| 1669 if (range->ShouldBeAllocatedBefore(cur_range)) { | 1635 if (range->ShouldBeAllocatedBefore(cur_range)) { |
| 1670 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1); | 1636 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1); |
| 1671 unhandled_live_ranges_.InsertAt(i + 1, range, zone()); | 1637 unhandled_live_ranges_.InsertAt(i + 1, range, zone()); |
| 1672 ASSERT(UnhandledIsSorted()); | 1638 ASSERT(UnhandledIsSorted()); |
| 1673 return; | 1639 return; |
| 1674 } | 1640 } |
| 1675 } | 1641 } |
| 1676 TraceAlloc("Add live range %d to unhandled at start\n", range->id()); | 1642 TraceAlloc("Add live range %d to unhandled at start\n", range->id()); |
| 1677 unhandled_live_ranges_.InsertAt(0, range, zone()); | 1643 unhandled_live_ranges_.InsertAt(0, range, zone()); |
| 1678 ASSERT(UnhandledIsSorted()); | 1644 ASSERT(UnhandledIsSorted()); |
| 1679 } | 1645 } |
| 1680 | 1646 |
| 1681 | 1647 |
| 1682 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) { | 1648 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) { |
| 1683 if (range == NULL || range->IsEmpty()) return; | 1649 if (range == NULL || range->IsEmpty()) return; |
| 1684 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled()); | 1650 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled()); |
| 1685 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id()); | 1651 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id()); |
| 1686 unhandled_live_ranges_.Add(range, zone()); | 1652 unhandled_live_ranges_.Add(range, zone()); |
| 1687 } | 1653 } |
| 1688 | 1654 |
| 1689 | 1655 |
| 1690 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) { | 1656 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) { |
| 1691 ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) || | 1657 ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) || |
| 1692 !(*b)->ShouldBeAllocatedBefore(*a)); | 1658 !(*b)->ShouldBeAllocatedBefore(*a)); |
| 1693 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1; | 1659 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1; |
| 1694 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1; | 1660 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1; |
| 1695 return (*a)->id() - (*b)->id(); | 1661 return (*a)->id() - (*b)->id(); |
| 1696 } | 1662 } |
| 1697 | 1663 |
| 1698 | 1664 |
| 1699 // Sort the unhandled live ranges so that the ranges to be processed first are | 1665 // Sort the unhandled live ranges so that the ranges to be processed first are |
| 1700 // at the end of the array list. This is convenient for the register allocation | 1666 // at the end of the array list. This is convenient for the register allocation |
| 1701 // algorithm because it is efficient to remove elements from the end. | 1667 // algorithm because it is efficient to remove elements from the end. |
| 1702 void LAllocator::SortUnhandled() { | 1668 void RegisterAllocator::SortUnhandled() { |
| 1703 TraceAlloc("Sort unhandled\n"); | 1669 TraceAlloc("Sort unhandled\n"); |
| 1704 unhandled_live_ranges_.Sort(&UnhandledSortHelper); | 1670 unhandled_live_ranges_.Sort(&UnhandledSortHelper); |
| 1705 } | 1671 } |
| 1706 | 1672 |
| 1707 | 1673 |
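Note: keeping the next range to process at the end of the list makes the hot operation, popping the range with the smallest start, O(1). A sketch of the descending-by-start comparator and the consuming loop on a std::vector:

    #include <algorithm>
    #include <vector>

    struct ToyRange { int start; int id; };

    // Descending by start, so the range with the smallest start ends up last
    // and can be popped in O(1); ids break ties deterministically.
    static bool LaterFirst(const ToyRange& a, const ToyRange& b) {
      if (a.start != b.start) return a.start > b.start;
      return a.id > b.id;
    }

    void SortUnhandledSketch(std::vector<ToyRange>* unhandled) {
      std::sort(unhandled->begin(), unhandled->end(), &LaterFirst);
      // Consumer: while (!unhandled->empty()) { process unhandled->back();
      //                                         unhandled->pop_back(); }
    }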
| 1708 bool LAllocator::UnhandledIsSorted() { | 1674 bool RegisterAllocator::UnhandledIsSorted() { |
| 1709 int len = unhandled_live_ranges_.length(); | 1675 int len = unhandled_live_ranges_.length(); |
| 1710 for (int i = 1; i < len; i++) { | 1676 for (int i = 1; i < len; i++) { |
| 1711 LiveRange* a = unhandled_live_ranges_.at(i - 1); | 1677 LiveRange* a = unhandled_live_ranges_.at(i - 1); |
| 1712 LiveRange* b = unhandled_live_ranges_.at(i); | 1678 LiveRange* b = unhandled_live_ranges_.at(i); |
| 1713 if (a->Start().Value() < b->Start().Value()) return false; | 1679 if (a->Start().Value() < b->Start().Value()) return false; |
| 1714 } | 1680 } |
| 1715 return true; | 1681 return true; |
| 1716 } | 1682 } |
| 1717 | 1683 |
| 1718 | 1684 |
| 1719 void LAllocator::FreeSpillSlot(LiveRange* range) { | 1685 void RegisterAllocator::FreeSpillSlot(LiveRange* range) { |
| 1720 // Check that we are the last range. | 1686 // Check that we are the last range. |
| 1721 if (range->next() != NULL) return; | 1687 if (range->next() != NULL) return; |
| 1722 | 1688 |
| 1723 if (!range->TopLevel()->HasAllocatedSpillOperand()) return; | 1689 if (!range->TopLevel()->HasAllocatedSpillOperand()) return; |
| 1724 | 1690 |
| 1725 int index = range->TopLevel()->GetSpillOperand()->index(); | 1691 InstructionOperand* spill_operand = range->TopLevel()->GetSpillOperand(); |
| 1726 if (index >= 0) { | 1692 if (spill_operand->IsConstant()) return; |
| 1693 if (spill_operand->index() >= 0) { |
| 1727 reusable_slots_.Add(range, zone()); | 1694 reusable_slots_.Add(range, zone()); |
| 1728 } | 1695 } |
| 1729 } | 1696 } |
| 1730 | 1697 |
| 1731 | 1698 |
| 1732 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) { | 1699 InstructionOperand* RegisterAllocator::TryReuseSpillSlot(LiveRange* range) { |
| 1733 if (reusable_slots_.is_empty()) return NULL; | 1700 if (reusable_slots_.is_empty()) return NULL; |
| 1734 if (reusable_slots_.first()->End().Value() > | 1701 if (reusable_slots_.first()->End().Value() > |
| 1735 range->TopLevel()->Start().Value()) { | 1702 range->TopLevel()->Start().Value()) { |
| 1736 return NULL; | 1703 return NULL; |
| 1737 } | 1704 } |
| 1738 LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand(); | 1705 InstructionOperand* result = |
| 1706 reusable_slots_.first()->TopLevel()->GetSpillOperand(); |
| 1739 reusable_slots_.Remove(0); | 1707 reusable_slots_.Remove(0); |
| 1740 return result; | 1708 return result; |
| 1741 } | 1709 } |
| 1742 | 1710 |
| 1743 | 1711 |
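Note: a slot queued by FreeSpillSlot is only safe to hand out again once the range that last used it has ended before the new range begins; TryReuseSpillSlot checks exactly that against the first queued slot. The same check on a toy queue (hypothetical SlotInfo record):

    #include <deque>

    // Hypothetical record of a freed slot and when its last user's range ends.
    struct SlotInfo { int end_position; int slot_index; };

    // Hand a slot out again only if its previous owner has ended by the time
    // the requesting range starts; returns -1 when no slot is reusable.
    int TryReuseSketch(std::deque<SlotInfo>* reusable, int new_range_start) {
      if (reusable->empty()) return -1;
      if (reusable->front().end_position > new_range_start) return -1;
      int slot = reusable->front().slot_index;
      reusable->pop_front();
      return slot;
    }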
| 1744 void LAllocator::ActiveToHandled(LiveRange* range) { | 1712 void RegisterAllocator::ActiveToHandled(LiveRange* range) { |
| 1745 ASSERT(active_live_ranges_.Contains(range)); | 1713 ASSERT(active_live_ranges_.Contains(range)); |
| 1746 active_live_ranges_.RemoveElement(range); | 1714 active_live_ranges_.RemoveElement(range); |
| 1747 TraceAlloc("Moving live range %d from active to handled\n", range->id()); | 1715 TraceAlloc("Moving live range %d from active to handled\n", range->id()); |
| 1748 FreeSpillSlot(range); | 1716 FreeSpillSlot(range); |
| 1749 } | 1717 } |
| 1750 | 1718 |
| 1751 | 1719 |
| 1752 void LAllocator::ActiveToInactive(LiveRange* range) { | 1720 void RegisterAllocator::ActiveToInactive(LiveRange* range) { |
| 1753 ASSERT(active_live_ranges_.Contains(range)); | 1721 ASSERT(active_live_ranges_.Contains(range)); |
| 1754 active_live_ranges_.RemoveElement(range); | 1722 active_live_ranges_.RemoveElement(range); |
| 1755 inactive_live_ranges_.Add(range, zone()); | 1723 inactive_live_ranges_.Add(range, zone()); |
| 1756 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); | 1724 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); |
| 1757 } | 1725 } |
| 1758 | 1726 |
| 1759 | 1727 |
| 1760 void LAllocator::InactiveToHandled(LiveRange* range) { | 1728 void RegisterAllocator::InactiveToHandled(LiveRange* range) { |
| 1761 ASSERT(inactive_live_ranges_.Contains(range)); | 1729 ASSERT(inactive_live_ranges_.Contains(range)); |
| 1762 inactive_live_ranges_.RemoveElement(range); | 1730 inactive_live_ranges_.RemoveElement(range); |
| 1763 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); | 1731 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); |
| 1764 FreeSpillSlot(range); | 1732 FreeSpillSlot(range); |
| 1765 } | 1733 } |
| 1766 | 1734 |
| 1767 | 1735 |
| 1768 void LAllocator::InactiveToActive(LiveRange* range) { | 1736 void RegisterAllocator::InactiveToActive(LiveRange* range) { |
| 1769 ASSERT(inactive_live_ranges_.Contains(range)); | 1737 ASSERT(inactive_live_ranges_.Contains(range)); |
| 1770 inactive_live_ranges_.RemoveElement(range); | 1738 inactive_live_ranges_.RemoveElement(range); |
| 1771 active_live_ranges_.Add(range, zone()); | 1739 active_live_ranges_.Add(range, zone()); |
| 1772 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); | 1740 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); |
| 1773 } | 1741 } |
| 1774 | 1742 |
| 1775 | 1743 |
| 1776 // TryAllocateFreeReg and AllocateBlockedReg assume this | 1744 // TryAllocateFreeReg and AllocateBlockedReg assume this |
| 1777 // when allocating local arrays. | 1745 // when allocating local arrays. |
| 1778 STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >= | 1746 STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >= |
| 1779 Register::kMaxNumAllocatableRegisters); | 1747 Register::kMaxNumAllocatableRegisters); |
| 1780 | 1748 |
| 1781 | 1749 |
| 1782 bool LAllocator::TryAllocateFreeReg(LiveRange* current) { | 1750 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) { |
| 1783 LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters]; | 1751 LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters]; |
| 1784 | 1752 |
| 1785 for (int i = 0; i < num_registers_; i++) { | 1753 for (int i = 0; i < num_registers_; i++) { |
| 1786 free_until_pos[i] = LifetimePosition::MaxPosition(); | 1754 free_until_pos[i] = LifetimePosition::MaxPosition(); |
| 1787 } | 1755 } |
| 1788 | 1756 |
| 1789 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 1757 for (int i = 0; i < active_live_ranges_.length(); ++i) { |
| 1790 LiveRange* cur_active = active_live_ranges_.at(i); | 1758 LiveRange* cur_active = active_live_ranges_.at(i); |
| 1791 free_until_pos[cur_active->assigned_register()] = | 1759 free_until_pos[cur_active->assigned_register()] = |
| 1792 LifetimePosition::FromInstructionIndex(0); | 1760 LifetimePosition::FromInstructionIndex(0); |
| 1793 } | 1761 } |
| 1794 | 1762 |
| 1795 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { | 1763 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { |
| 1796 LiveRange* cur_inactive = inactive_live_ranges_.at(i); | 1764 LiveRange* cur_inactive = inactive_live_ranges_.at(i); |
| 1797 ASSERT(cur_inactive->End().Value() > current->Start().Value()); | 1765 ASSERT(cur_inactive->End().Value() > current->Start().Value()); |
| 1798 LifetimePosition next_intersection = | 1766 LifetimePosition next_intersection = |
| 1799 cur_inactive->FirstIntersection(current); | 1767 cur_inactive->FirstIntersection(current); |
| 1800 if (!next_intersection.IsValid()) continue; | 1768 if (!next_intersection.IsValid()) continue; |
| 1801 int cur_reg = cur_inactive->assigned_register(); | 1769 int cur_reg = cur_inactive->assigned_register(); |
| 1802 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); | 1770 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); |
| 1803 } | 1771 } |
| 1804 | 1772 |
| 1805 LOperand* hint = current->FirstHint(); | 1773 InstructionOperand* hint = current->FirstHint(); |
| 1806 if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) { | 1774 if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) { |
| 1807 int register_index = hint->index(); | 1775 int register_index = hint->index(); |
| 1808 TraceAlloc( | 1776 TraceAlloc( |
| 1809 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n", | 1777 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n", |
| 1810 RegisterName(register_index), | 1778 RegisterName(register_index), |
| 1811 free_until_pos[register_index].Value(), | 1779 free_until_pos[register_index].Value(), |
| 1812 current->id(), | 1780 current->id(), |
| 1813 current->End().Value()); | 1781 current->End().Value()); |
| 1814 | 1782 |
| 1815 // The desired register is free until the end of the current live range. | 1783 // The desired register is free until the end of the current live range. |
| (...skipping 35 matching lines...) |
| 1851 ASSERT(pos.Value() >= current->End().Value()); | 1819 ASSERT(pos.Value() >= current->End().Value()); |
| 1852 TraceAlloc("Assigning free reg %s to live range %d\n", | 1820 TraceAlloc("Assigning free reg %s to live range %d\n", |
| 1853 RegisterName(reg), | 1821 RegisterName(reg), |
| 1854 current->id()); | 1822 current->id()); |
| 1855 SetLiveRangeAssignedRegister(current, reg); | 1823 SetLiveRangeAssignedRegister(current, reg); |
| 1856 | 1824 |
| 1857 return true; | 1825 return true; |
| 1858 } | 1826 } |
| 1859 | 1827 |
| 1860 | 1828 |
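The free-register step above is the heart of linear scan: every register starts out free forever, active ranges pin their register immediately, and inactive ranges pin theirs only from their first intersection with `current`. Below is a minimal self-contained sketch of that bookkeeping, using plain `int` positions instead of `LifetimePosition` and an invented register count; none of these names are V8's API.

```cpp
#include <algorithm>
#include <array>
#include <climits>
#include <cstdio>
#include <vector>

struct Range { int reg; int start; int end; };  // hypothetical live range

int main() {
  constexpr int kNumRegs = 4;
  std::array<int, kNumRegs> free_until;
  free_until.fill(INT_MAX);  // every register starts "free forever"

  // Active ranges block their assigned register immediately.
  std::vector<Range> active = {{0, 0, 20}, {2, 5, 15}};
  for (const Range& r : active) free_until[r.reg] = 0;

  // Inactive ranges only block from their first intersection with current;
  // here we pretend register 1's next intersection is at position 12.
  free_until[1] = std::min(free_until[1], 12);

  // Pick the register that stays free the longest.
  int reg = 0;
  for (int i = 1; i < kNumRegs; ++i)
    if (free_until[i] > free_until[reg]) reg = i;
  std::printf("chose reg %d, free until %d\n", reg, free_until[reg]);
}
```

If the winner is free past `current->End()`, the whole range takes it; the skipped middle of the function handles the case where the best register is not free long enough, splitting `current` so that at least its head can still use the register.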
| 1861 void LAllocator::AllocateBlockedReg(LiveRange* current) { | 1829 void RegisterAllocator::AllocateBlockedReg(LiveRange* current) { |
| 1862 UsePosition* register_use = current->NextRegisterPosition(current->Start()); | 1830 UsePosition* register_use = current->NextRegisterPosition(current->Start()); |
| 1863 if (register_use == NULL) { | 1831 if (register_use == NULL) { |
| 1864 // There is no use in the current live range that requires a register. | 1832 // There is no use in the current live range that requires a register. |
| 1865 // We can just spill it. | 1833 // We can just spill it. |
| 1866 Spill(current); | 1834 Spill(current); |
| 1867 return; | 1835 return; |
| 1868 } | 1836 } |
| 1869 | 1837 |
| 1870 | 1838 |
| 1871 LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters]; | 1839 LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters]; |
| (...skipping 67 matching lines...) |
| 1939 current->id()); | 1907 current->id()); |
| 1940 SetLiveRangeAssignedRegister(current, reg); | 1908 SetLiveRangeAssignedRegister(current, reg); |
| 1941 | 1909 |
| 1942 // This register was not free. Thus we need to find and spill | 1910 // This register was not free. Thus we need to find and spill |
| 1943 // parts of active and inactive live ranges that use the same register | 1911 // parts of active and inactive live ranges that use the same register |
| 1944 // at the same lifetime positions as current. | 1912 // at the same lifetime positions as current. |
| 1945 SplitAndSpillIntersecting(current); | 1913 SplitAndSpillIntersecting(current); |
| 1946 } | 1914 } |
| 1947 | 1915 |
| 1948 | 1916 |
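When no register is free, the allocator weighs eviction against spilling. The 67 skipped lines compute, per register, the next position at which competing ranges need it; the decision that follows looks roughly like this hedged sketch (scalar positions and invented values, not the real types or the elided V8 code):

```cpp
#include <algorithm>
#include <array>
#include <cstdio>

int main() {
  constexpr int kNumRegs = 3;
  // use_pos[r]: next position at which some other range needs register r
  // (values invented for the example).
  std::array<int, kNumRegs> use_pos = {8, 30, 14};

  const int current_start = 4;
  const int first_register_use = 10;  // current's first must-have-reg use

  // Pick the register whose competing use is furthest away.
  int reg = static_cast<int>(
      std::max_element(use_pos.begin(), use_pos.end()) - use_pos.begin());

  if (use_pos[reg] < first_register_use) {
    // Even the best register is reclaimed before current's first register
    // use: spill current from its start up to that use instead.
    std::printf("spill current from %d until use at %d\n", current_start,
                first_register_use);
  } else {
    // Grab the register; intersecting ranges get split and spilled.
    std::printf("evict into reg %d (next competing use at %d)\n", reg,
                use_pos[reg]);
  }
}
```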
| 1949 LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range, | 1917 LifetimePosition RegisterAllocator::FindOptimalSpillingPos(LiveRange* range, |
| 1950 LifetimePosition pos) { | 1918 LifetimePosition pos) { |
| 1951 HBasicBlock* block = GetBlock(pos.InstructionStart()); | 1919 BasicBlock* block = GetBlock(pos.InstructionStart()); |
| 1952 HBasicBlock* loop_header = | 1920 BasicBlock* loop_header = |
| 1953 block->IsLoopHeader() ? block : block->parent_loop_header(); | 1921 block->IsLoopHeader() ? block : code()->GetContainingLoop(block); |
| 1954 | 1922 |
| 1955 if (loop_header == NULL) return pos; | 1923 if (loop_header == NULL) return pos; |
| 1956 | 1924 |
| 1957 UsePosition* prev_use = | 1925 UsePosition* prev_use = |
| 1958 range->PreviousUsePositionRegisterIsBeneficial(pos); | 1926 range->PreviousUsePositionRegisterIsBeneficial(pos); |
| 1959 | 1927 |
| 1960 while (loop_header != NULL) { | 1928 while (loop_header != NULL) { |
| 1961 // We are going to spill the live range inside the loop. | 1929 // We are going to spill the live range inside the loop. |
| 1962 // If possible, try to move the spilling position back to the loop header. | 1930 // If possible, try to move the spilling position back to the loop header. |
| 1963 // This will reduce the number of memory moves on the back edge. | 1931 // This will reduce the number of memory moves on the back edge. |
| 1964 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex( | 1932 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex( |
| 1965 loop_header->first_instruction_index()); | 1933 loop_header->first_instruction_index()); |
| 1966 | 1934 |
| 1967 if (range->Covers(loop_start)) { | 1935 if (range->Covers(loop_start)) { |
| 1968 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) { | 1936 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) { |
| 1969 // No register-beneficial use inside the loop before pos. | 1937 // No register-beneficial use inside the loop before pos. |
| 1970 pos = loop_start; | 1938 pos = loop_start; |
| 1971 } | 1939 } |
| 1972 } | 1940 } |
| 1973 | 1941 |
| 1974 // Try hoisting out to an outer loop. | 1942 // Try hoisting out to an outer loop. |
| 1975 loop_header = loop_header->parent_loop_header(); | 1943 loop_header = code()->GetContainingLoop(loop_header); |
| 1976 } | 1944 } |
| 1977 | 1945 |
| 1978 return pos; | 1946 return pos; |
| 1979 } | 1947 } |
| 1980 | 1948 |
| 1981 | 1949 |
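Hoisting the spill out of loops is a pure position calculation: walk outward through enclosing loop headers and, whenever the range covers a header with no register-beneficial use between the header and the requested position, move the spill up to that header. A toy version with hypothetical block/loop structs (not V8's `BasicBlock` or `GetContainingLoop`):

```cpp
#include <cstdio>

struct Loop {
  int header_pos;      // position of the loop header's first instruction
  const Loop* parent;  // enclosing loop, if any
};

int FindSpillPos(int pos, int range_start, int prev_beneficial_use,
                 const Loop* loop) {
  while (loop != nullptr) {
    int loop_start = loop->header_pos;
    bool covers = range_start <= loop_start && loop_start <= pos;
    if (covers && prev_beneficial_use < loop_start) {
      pos = loop_start;  // spill once on loop entry, not on every iteration
    }
    loop = loop->parent;  // try hoisting out to an outer loop
  }
  return pos;
}

int main() {
  Loop outer = {10, nullptr};
  Loop inner = {20, &outer};
  // Range live from 5, last beneficial use at 8, spill requested at 25:
  // hoists past both headers, ending at the outer header (10).
  std::printf("spill at %d\n", FindSpillPos(25, 5, 8, &inner));
}
```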
| 1982 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) { | 1950 void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) { |
| 1983 ASSERT(current->HasRegisterAssigned()); | 1951 ASSERT(current->HasRegisterAssigned()); |
| 1984 int reg = current->assigned_register(); | 1952 int reg = current->assigned_register(); |
| 1985 LifetimePosition split_pos = current->Start(); | 1953 LifetimePosition split_pos = current->Start(); |
| 1986 for (int i = 0; i < active_live_ranges_.length(); ++i) { | 1954 for (int i = 0; i < active_live_ranges_.length(); ++i) { |
| 1987 LiveRange* range = active_live_ranges_[i]; | 1955 LiveRange* range = active_live_ranges_[i]; |
| 1988 if (range->assigned_register() == reg) { | 1956 if (range->assigned_register() == reg) { |
| 1989 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); | 1957 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); |
| 1990 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos); | 1958 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos); |
| 1991 if (next_pos == NULL) { | 1959 if (next_pos == NULL) { |
| 1992 SpillAfter(range, spill_pos); | 1960 SpillAfter(range, spill_pos); |
| (...skipping 29 matching lines...) |
| 2022 } | 1990 } |
| 2023 if (!AllocationOk()) return; | 1991 if (!AllocationOk()) return; |
| 2024 InactiveToHandled(range); | 1992 InactiveToHandled(range); |
| 2025 --i; | 1993 --i; |
| 2026 } | 1994 } |
| 2027 } | 1995 } |
| 2028 } | 1996 } |
| 2029 } | 1997 } |
| 2030 | 1998 |
| 2031 | 1999 |
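Note the `--i` after `ActiveToHandled`/`InactiveToHandled` in both loops: the handled range is removed from the very list being iterated, so the index has to step back. A simplified stand-alone illustration of that eviction pattern (placeholder types, not V8's range lists):

```cpp
#include <cstdio>
#include <vector>

struct R { int reg; int next_reg_use; };  // next_reg_use < 0: no more uses

int main() {
  const int chosen = 2, split_pos = 7;
  std::vector<R> active = {{1, 9}, {2, -1}, {2, 12}};
  for (int i = 0; i < static_cast<int>(active.size()); ++i) {
    if (active[i].reg != chosen) continue;
    if (active[i].next_reg_use < 0) {
      // No further register use: spill the tail outright.
      std::printf("spill tail after position %d\n", split_pos);
    } else {
      // Split before the competing use and let the tail compete again.
      std::printf("split at %d, re-queue before next use at %d\n", split_pos,
                  active[i].next_reg_use);
    }
    active.erase(active.begin() + i);  // ActiveToHandled analogue
    --i;  // compensate for the removal, mirroring the --i in the diff
  }
}
```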
| 2032 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { | 2000 bool RegisterAllocator::IsBlockBoundary(LifetimePosition pos) { |
| 2033 return pos.IsInstructionStart() && | 2001 return pos.IsInstructionStart() && |
| 2034 InstructionAt(pos.InstructionIndex())->IsLabel(); | 2002 InstructionAt(pos.InstructionIndex())->IsBlockStart(); |
| 2035 } | 2003 } |
| 2036 | 2004 |
| 2037 | 2005 |
| 2038 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) { | 2006 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, |
| 2007 LifetimePosition pos) { |
| 2039 ASSERT(!range->IsFixed()); | 2008 ASSERT(!range->IsFixed()); |
| 2040 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); | 2009 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); |
| 2041 | 2010 |
| 2042 if (pos.Value() <= range->Start().Value()) return range; | 2011 if (pos.Value() <= range->Start().Value()) return range; |
| 2043 | 2012 |
| 2044 // We can't properly connect live ranges if the split occurred at the end | 2013 // We can't properly connect live ranges if the split occurred at the end |
| 2045 // of a control instruction. | 2014 // of a control instruction. |
| 2046 ASSERT(pos.IsInstructionStart() || | 2015 ASSERT(pos.IsInstructionStart() || |
| 2047 !chunk_->instructions()->at(pos.InstructionIndex())->IsControl()); | 2016 !InstructionAt(pos.InstructionIndex())->IsControl()); |
| 2048 | 2017 |
| 2049 int vreg = GetVirtualRegister(); | 2018 int vreg = GetVirtualRegister(); |
| 2050 if (!AllocationOk()) return NULL; | 2019 if (!AllocationOk()) return NULL; |
| 2051 LiveRange* result = LiveRangeFor(vreg); | 2020 LiveRange* result = LiveRangeFor(vreg); |
| 2052 range->SplitAt(pos, result, zone()); | 2021 range->SplitAt(pos, result, zone()); |
| 2053 return result; | 2022 return result; |
| 2054 } | 2023 } |
| 2055 | 2024 |
| 2056 | 2025 |
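The split contract itself is simple: the original range keeps the head, and a freshly numbered child range takes the tail. A toy illustration with plain half-open intervals (V8's `LiveRange::SplitAt` additionally partitions the use-interval and use-position lists):

```cpp
#include <cassert>
#include <cstdio>

struct Interval { int start, end; };  // half-open [start, end)

// Split [start, end) at pos: the original keeps [start, pos),
// the returned child owns [pos, end).
Interval SplitAt(Interval* range, int pos) {
  assert(range->start < pos && pos < range->end);
  Interval child = {pos, range->end};
  range->end = pos;
  return child;
}

int main() {
  Interval r = {4, 20};
  Interval tail = SplitAt(&r, 10);
  std::printf("[%d,%d) and [%d,%d)\n", r.start, r.end, tail.start, tail.end);
}
```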
| 2057 LiveRange* LAllocator::SplitBetween(LiveRange* range, | 2026 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, |
| 2058 LifetimePosition start, | 2027 LifetimePosition start, |
| 2059 LifetimePosition end) { | 2028 LifetimePosition end) { |
| 2060 ASSERT(!range->IsFixed()); | 2029 ASSERT(!range->IsFixed()); |
| 2061 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", | 2030 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", |
| 2062 range->id(), | 2031 range->id(), |
| 2063 start.Value(), | 2032 start.Value(), |
| 2064 end.Value()); | 2033 end.Value()); |
| 2065 | 2034 |
| 2066 LifetimePosition split_pos = FindOptimalSplitPos(start, end); | 2035 LifetimePosition split_pos = FindOptimalSplitPos(start, end); |
| 2067 ASSERT(split_pos.Value() >= start.Value()); | 2036 ASSERT(split_pos.Value() >= start.Value()); |
| 2068 return SplitRangeAt(range, split_pos); | 2037 return SplitRangeAt(range, split_pos); |
| 2069 } | 2038 } |
| 2070 | 2039 |
| 2071 | 2040 |
| 2072 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start, | 2041 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, |
| 2073 LifetimePosition end) { | 2042 LifetimePosition end) { |
| 2074 int start_instr = start.InstructionIndex(); | 2043 int start_instr = start.InstructionIndex(); |
| 2075 int end_instr = end.InstructionIndex(); | 2044 int end_instr = end.InstructionIndex(); |
| 2076 ASSERT(start_instr <= end_instr); | 2045 ASSERT(start_instr <= end_instr); |
| 2077 | 2046 |
| 2078 // We have no choice. | 2047 // We have no choice. |
| 2079 if (start_instr == end_instr) return end; | 2048 if (start_instr == end_instr) return end; |
| 2080 | 2049 |
| 2081 HBasicBlock* start_block = GetBlock(start); | 2050 BasicBlock* start_block = GetBlock(start); |
| 2082 HBasicBlock* end_block = GetBlock(end); | 2051 BasicBlock* end_block = GetBlock(end); |
| 2083 | 2052 |
| 2084 if (end_block == start_block) { | 2053 if (end_block == start_block) { |
| 2085 // The interval is split in the same basic block. Split at the latest | 2054 // The interval is split in the same basic block. Split at the latest |
| 2086 // possible position. | 2055 // possible position. |
| 2087 return end; | 2056 return end; |
| 2088 } | 2057 } |
| 2089 | 2058 |
| 2090 HBasicBlock* block = end_block; | 2059 BasicBlock* block = end_block; |
| 2091 // Find header of outermost loop. | 2060 // Find header of outermost loop. |
| 2092 while (block->parent_loop_header() != NULL && | 2061 // TODO(titzer): fix redundancy below. |
| 2093 block->parent_loop_header()->block_id() > start_block->block_id()) { | 2062 while (code()->GetContainingLoop(block) != NULL && |
| 2094 block = block->parent_loop_header(); | 2063 code()->GetContainingLoop(block)->rpo_number_ > |
| 2064 start_block->rpo_number_) { |
| 2065 block = code()->GetContainingLoop(block); |
| 2095 } | 2066 } |
| 2096 | 2067 |
| 2097 // We did not find any suitable outer loop. Split at the latest possible | 2068 // We did not find any suitable outer loop. Split at the latest possible |
| 2098 // position unless end_block is a loop header itself. | 2069 // position unless end_block is a loop header itself. |
| 2099 if (block == end_block && !end_block->IsLoopHeader()) return end; | 2070 if (block == end_block && !end_block->IsLoopHeader()) return end; |
| 2100 | 2071 |
| 2101 return LifetimePosition::FromInstructionIndex( | 2072 return LifetimePosition::FromInstructionIndex( |
| 2102 block->first_instruction_index()); | 2073 block->first_instruction_index()); |
| 2103 } | 2074 } |
| 2104 | 2075 |
| 2105 | 2076 |
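The NEW side swaps `parent_loop_header()`/`block_id()` for `code()->GetContainingLoop(block)`/`rpo_number_`, but the search is unchanged: climb the loop tree from `end`'s block while the enclosing header still lies after `start`'s block (compared by RPO number), then split at the first instruction of the header found. A compact sketch with stand-in block structs:

```cpp
#include <cstdio>

struct Block {
  int rpo;                       // reverse-postorder number
  int first_instr;               // index of the block's first instruction
  const Block* containing_loop;  // enclosing loop header, if any
};

int main() {
  Block outer = {2, 8, nullptr};
  Block inner = {4, 16, &outer};
  Block start_block = {1, 0, nullptr};
  Block end_block = {6, 30, &inner};

  // Climb to the outermost loop header that still lies after start_block.
  const Block* block = &end_block;
  while (block->containing_loop != nullptr &&
         block->containing_loop->rpo > start_block.rpo) {
    block = block->containing_loop;
  }
  // Ends at `outer`, so the split lands on its first instruction.
  std::printf("split at instruction %d\n", block->first_instr);
}
```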
| 2106 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { | 2077 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { |
| 2107 LiveRange* second_part = SplitRangeAt(range, pos); | 2078 LiveRange* second_part = SplitRangeAt(range, pos); |
| 2108 if (!AllocationOk()) return; | 2079 if (!AllocationOk()) return; |
| 2109 Spill(second_part); | 2080 Spill(second_part); |
| 2110 } | 2081 } |
| 2111 | 2082 |
| 2112 | 2083 |
| 2113 void LAllocator::SpillBetween(LiveRange* range, | 2084 void RegisterAllocator::SpillBetween(LiveRange* range, |
| 2114 LifetimePosition start, | 2085 LifetimePosition start, |
| 2115 LifetimePosition end) { | 2086 LifetimePosition end) { |
| 2116 SpillBetweenUntil(range, start, start, end); | 2087 SpillBetweenUntil(range, start, start, end); |
| 2117 } | 2088 } |
| 2118 | 2089 |
| 2119 | 2090 |
| 2120 void LAllocator::SpillBetweenUntil(LiveRange* range, | 2091 void RegisterAllocator::SpillBetweenUntil(LiveRange* range, |
| 2121 LifetimePosition start, | 2092 LifetimePosition start, |
| 2122 LifetimePosition until, | 2093 LifetimePosition until, |
| 2123 LifetimePosition end) { | 2094 LifetimePosition end) { |
| 2124 CHECK(start.Value() < end.Value()); | 2095 CHECK(start.Value() < end.Value()); |
| 2125 LiveRange* second_part = SplitRangeAt(range, start); | 2096 LiveRange* second_part = SplitRangeAt(range, start); |
| 2126 if (!AllocationOk()) return; | 2097 if (!AllocationOk()) return; |
| 2127 | 2098 |
| 2128 if (second_part->Start().Value() < end.Value()) { | 2099 if (second_part->Start().Value() < end.Value()) { |
| 2129 // The split result intersects with [start, end[. | 2100 // The split result intersects with [start, end[. |
| 2130 // Split it at a position inside ]start+1, end[, spill the middle part, | 2101 // Split it at a position inside ]start+1, end[, spill the middle part, |
| 2131 // and put the rest back on the unhandled list. | 2102 // and put the rest back on the unhandled list. |
| 2132 LiveRange* third_part = SplitBetween( | 2103 LiveRange* third_part = SplitBetween( |
| 2133 second_part, | 2104 second_part, |
| 2134 Max(second_part->Start().InstructionEnd(), until), | 2105 Max(second_part->Start().InstructionEnd(), until), |
| 2135 end.PrevInstruction().InstructionEnd()); | 2106 end.PrevInstruction().InstructionEnd()); |
| 2136 if (!AllocationOk()) return; | 2107 if (!AllocationOk()) return; |
| 2137 | 2108 |
| 2138 ASSERT(third_part != second_part); | 2109 ASSERT(third_part != second_part); |
| 2139 | 2110 |
| 2140 Spill(second_part); | 2111 Spill(second_part); |
| 2141 AddToUnhandledSorted(third_part); | 2112 AddToUnhandledSorted(third_part); |
| 2142 } else { | 2113 } else { |
| 2143 // The split result does not intersect with [start, end[. | 2114 // The split result does not intersect with [start, end[. |
| 2144 // Nothing to spill. Just put it back on the unhandled list as a whole. | 2115 // Nothing to spill. Just put it back on the unhandled list as a whole. |
| 2145 AddToUnhandledSorted(second_part); | 2116 AddToUnhandledSorted(second_part); |
| 2146 } | 2117 } |
| 2147 } | 2118 } |
| 2148 | 2119 |
| 2149 | 2120 |
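The effect of `SpillBetweenUntil` is a three-way split: the head keeps its register up to `start`, the middle part is spilled, and the tail is re-queued so it can compete for a register again near `end`. A toy walk-through with integer intervals (the real code picks the third part's start via `FindOptimalSplitPos`; the "latest legal position" choice below is a simplification):

```cpp
#include <cstdio>

struct Interval { int start, end; };  // half-open [start, end)

int main() {
  Interval range = {0, 40};
  const int start = 10, until = 10, end = 30;

  Interval second = {start, range.end};  // SplitRangeAt(range, start)
  range.end = start;

  if (second.start < end) {
    int lo = until + 1, hi = end - 1;  // legal split window ]until, end[
    int split = hi >= lo ? hi : lo;    // latest legal position (simplified)
    Interval third = {split, second.end};  // SplitBetween analogue
    second.end = split;
    std::printf("keep [%d,%d), spill [%d,%d), re-queue [%d,%d)\n",
                range.start, range.end, second.start, second.end,
                third.start, third.end);
  } else {
    std::printf("nothing to spill; re-queue [%d,%d) as a whole\n",
                second.start, second.end);
  }
}
```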
| 2150 void LAllocator::Spill(LiveRange* range) { | 2121 void RegisterAllocator::Spill(LiveRange* range) { |
| 2151 ASSERT(!range->IsSpilled()); | 2122 ASSERT(!range->IsSpilled()); |
| 2152 TraceAlloc("Spilling live range %d\n", range->id()); | 2123 TraceAlloc("Spilling live range %d\n", range->id()); |
| 2153 LiveRange* first = range->TopLevel(); | 2124 LiveRange* first = range->TopLevel(); |
| 2154 | 2125 |
| 2155 if (!first->HasAllocatedSpillOperand()) { | 2126 if (!first->HasAllocatedSpillOperand()) { |
| 2156 LOperand* op = TryReuseSpillSlot(range); | 2127 InstructionOperand* op = TryReuseSpillSlot(range); |
| 2157 if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind()); | 2128 if (op == NULL) { |
| 2129 // Allocate a new operand referring to the spill slot. |
| 2130 RegisterKind kind = range->Kind(); |
| 2131 int index = code()->frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS); |
| 2132 if (kind == DOUBLE_REGISTERS) { |
| 2133 op = DoubleStackSlotOperand::Create(index, zone()); |
| 2134 } else { |
| 2135 ASSERT(kind == GENERAL_REGISTERS); |
| 2136 op = StackSlotOperand::Create(index, zone()); |
| 2137 } |
| 2138 } |
| 2158 first->SetSpillOperand(op); | 2139 first->SetSpillOperand(op); |
| 2159 } | 2140 } |
| 2160 range->MakeSpilled(chunk()->zone()); | 2141 range->MakeSpilled(code_zone()); |
| 2161 } | 2142 } |
| 2162 | 2143 |
| 2163 | 2144 |
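On the NEW side, spill-slot allocation moves inline: instead of `chunk_->GetNextSpillSlot(...)`, the allocator asks the frame for a slot and wraps it in a stack-slot operand keyed on the register kind. A hedged sketch of that lazy, once-per-top-level-range assignment follows; the `Frame` and range types are placeholders, and whether a double slot is wider than a general one is target-dependent and merely assumed here.

```cpp
#include <cstdio>

enum RegisterKind { GENERAL_REGS, DOUBLE_REGS };

struct Frame {
  int next_slot = 0;
  // Assumption for this sketch: a double slot consumes two single slots.
  int AllocateSpillSlot(bool is_double) {
    int index = next_slot;
    next_slot += is_double ? 2 : 1;
    return index;
  }
};

struct TopLevelRange {
  RegisterKind kind;
  int spill_slot;  // -1 until a slot has been assigned
};

void Spill(TopLevelRange* range, Frame* frame) {
  if (range->spill_slot < 0) {  // assign at most one slot per range
    range->spill_slot = frame->AllocateSpillSlot(range->kind == DOUBLE_REGS);
  }
  std::printf("spilled to slot %d\n", range->spill_slot);
}

int main() {
  Frame frame;
  TopLevelRange r = {DOUBLE_REGS, -1};
  Spill(&r, &frame);
  Spill(&r, &frame);  // second spill reuses the same slot
}
```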
| 2164 int LAllocator::RegisterCount() const { | 2145 int RegisterAllocator::RegisterCount() const { |
| 2165 return num_registers_; | 2146 return num_registers_; |
| 2166 } | 2147 } |
| 2167 | 2148 |
| 2168 | 2149 |
| 2169 #ifdef DEBUG | 2150 #ifdef DEBUG |
| 2170 | 2151 |
| 2171 | 2152 |
| 2172 void LAllocator::Verify() const { | 2153 void RegisterAllocator::Verify() const { |
| 2173 for (int i = 0; i < live_ranges()->length(); ++i) { | 2154 for (int i = 0; i < live_ranges()->length(); ++i) { |
| 2174 LiveRange* current = live_ranges()->at(i); | 2155 LiveRange* current = live_ranges()->at(i); |
| 2175 if (current != NULL) current->Verify(); | 2156 if (current != NULL) current->Verify(); |
| 2176 } | 2157 } |
| 2177 } | 2158 } |
| 2178 | 2159 |
| 2179 | 2160 |
| 2180 #endif | 2161 #endif |
| 2181 | 2162 |
| 2182 | 2163 |
| 2183 LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator) | 2164 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range, |
| 2184 : CompilationPhase(name, allocator->graph()->info()), | 2165 int reg) { |
| 2166 if (range->Kind() == DOUBLE_REGISTERS) { |
| 2167 assigned_double_registers_->Add(reg); |
| 2168 } else { |
| 2169 ASSERT(range->Kind() == GENERAL_REGISTERS); |
| 2170 assigned_registers_->Add(reg); |
| 2171 } |
| 2172 range->set_assigned_register(reg, code_zone()); |
| 2173 } |
| 2174 |
| 2175 |
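The new `SetLiveRangeAssignedRegister` also records every handed-out register in a per-kind set (`assigned_registers_` / `assigned_double_registers_`) that later phases can consult, e.g. to decide which registers the frame must preserve. A minimal bit-set analogue (plain `uint32_t` masks standing in for V8's `BitVector`):

```cpp
#include <cstdint>
#include <cstdio>

enum RegisterKind { GENERAL_REGS, DOUBLE_REGS };

struct AssignedRegisters {
  uint32_t general = 0;
  uint32_t fp = 0;
  void Add(RegisterKind kind, int reg) {
    // Record the register in the set matching its kind.
    (kind == DOUBLE_REGS ? fp : general) |= 1u << reg;
  }
};

int main() {
  AssignedRegisters assigned;
  assigned.Add(GENERAL_REGS, 3);
  assigned.Add(DOUBLE_REGS, 1);
  std::printf("general=0x%x fp=0x%x\n", assigned.general, assigned.fp);
}
```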
| 2176 RegisterAllocatorPhase::RegisterAllocatorPhase(const char* name, |
| 2177 RegisterAllocator* allocator) |
| 2178 : CompilationPhase(name, allocator->code()->linkage()->info()), |
| 2185 allocator_(allocator) { | 2179 allocator_(allocator) { |
| 2186 if (FLAG_hydrogen_stats) { | 2180 if (FLAG_turbo_stats) { |
| 2187 allocator_zone_start_allocation_size_ = | 2181 allocator_zone_start_allocation_size_ = |
| 2188 allocator->zone()->allocation_size(); | 2182 allocator->zone()->allocation_size(); |
| 2189 } | 2183 } |
| 2190 } | 2184 } |
| 2191 | 2185 |
| 2192 | 2186 |
| 2193 LAllocatorPhase::~LAllocatorPhase() { | 2187 RegisterAllocatorPhase::~RegisterAllocatorPhase() { |
| 2194 if (FLAG_hydrogen_stats) { | 2188 if (FLAG_turbo_stats) { |
| 2195 unsigned size = allocator_->zone()->allocation_size() - | 2189 unsigned size = allocator_->zone()->allocation_size() - |
| 2196 allocator_zone_start_allocation_size_; | 2190 allocator_zone_start_allocation_size_; |
| 2197 isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size); | 2191 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size); |
| 2198 } | 2192 } |
| 2199 | |
| 2200 if (ShouldProduceTraceOutput()) { | |
| 2201 isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk()); | |
| 2202 isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_); | |
| 2203 } | |
| 2204 | |
| 2205 #ifdef DEBUG | 2193 #ifdef DEBUG |
| 2206 if (allocator_ != NULL) allocator_->Verify(); | 2194 if (allocator_ != NULL) allocator_->Verify(); |
| 2207 #endif | 2195 #endif |
| 2208 } | 2196 } |
| 2209 | 2197 |
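The phase class follows the usual RAII measurement pattern: capture the zone's allocation size in the constructor, report the delta (and run the debug `Verify`) in the destructor; the NEW side simply keys it on `FLAG_turbo_stats` and drops the Lithium tracing. A generic sketch of that pattern with placeholder `Zone`/stats types:

```cpp
#include <cstddef>
#include <cstdio>

struct Zone {
  size_t allocation_size = 0;  // bytes handed out by this zone so far
};

class PhaseScope {
 public:
  PhaseScope(const char* name, Zone* zone)
      : name_(name), zone_(zone), start_(zone->allocation_size) {}
  ~PhaseScope() {
    // Report how much the phase allocated, akin to SaveTiming above.
    std::printf("%s: %zu zone bytes\n", name_,
                zone_->allocation_size - start_);
  }

 private:
  const char* name_;
  Zone* zone_;
  size_t start_;
};

int main() {
  Zone zone;
  {
    PhaseScope scope("register allocation", &zone);
    zone.allocation_size += 1024;  // pretend the allocator used the zone
  }
}
```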
| 2210 | 2198 } } } // namespace v8::internal::compiler |
| 2211 } } // namespace v8::internal | |