| 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are |
| 4 // met: |
| 5 // |
| 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. |
| 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 |
| 28 #include "lithium-allocator.h" |
| 29 |
| 30 #include "data-flow.h" |
| 31 #include "hydrogen.h" |
| 32 #include "string-stream.h" |
| 33 |
| 34 #if V8_TARGET_ARCH_IA32 |
| 35 #include "ia32/lithium-ia32.h" |
| 36 #elif V8_TARGET_ARCH_X64 |
| 37 #include "x64/lithium-x64.h" |
| 38 #elif V8_TARGET_ARCH_ARM |
| 39 #include "arm/lithium-arm.h" |
| 40 #else |
| 41 #error "Unknown architecture." |
| 42 #endif |
| 43 |
| 44 namespace v8 { |
| 45 namespace internal { |
| 46 |
| 47 |
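| // Each operand kind preallocates a static cache of its first |
| // kNumCachedOperands instances; SetupCache stamps every slot with its kind |
| // and index so that operands with small indices can be shared. |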
| 48 #define DEFINE_OPERAND_CACHE(name, type) \ |
| 49 name name::cache[name::kNumCachedOperands]; \ |
| 50 void name::SetupCache() { \ |
| 51 for (int i = 0; i < kNumCachedOperands; i++) { \ |
| 52 cache[i].ConvertTo(type, i); \ |
| 53 } \ |
| 54 } |
| 55 |
| 56 DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND) |
| 57 DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT) |
| 58 DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT) |
| 59 DEFINE_OPERAND_CACHE(LRegister, REGISTER) |
| 60 DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER) |
| 61 |
| 62 #undef DEFINE_OPERAND_CACHE |
| 63 |
| 64 |
| 65 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
| 66 return a.Value() < b.Value() ? a : b; |
| 67 } |
| 68 |
| 69 |
| 70 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { |
| 71 return a.Value() > b.Value() ? a : b; |
| 72 } |
| 73 |
| 74 |
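| // Debug printing. An unallocated operand prints as "v<N>" for its virtual |
| // register, followed by a short code for its policy, e.g. "(R)" for |
| // MUST_HAVE_REGISTER. |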
| 75 void LOperand::PrintTo(StringStream* stream) { |
| 76 LUnallocated* unalloc = NULL; |
| 77 switch (kind()) { |
| 78 case INVALID: |
| 79 break; |
| 80 case UNALLOCATED: |
| 81 unalloc = LUnallocated::cast(this); |
| 82 stream->Add("v%d", unalloc->virtual_register()); |
| 83 switch (unalloc->policy()) { |
| 84 case LUnallocated::NONE: |
| 85 break; |
| 86 case LUnallocated::FIXED_REGISTER: { |
| 87 const char* register_name = |
| 88 Register::AllocationIndexToString(unalloc->fixed_index()); |
| 89 stream->Add("(=%s)", register_name); |
| 90 break; |
| 91 } |
| 92 case LUnallocated::FIXED_DOUBLE_REGISTER: { |
| 93 const char* double_register_name = |
| 94 DoubleRegister::AllocationIndexToString(unalloc->fixed_index()); |
| 95 stream->Add("(=%s)", double_register_name); |
| 96 break; |
| 97 } |
| 98 case LUnallocated::FIXED_SLOT: |
| 99 stream->Add("(=%dS)", unalloc->fixed_index()); |
| 100 break; |
| 101 case LUnallocated::MUST_HAVE_REGISTER: |
| 102 stream->Add("(R)"); |
| 103 break; |
| 104 case LUnallocated::WRITABLE_REGISTER: |
| 105 stream->Add("(WR)"); |
| 106 break; |
| 107 case LUnallocated::SAME_AS_FIRST_INPUT: |
| 108 stream->Add("(1)"); |
| 109 break; |
| 110 case LUnallocated::SAME_AS_ANY_INPUT: |
| 111 stream->Add("(A)"); |
| 112 break; |
| 113 case LUnallocated::ANY: |
| 114 stream->Add("(-)"); |
| 115 break; |
| 116 case LUnallocated::IGNORE: |
| 117 stream->Add("(0)"); |
| 118 break; |
| 119 } |
| 120 break; |
| 121 case CONSTANT_OPERAND: |
| 122 stream->Add("[constant:%d]", index()); |
| 123 break; |
| 124 case STACK_SLOT: |
| 125 stream->Add("[stack:%d]", index()); |
| 126 break; |
| 127 case DOUBLE_STACK_SLOT: |
| 128 stream->Add("[double_stack:%d]", index()); |
| 129 break; |
| 130 case REGISTER: |
| 131 stream->Add("[%s|R]", Register::AllocationIndexToString(index())); |
| 132 break; |
| 133 case DOUBLE_REGISTER: |
| 134 stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index())); |
| 135 break; |
| 136 case ARGUMENT: |
| 137 stream->Add("[arg:%d]", index()); |
| 138 break; |
| 139 } |
| 140 } |
| 141 |
| 142 int LOperand::VirtualRegister() { |
| 143 LUnallocated* unalloc = LUnallocated::cast(this); |
| 144 return unalloc->virtual_register(); |
| 145 } |
| 146 |
| 147 |
| 148 bool UsePosition::RequiresRegister() const { |
| 149 return requires_reg_; |
| 150 } |
| 151 |
| 152 |
| 153 bool UsePosition::RegisterIsBeneficial() const { |
| 154 return register_beneficial_; |
| 155 } |
| 156 |
| 157 |
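| // Split this interval at pos; the tail becomes a new interval that is |
| // linked into the chain right after this one. |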
| 158 void UseInterval::SplitAt(LifetimePosition pos) { |
| 159 ASSERT(Contains(pos) && pos.Value() != start().Value()); |
| 160 UseInterval* after = new UseInterval(pos, end_); |
| 161 after->next_ = next_; |
| 162 next_ = after; |
| 163 end_ = pos; |
| 164 } |
| 165 |
| 166 |
| 167 #ifdef DEBUG |
| 168 |
| 169 |
| 170 void LiveRange::Verify() const { |
| 171 UsePosition* cur = first_pos_; |
| 172 while (cur != NULL) { |
| 173 ASSERT(Start().Value() <= cur->pos().Value() && |
| 174 cur->pos().Value() <= End().Value()); |
| 175 cur = cur->next(); |
| 176 } |
| 177 } |
| 178 |
| 179 |
| 180 bool LiveRange::HasOverlap(UseInterval* target) const { |
| 181 UseInterval* current_interval = first_interval_; |
| 182 while (current_interval != NULL) { |
| 183 // Intervals overlap if the start of one is contained in the other. |
| 184 if (current_interval->Contains(target->start()) || |
| 185 target->Contains(current_interval->start())) { |
| 186 return true; |
| 187 } |
| 188 current_interval = current_interval->next(); |
| 189 } |
| 190 return false; |
| 191 } |
| 192 |
| 193 |
| 194 #endif |
| 195 |
| 196 |
| 197 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) { |
| 198 UsePosition* use_pos = last_processed_use_; |
| 199 if (use_pos == NULL) use_pos = first_pos(); |
| 200 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) { |
| 201 use_pos = use_pos->next(); |
| 202 } |
| 203 last_processed_use_ = use_pos; |
| 204 return use_pos; |
| 205 } |
| 206 |
| 207 |
| 208 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial( |
| 209 LifetimePosition start) { |
| 210 UsePosition* pos = NextUsePosition(start); |
| 211 while (pos != NULL && !pos->RegisterIsBeneficial()) { |
| 212 pos = pos->next(); |
| 213 } |
| 214 return pos; |
| 215 } |
| 216 |
| 217 |
| 218 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) { |
| 219 UsePosition* pos = NextUsePosition(start); |
| 220 while (pos != NULL && !pos->RequiresRegister()) { |
| 221 pos = pos->next(); |
| 222 } |
| 223 return pos; |
| 224 } |
| 225 |
| 226 |
| 227 bool LiveRange::CanBeSpilled(LifetimePosition pos) { |
| 228 // A range that already has a register assigned cannot be spilled at or before its start. |
| 229 if (pos.Value() <= Start().Value() && HasRegisterAssigned()) return false; |
| 230 |
| 231 // We cannot spill a live range that has a use requiring a register |
| 232 // at the current or the immediate next position. |
| 233 UsePosition* use_pos = NextRegisterPosition(pos); |
| 234 if (use_pos == NULL) return true; |
| 235 return use_pos->pos().Value() > pos.NextInstruction().Value(); |
| 236 } |
| 237 |
| 238 |
| 239 UsePosition* LiveRange::FirstPosWithHint() const { |
| 240 UsePosition* pos = first_pos_; |
| 241 while (pos != NULL && !pos->HasHint()) pos = pos->next(); |
| 242 return pos; |
| 243 } |
| 244 |
| 245 |
| 246 LOperand* LiveRange::CreateAssignedOperand() { |
| 247 LOperand* op = NULL; |
| 248 if (HasRegisterAssigned()) { |
| 249 ASSERT(!IsSpilled()); |
| 250 if (assigned_double_) { |
| 251 op = LDoubleRegister::Create(assigned_register()); |
| 252 } else { |
| 253 op = LRegister::Create(assigned_register()); |
| 254 } |
| 255 } else if (IsSpilled()) { |
| 256 ASSERT(!HasRegisterAssigned()); |
| 257 op = TopLevel()->GetSpillOperand(); |
| 258 ASSERT(!op->IsUnallocated()); |
| 259 } else { |
| 260 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE); |
| 261 unalloc->set_virtual_register(id_); |
| 262 op = unalloc; |
| 263 } |
| 264 return op; |
| 265 } |
| 266 |
| 267 |
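| // Return the interval from which a search for position should start: the |
| // cached current_interval_ unless it already lies past position, in which |
| // case the search falls back to the first interval. |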
| 268 UseInterval* LiveRange::FirstSearchIntervalForPosition( |
| 269 LifetimePosition position) const { |
| 270 if (current_interval_ == NULL) return first_interval_; |
| 271 if (current_interval_->start().Value() > position.Value()) { |
| 272 current_interval_ = NULL; |
| 273 return first_interval_; |
| 274 } |
| 275 return current_interval_; |
| 276 } |
| 277 |
| 278 |
| 279 void LiveRange::AdvanceLastProcessedMarker( |
| 280 UseInterval* to_start_of, LifetimePosition but_not_past) const { |
| 281 if (to_start_of == NULL) return; |
| 282 if (to_start_of->start().Value() > but_not_past.Value()) return; |
| 283 LifetimePosition start = |
| 284 current_interval_ == NULL ? LifetimePosition::Invalid() |
| 285 : current_interval_->start(); |
| 286 if (to_start_of->start().Value() > start.Value()) { |
| 287 current_interval_ = to_start_of; |
| 288 } |
| 289 } |
| 290 |
| 291 |
| 292 void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) { |
| 293 ASSERT(Start().Value() <= position.Value()); |
| 294 ASSERT(result->IsEmpty()); |
| 295 // Find the last interval that ends before the position. If the |
| 296 // position is contained in one of the intervals in the chain, we |
| 297 // split that interval and use the first part. |
| 298 UseInterval* current = FirstSearchIntervalForPosition(position); |
| 299 while (current != NULL) { |
| 300 if (current->Contains(position)) { |
| 301 current->SplitAt(position); |
| 302 break; |
| 303 } |
| 304 UseInterval* next = current->next(); |
| 305 if (next->start().Value() >= position.Value()) break; |
| 306 current = next; |
| 307 } |
| 308 |
| 309 // Partition the original use intervals between the two live ranges. |
| 310 UseInterval* before = current; |
| 311 UseInterval* after = before->next(); |
| 312 result->last_interval_ = (last_interval_ == before) |
| 313 ? after // Only interval in the range after split. |
| 314 : last_interval_; // Last interval of the original range. |
| 315 result->first_interval_ = after; |
| 316 last_interval_ = before; |
| 317 |
| 318 // Find the last use position before the split and the first use |
| 319 // position after it. |
| 320 UsePosition* use_after = first_pos_; |
| 321 UsePosition* use_before = NULL; |
| 322 while (use_after != NULL && use_after->pos().Value() <= position.Value()) { |
| 323 use_before = use_after; |
| 324 use_after = use_after->next(); |
| 325 } |
| 326 |
| 327 // Partition the original use positions between the two live ranges. |
| 328 if (use_before != NULL) { |
| 329 use_before->next_ = NULL; |
| 330 } else { |
| 331 first_pos_ = NULL; |
| 332 } |
| 333 result->first_pos_ = use_after; |
| 334 |
| 335 // Link the new live range in the chain before any of the other |
| 336 // ranges linked from the range before the split. |
| 337 result->parent_ = (parent_ == NULL) ? this : parent_; |
| 338 result->next_ = next_; |
| 339 next_ = result; |
| 340 |
| 341 #ifdef DEBUG |
| 342 Verify(); |
| 343 result->Verify(); |
| 344 #endif |
| 345 } |
| 346 |
| 347 |
| 348 // This implements an ordering on live ranges so that they are sorted by |
| 349 // their start positions, which is needed for the correctness of the |
| 350 // register allocation algorithm. If two live ranges start at the same |
| 351 // position, the tie is broken by where the value is first used; this |
| 352 // part of the ordering is merely a heuristic. |
| 353 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const { |
| 354 LifetimePosition start = Start(); |
| 355 LifetimePosition other_start = other->Start(); |
| 356 if (start.Value() == other_start.Value()) { |
| 357 UsePosition* pos = FirstPosWithHint(); |
| 358 if (pos == NULL) return false; |
| 359 UsePosition* other_pos = other->first_pos(); |
| 360 if (other_pos == NULL) return true; |
| 361 return pos->pos().Value() < other_pos->pos().Value(); |
| 362 } |
| 363 return start.Value() < other_start.Value(); |
| 364 } |
| 365 |
| 366 |
| 367 void LiveRange::ShortenTo(LifetimePosition start) { |
| 368 LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value()); |
| 369 ASSERT(first_interval_ != NULL); |
| 370 ASSERT(first_interval_->start().Value() <= start.Value()); |
| 371 ASSERT(start.Value() < first_interval_->end().Value()); |
| 372 first_interval_->set_start(start); |
| 373 } |
| 374 |
| 375 |
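| // Extend this live range to cover [start, end[, folding in any existing |
| // intervals that begin inside the new one. Used by BuildLiveRanges to make |
| // values live across an entire loop. |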
| 376 void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) { |
| 377 LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n", |
| 378 id_, |
| 379 start.Value(), |
| 380 end.Value()); |
| 381 LifetimePosition new_end = end; |
| 382 while (first_interval_ != NULL && |
| 383 first_interval_->start().Value() <= end.Value()) { |
| 384 if (first_interval_->end().Value() > end.Value()) { |
| 385 new_end = first_interval_->end(); |
| 386 } |
| 387 first_interval_ = first_interval_->next(); |
| 388 } |
| 389 |
| 390 UseInterval* new_interval = new UseInterval(start, new_end); |
| 391 new_interval->next_ = first_interval_; |
| 392 first_interval_ = new_interval; |
| 393 if (new_interval->next() == NULL) { |
| 394 last_interval_ = new_interval; |
| 395 } |
| 396 } |
| 397 |
| 398 |
| 399 void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end) { |
| 400 LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n", |
| 401 id_, |
| 402 start.Value(), |
| 403 end.Value()); |
| 404 if (first_interval_ == NULL) { |
| 405 UseInterval* interval = new UseInterval(start, end); |
| 406 first_interval_ = interval; |
| 407 last_interval_ = interval; |
| 408 } else { |
| 409 if (end.Value() == first_interval_->start().Value()) { |
| 410 first_interval_->set_start(start); |
| 411 } else if (end.Value() < first_interval_->start().Value()) { |
| 412 UseInterval* interval = new UseInterval(start, end); |
| 413 interval->set_next(first_interval_); |
| 414 first_interval_ = interval; |
| 415 } else { |
| 416 // The order of instruction processing (see ProcessInstructions) |
| 417 // guarantees that each new use interval either precedes or intersects |
| 418 // with the last added interval. |
| 419 ASSERT(start.Value() < first_interval_->end().Value()); |
| 420 first_interval_->start_ = Min(start, first_interval_->start_); |
| 421 first_interval_->end_ = Max(end, first_interval_->end_); |
| 422 } |
| 423 } |
| 424 } |
| 425 |
| 426 |
| 427 UsePosition* LiveRange::AddUsePosition(LifetimePosition pos, |
| 428 LOperand* operand) { |
| 429 LAllocator::TraceAlloc("Add to live range %d use position %d\n", |
| 430 id_, |
| 431 pos.Value()); |
| 432 UsePosition* use_pos = new UsePosition(pos, operand); |
| 433 UsePosition* prev = NULL; |
| 434 UsePosition* current = first_pos_; |
| 435 while (current != NULL && current->pos().Value() < pos.Value()) { |
| 436 prev = current; |
| 437 current = current->next(); |
| 438 } |
| 439 |
| 440 if (prev == NULL) { |
| 441 use_pos->set_next(first_pos_); |
| 442 first_pos_ = use_pos; |
| 443 } else { |
| 444 use_pos->next_ = prev->next_; |
| 445 prev->next_ = use_pos; |
| 446 } |
| 447 |
| 448 return use_pos; |
| 449 } |
| 450 |
| 451 |
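| // Rewrite all recorded use operands in place to the final location |
| // assigned to this range. |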
| 452 void LiveRange::ConvertOperands() { |
| 453 LOperand* op = CreateAssignedOperand(); |
| 454 UsePosition* use_pos = first_pos(); |
| 455 while (use_pos != NULL) { |
| 456 ASSERT(Start().Value() <= use_pos->pos().Value() && |
| 457 use_pos->pos().Value() <= End().Value()); |
| 458 |
| 459 if (use_pos->HasOperand()) { |
| 460 ASSERT(op->IsRegister() || op->IsDoubleRegister() || |
| 461 !use_pos->RequiresRegister()); |
| 462 use_pos->operand()->ConvertTo(op->kind(), op->index()); |
| 463 } |
| 464 use_pos = use_pos->next(); |
| 465 } |
| 466 } |
| 467 |
| 468 |
| 469 UsePosition* LiveRange::AddUsePosition(LifetimePosition pos) { |
| 470 return AddUsePosition(pos, CreateAssignedOperand()); |
| 471 } |
| 472 |
| 473 |
| 474 bool LiveRange::CanCover(LifetimePosition position) const { |
| 475 if (IsEmpty()) return false; |
| 476 return Start().Value() <= position.Value() && |
| 477 position.Value() < End().Value(); |
| 478 } |
| 479 |
| 480 |
| 481 bool LiveRange::Covers(LifetimePosition position) { |
| 482 if (!CanCover(position)) return false; |
| 483 UseInterval* start_search = FirstSearchIntervalForPosition(position); |
| 484 for (UseInterval* interval = start_search; |
| 485 interval != NULL; |
| 486 interval = interval->next()) { |
| 487 ASSERT(interval->next() == NULL || |
| 488 interval->next()->start().Value() >= interval->start().Value()); |
| 489 AdvanceLastProcessedMarker(interval, position); |
| 490 if (interval->Contains(position)) return true; |
| 491 if (interval->start().Value() > position.Value()) return false; |
| 492 } |
| 493 return false; |
| 494 } |
| 495 |
| 496 |
| 497 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) { |
| 498 UseInterval* b = other->first_interval(); |
| 499 if (b == NULL) return LifetimePosition::Invalid(); |
| 500 LifetimePosition advance_last_processed_up_to = b->start(); |
| 501 UseInterval* a = FirstSearchIntervalForPosition(b->start()); |
| 502 while (a != NULL && b != NULL) { |
| 503 if (a->start().Value() > other->End().Value()) break; |
| 504 if (b->start().Value() > End().Value()) break; |
| 505 LifetimePosition cur_intersection = a->Intersect(b); |
| 506 if (cur_intersection.IsValid()) { |
| 507 return cur_intersection; |
| 508 } |
| 509 if (a->start().Value() < b->start().Value()) { |
| 510 a = a->next(); |
| 511 if (a == NULL || a->start().Value() > other->End().Value()) break; |
| 512 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); |
| 513 } else { |
| 514 b = b->next(); |
| 515 } |
| 516 } |
| 517 return LifetimePosition::Invalid(); |
| 518 } |
| 519 |
| 520 |
| 521 void LAllocator::InitializeLivenessAnalysis() { |
| 522 // Initialize the live_in sets for each block to NULL. |
| 523 int block_count = graph()->blocks()->length(); |
| 524 live_in_sets_.Initialize(block_count); |
| 525 live_in_sets_.AddBlock(NULL, block_count); |
| 526 } |
| 527 |
| 528 |
| 529 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) { |
| 530 // Compute live-out for the given block, excluding contributions from |
| 531 // backward successor edges. |
| 532 BitVector* live_out = new BitVector(next_virtual_register_); |
| 533 |
| 534 // Process all successor blocks. |
| 535 HBasicBlock* successor = block->end()->FirstSuccessor(); |
| 536 while (successor != NULL) { |
| 537 // Add values live on entry to the successor. Note that the successor's |
| 538 // live_in will not yet have been computed for backward edges. |
| 539 BitVector* live_in = live_in_sets_[successor->block_id()]; |
| 540 if (live_in != NULL) live_out->Union(*live_in); |
| 541 |
| 542 // All phi input operands corresponding to this successor edge are live |
| 543 // out from this block. |
| 544 int index = successor->PredecessorIndexOf(block); |
| 545 const ZoneList<HPhi*>* phis = successor->phis(); |
| 546 for (int i = 0; i < phis->length(); ++i) { |
| 547 HPhi* phi = phis->at(i); |
| 548 if (!phi->OperandAt(index)->IsConstant()) { |
| 549 live_out->Add(phi->OperandAt(index)->id()); |
| 550 } |
| 551 } |
| 552 |
| 553 // Check if we are done with the second successor. |
| 554 if (successor == block->end()->SecondSuccessor()) break; |
| 555 |
| 556 successor = block->end()->SecondSuccessor(); |
| 557 } |
| 558 |
| 559 return live_out; |
| 560 } |
| 561 |
| 562 |
| 563 void LAllocator::AddInitialIntervals(HBasicBlock* block, |
| 564 BitVector* live_out) { |
| 565 // Add an interval that includes the entire block to the live range for |
| 566 // each live_out value. |
| 567 LifetimePosition start = LifetimePosition::FromInstructionIndex( |
| 568 block->first_instruction_index()); |
| 569 LifetimePosition end = LifetimePosition::FromInstructionIndex( |
| 570 block->last_instruction_index()); |
| 571 BitVector::Iterator iterator(live_out); |
| 572 while (!iterator.Done()) { |
| 573 int operand_index = iterator.Current(); |
| 574 LiveRange* range = LiveRangeFor(operand_index); |
| 575 if (!range->IsEmpty() && |
| 576 range->Start().Value() == end.NextInstruction().Value()) { |
| 577 range->AddUseInterval(start, end.NextInstruction()); |
| 578 } else { |
| 579 range->AddUseInterval(start, end); |
| 580 } |
| 581 iterator.Advance(); |
| 582 } |
| 583 } |
| 584 |
| 585 |
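| // Fixed live ranges are keyed by negative ids; fixed double registers are |
| // numbered below the block of ids used for the general purpose registers. |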
| 586 int LAllocator::FixedDoubleLiveRangeID(int index) { |
| 587 return -index - 1 - Register::kNumAllocatableRegisters; |
| 588 } |
| 589 |
| 590 |
| 591 LOperand* LAllocator::AllocateFixed(LUnallocated* operand, |
| 592 int pos, |
| 593 bool is_tagged) { |
| 594 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register()); |
| 595 ASSERT(operand->HasFixedPolicy()); |
| 596 if (operand->policy() == LUnallocated::FIXED_SLOT) { |
| 597 operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index()); |
| 598 } else if (operand->policy() == LUnallocated::FIXED_REGISTER) { |
| 599 int reg_index = operand->fixed_index(); |
| 600 operand->ConvertTo(LOperand::REGISTER, reg_index); |
| 601 } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) { |
| 602 int reg_index = operand->fixed_index(); |
| 603 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index); |
| 604 } else { |
| 605 UNREACHABLE(); |
| 606 } |
| 607 if (is_tagged) { |
| 608 TraceAlloc("Fixed reg is tagged at %d\n", pos); |
| 609 LInstruction* instr = chunk_->instructions()->at(pos); |
| 610 if (instr->HasPointerMap()) { |
| 611 instr->pointer_map()->RecordPointer(operand); |
| 612 } |
| 613 } |
| 614 return operand; |
| 615 } |
| 616 |
| 617 |
| 618 LiveRange* LAllocator::FixedLiveRangeFor(int index) { |
| 619 if (index >= fixed_live_ranges_.length()) { |
| 620 fixed_live_ranges_.AddBlock(NULL, |
| 621 index - fixed_live_ranges_.length() + 1); |
| 622 } |
| 623 |
| 624 LiveRange* result = fixed_live_ranges_[index]; |
| 625 if (result == NULL) { |
| 626 result = new LiveRange(FixedLiveRangeID(index)); |
| 627 ASSERT(result->IsFixed()); |
| 628 result->set_assigned_register(index, false); |
| 629 fixed_live_ranges_[index] = result; |
| 630 } |
| 631 return result; |
| 632 } |
| 633 |
| 634 |
| 635 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) { |
| 636 if (index >= fixed_double_live_ranges_.length()) { |
| 637 fixed_double_live_ranges_.AddBlock(NULL, |
| 638 index - fixed_double_live_ranges_.length() + 1); |
| 639 } |
| 640 |
| 641 LiveRange* result = fixed_double_live_ranges_[index]; |
| 642 if (result == NULL) { |
| 643 result = new LiveRange(FixedDoubleLiveRangeID(index)); |
| 644 ASSERT(result->IsFixed()); |
| 645 result->set_assigned_register(index, true); |
| 646 fixed_double_live_ranges_[index] = result; |
| 647 } |
| 648 return result; |
| 649 } |
| 650 |
| 651 LiveRange* LAllocator::LiveRangeFor(int index) { |
| 652 if (index >= live_ranges_.length()) { |
| 653 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1); |
| 654 } |
| 655 LiveRange* result = live_ranges_[index]; |
| 656 if (result == NULL) { |
| 657 result = new LiveRange(index); |
| 658 live_ranges_[index] = result; |
| 659 } |
| 660 return result; |
| 661 } |
| 662 |
| 663 |
| 664 LGap* LAllocator::GetLastGap(HBasicBlock* block) const { |
| 665 int last_instruction = block->last_instruction_index(); |
| 666 int index = chunk_->NearestGapPos(last_instruction); |
| 667 return chunk_->GetGapAt(index); |
| 668 } |
| 669 |
| 670 |
| 671 HPhi* LAllocator::LookupPhi(LOperand* operand) const { |
| 672 if (!operand->IsUnallocated()) return NULL; |
| 673 int index = operand->VirtualRegister(); |
| 674 HValue* instr = graph()->LookupValue(index); |
| 675 if (instr != NULL && instr->IsPhi()) { |
| 676 return HPhi::cast(instr); |
| 677 } |
| 678 return NULL; |
| 679 } |
| 680 |
| 681 |
| 682 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) { |
| 683 if (operand->IsUnallocated()) { |
| 684 return LiveRangeFor(LUnallocated::cast(operand)->virtual_register()); |
| 685 } else if (operand->IsRegister()) { |
| 686 return FixedLiveRangeFor(operand->index()); |
| 687 } else if (operand->IsDoubleRegister()) { |
| 688 return FixedDoubleLiveRangeFor(operand->index()); |
| 689 } else { |
| 690 return NULL; |
| 691 } |
| 692 } |
| 693 |
| 694 |
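| // Record a definition of operand at position: shorten the existing range |
| // to start here, or create a minimal one-instruction interval for a |
| // definition without any use. |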
| 695 void LAllocator::Define(LifetimePosition position, |
| 696 LOperand* operand, |
| 697 LOperand* hint) { |
| 698 LiveRange* range = LiveRangeFor(operand); |
| 699 if (range == NULL) return; |
| 700 |
| 701 if (range->IsEmpty() || range->Start().Value() > position.Value()) { |
| 702 // This can happen if there is a definition without a use. |
| 703 range->AddUseInterval(position, position.NextInstruction()); |
| 704 range->AddUsePosition(position.NextInstruction(), NULL); |
| 705 } else { |
| 706 range->ShortenTo(position); |
| 707 } |
| 708 |
| 709 if (operand->IsUnallocated()) { |
| 710 LUnallocated* unalloc_operand = LUnallocated::cast(operand); |
| 711 range->AddUsePosition(position, unalloc_operand)->set_hint(hint); |
| 712 } |
| 713 } |
| 714 |
| 715 |
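| // Record a use of operand at position and conservatively extend the live |
| // range back to the start of the block; the interval is shortened later |
| // when the definition is processed. |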
| 716 void LAllocator::Use(LifetimePosition block_start, |
| 717 LifetimePosition position, |
| 718 LOperand* operand, |
| 719 LOperand* hint) { |
| 720 LiveRange* range = LiveRangeFor(operand); |
| 721 if (range == NULL) return; |
| 722 if (operand->IsUnallocated()) { |
| 723 LUnallocated* unalloc_operand = LUnallocated::cast(operand); |
| 724 range->AddUsePosition(position, unalloc_operand)->set_hint(hint); |
| 725 } |
| 726 range->AddUseInterval(block_start, position); |
| 727 } |
| 728 |
| 729 |
| 730 void LAllocator::AddConstraintsGapMove(int index, |
| 731 LOperand* from, |
| 732 LOperand* to) { |
| 733 LGap* gap = chunk_->GetGapAt(index); |
| 734 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); |
| 735 if (from->IsUnallocated()) { |
| 736 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); |
| 737 for (int i = 0; i < move_operands->length(); ++i) { |
| 738 LMoveOperands cur = move_operands->at(i); |
| 739 LOperand* cur_to = cur.to(); |
| 740 if (cur_to->IsUnallocated()) { |
| 741 if (cur_to->VirtualRegister() == from->VirtualRegister()) { |
| 742 move->AddMove(cur.from(), to); |
| 743 return; |
| 744 } |
| 745 } |
| 746 } |
| 747 } |
| 748 move->AddMove(from, to); |
| 749 } |
| 750 |
| 751 |
| 752 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) { |
| 753 int start = block->first_instruction_index(); |
| 754 int end = block->last_instruction_index(); |
| 755 for (int i = start; i <= end; ++i) { |
| 756 if (chunk_->IsGapAt(i)) { |
| 757 InstructionSummary* summary = NULL; |
| 758 InstructionSummary* prev_summary = NULL; |
| 759 if (i < end) summary = GetSummary(i + 1); |
| 760 if (i > start) prev_summary = GetSummary(i - 1); |
| 761 MeetConstraintsBetween(prev_summary, summary, i); |
| 762 } |
| 763 } |
| 764 } |
| 765 |
| 766 |
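| // Satisfy the fixed-register, fixed-slot and same-as-first-input |
| // constraints of the instructions on either side of the gap at gap_index, |
| // allocating fixed operands and inserting gap moves as needed. |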
| 767 void LAllocator::MeetConstraintsBetween(InstructionSummary* first, |
| 768 InstructionSummary* second, |
| 769 int gap_index) { |
| 770 // Handle fixed temporaries. |
| 771 if (first != NULL) { |
| 772 for (int i = 0; i < first->TempCount(); ++i) { |
| 773 LUnallocated* temp = LUnallocated::cast(first->TempAt(i)); |
| 774 if (temp->HasFixedPolicy()) { |
| 775 AllocateFixed(temp, gap_index - 1, false); |
| 776 } |
| 777 } |
| 778 } |
| 779 |
| 780 // Handle fixed output operand. |
| 781 if (first != NULL && first->Output() != NULL) { |
| 782 LUnallocated* first_output = LUnallocated::cast(first->Output()); |
| 783 LiveRange* range = LiveRangeFor(first_output->VirtualRegister()); |
| 784 bool assigned = false; |
| 785 if (first_output->HasFixedPolicy()) { |
| 786 LUnallocated* output_copy = first_output->CopyUnconstrained(); |
| 787 bool is_tagged = HasTaggedValue(first_output->VirtualRegister()); |
| 788 AllocateFixed(first_output, gap_index, is_tagged); |
| 789 |
| 790 // This value is produced on the stack; we never need to spill it. |
| 791 if (first_output->IsStackSlot()) { |
| 792 range->SetSpillOperand(first_output); |
| 793 range->SetSpillStartIndex(gap_index - 1); |
| 794 assigned = true; |
| 795 } |
| 796 chunk_->AddGapMove(gap_index, first_output, output_copy); |
| 797 } |
| 798 |
| 799 if (!assigned) { |
| 800 range->SetSpillStartIndex(gap_index); |
| 801 |
| 802 // This move to the spill operand is not a real use. Liveness analysis |
| 803 // and splitting of live ranges do not account for it. |
| 804 // Thus it should be inserted at a lifetime position corresponding to |
| 805 // the instruction end. |
| 806 LGap* gap = chunk_->GetGapAt(gap_index); |
| 807 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE); |
| 808 move->AddMove(first_output, range->GetSpillOperand()); |
| 809 } |
| 810 } |
| 811 |
| 812 // Handle the fixed input operands of the second instruction. |
| 813 if (second != NULL) { |
| 814 for (int i = 0; i < second->InputCount(); ++i) { |
| 815 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(i)); |
| 816 if (cur_input->HasFixedPolicy()) { |
| 817 LUnallocated* input_copy = cur_input->CopyUnconstrained(); |
| 818 bool is_tagged = HasTaggedValue(cur_input->VirtualRegister()); |
| 819 AllocateFixed(cur_input, gap_index + 1, is_tagged); |
| 820 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 821 } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) { |
| 822 LUnallocated* input_copy = cur_input->CopyUnconstrained(); |
| 823 cur_input->set_virtual_register(next_virtual_register_++); |
| 824 second->AddTemp(cur_input); |
| 825 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 826 } |
| 827 } |
| 828 } |
| 829 |
| 830 // Handle "output same as input" for second instruction. |
| 831 if (second != NULL && second->Output() != NULL) { |
| 832 LUnallocated* second_output = LUnallocated::cast(second->Output()); |
| 833 if (second_output->HasSameAsInputPolicy()) { |
| 834 LUnallocated* cur_input = LUnallocated::cast(second->InputAt(0)); |
| 835 int output_vreg = second_output->VirtualRegister(); |
| 836 int input_vreg = cur_input->VirtualRegister(); |
| 837 |
| 838 LUnallocated* input_copy = cur_input->CopyUnconstrained(); |
| 839 cur_input->set_virtual_register(second_output->virtual_register()); |
| 840 AddConstraintsGapMove(gap_index, input_copy, cur_input); |
| 841 |
| 842 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { |
| 843 int index = gap_index + 1; |
| 844 LInstruction* instr = chunk_->instructions()->at(index); |
| 845 if (instr->HasPointerMap()) { |
| 846 instr->pointer_map()->RecordPointer(input_copy); |
| 847 } |
| 848 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { |
| 849 // The input is assumed to take on a tagged representation immediately, |
| 850 // before the pointer map can be used. That is, the pointer map at the |
| 851 // instruction will include the output operand (whose value at the |
| 852 // beginning of the instruction equals the input operand). If this is |
| 853 // not desired, the pointer map at this instruction needs to be adjusted |
| 854 // manually. |
| 855 } |
| 856 } |
| 857 } |
| 858 } |
| 859 |
| 860 |
| 861 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) { |
| 862 int block_start = block->first_instruction_index(); |
| 863 int index = block->last_instruction_index(); |
| 864 |
| 865 LifetimePosition block_start_position = |
| 866 LifetimePosition::FromInstructionIndex(block_start); |
| 867 |
| 868 while (index >= block_start) { |
| 869 LifetimePosition curr_position = |
| 870 LifetimePosition::FromInstructionIndex(index); |
| 871 |
| 872 if (chunk_->IsGapAt(index)) { |
| 873 // We have a gap at this position. |
| 874 LGap* gap = chunk_->GetGapAt(index); |
| 875 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); |
| 876 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); |
| 877 for (int i = 0; i < move_operands->length(); ++i) { |
| 878 LMoveOperands* cur = &move_operands->at(i); |
| 879 if (cur->IsIgnored()) continue; |
| 880 LOperand* from = cur->from(); |
| 881 LOperand* to = cur->to(); |
| 882 HPhi* phi = LookupPhi(to); |
| 883 LOperand* hint = to; |
| 884 if (phi != NULL) { |
| 885 // This is a phi resolving move. |
| 886 if (!phi->block()->IsLoopHeader()) { |
| 887 hint = LiveRangeFor(phi->id())->FirstHint(); |
| 888 } |
| 889 } else { |
| 890 if (to->IsUnallocated()) { |
| 891 if (live->Contains(to->VirtualRegister())) { |
| 892 Define(curr_position, to, from); |
| 893 live->Remove(to->VirtualRegister()); |
| 894 } else { |
| 895 cur->Eliminate(); |
| 896 continue; |
| 897 } |
| 898 } else { |
| 899 Define(curr_position, to, from); |
| 900 } |
| 901 } |
| 902 Use(block_start_position, curr_position, from, hint); |
| 903 if (from->IsUnallocated()) { |
| 904 live->Add(from->VirtualRegister()); |
| 905 } |
| 906 } |
| 907 } else { |
| 908 ASSERT(!chunk_->IsGapAt(index)); |
| 909 InstructionSummary* summary = GetSummary(index); |
| 910 |
| 911 if (summary != NULL) { |
| 912 LOperand* output = summary->Output(); |
| 913 if (output != NULL) { |
| 914 if (output->IsUnallocated()) live->Remove(output->VirtualRegister()); |
| 915 Define(curr_position, output, NULL); |
| 916 } |
| 917 |
| 918 if (summary->IsCall()) { |
| 919 for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) { |
| 920 if (output == NULL || !output->IsRegister() || |
| 921 output->index() != i) { |
| 922 LiveRange* range = FixedLiveRangeFor(i); |
| 923 range->AddUseInterval(curr_position, |
| 924 curr_position.InstructionEnd()); |
| 925 } |
| 926 } |
| 927 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) { |
| 928 if (output == NULL || !output->IsDoubleRegister() || |
| 929 output->index() != i) { |
| 930 LiveRange* range = FixedDoubleLiveRangeFor(i); |
| 931 range->AddUseInterval(curr_position, |
| 932 curr_position.InstructionEnd()); |
| 933 } |
| 934 } |
| 935 } |
| 936 |
| 937 for (int i = 0; i < summary->InputCount(); ++i) { |
| 938 LOperand* input = summary->InputAt(i); |
| 939 |
| 940 LifetimePosition use_pos; |
| 941 if (input->IsUnallocated() && |
| 942 LUnallocated::cast(input)->IsUsedAtStart()) { |
| 943 use_pos = curr_position; |
| 944 } else { |
| 945 use_pos = curr_position.InstructionEnd(); |
| 946 } |
| 947 |
| 948 Use(block_start_position, use_pos, input, NULL); |
| 949 if (input->IsUnallocated()) live->Add(input->VirtualRegister()); |
| 950 } |
| 951 |
| 952 for (int i = 0; i < summary->TempCount(); ++i) { |
| 953 LOperand* temp = summary->TempAt(i); |
| 954 if (summary->IsCall()) { |
| 955 if (temp->IsRegister()) continue; |
| 956 if (temp->IsUnallocated()) { |
| 957 LUnallocated* temp_unalloc = LUnallocated::cast(temp); |
| 958 if (temp_unalloc->HasFixedPolicy()) { |
| 959 continue; |
| 960 } |
| 961 } |
| 962 } |
| 963 Use(block_start_position, curr_position, temp, NULL); |
| 964 Define(curr_position.PrevInstruction(), temp, NULL); |
| 965 } |
| 966 } |
| 967 } |
| 968 |
| 969 index = index - 1; |
| 970 } |
| 971 } |
| 972 |
| 973 |
| 974 void LAllocator::ResolvePhis(HBasicBlock* block) { |
| 975 const ZoneList<HPhi*>* phis = block->phis(); |
| 976 for (int i = 0; i < phis->length(); ++i) { |
| 977 HPhi* phi = phis->at(i); |
| 978 LUnallocated* phi_operand = new LUnallocated(LUnallocated::NONE); |
| 979 phi_operand->set_virtual_register(phi->id()); |
| 980 for (int j = 0; j < phi->OperandCount(); ++j) { |
| 981 HValue* op = phi->OperandAt(j); |
| 982 LOperand* operand = NULL; |
| 983 if (op->IsConstant() && op->EmitAtUses()) { |
| 984 HConstant* constant = HConstant::cast(op); |
| 985 operand = chunk_->DefineConstantOperand(constant); |
| 986 } else { |
| 987 ASSERT(!op->EmitAtUses()); |
| 988 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE); |
| 989 unalloc->set_virtual_register(op->id()); |
| 990 operand = unalloc; |
| 991 } |
| 992 HBasicBlock* cur_block = block->predecessors()->at(j); |
| 993 // The gap move must be added without the special processing done by |
| 994 // AddConstraintsGapMove. |
| 995 chunk_->AddGapMove(cur_block->last_instruction_index() - 1, |
| 996 operand, |
| 997 phi_operand); |
| 998 } |
| 999 |
| 1000 LiveRange* live_range = LiveRangeFor(phi->id()); |
| 1001 LLabel* label = chunk_->GetLabel(phi->block()->block_id()); |
| 1002 label->GetOrCreateParallelMove(LGap::START)-> |
| 1003 AddMove(phi_operand, live_range->GetSpillOperand()); |
| 1004 live_range->SetSpillStartIndex(phi->block()->first_instruction_index()); |
| 1005 } |
| 1006 } |
| 1007 |
| 1008 |
| 1009 void LAllocator::Allocate(LChunk* chunk) { |
| 1010 ASSERT(chunk_ == NULL); |
| 1011 chunk_ = chunk; |
| 1012 MeetRegisterConstraints(); |
| 1013 ResolvePhis(); |
| 1014 BuildLiveRanges(); |
| 1015 AllocateGeneralRegisters(); |
| 1016 AllocateDoubleRegisters(); |
| 1017 PopulatePointerMaps(); |
| 1018 if (has_osr_entry_) ProcessOsrEntry(); |
| 1019 ConnectRanges(); |
| 1020 ResolveControlFlow(); |
| 1021 } |
| 1022 |
| 1023 |
| 1024 void LAllocator::MeetRegisterConstraints() { |
| 1025 HPhase phase("Register constraints", chunk()); |
| 1026 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); |
| 1027 for (int i = 0; i < blocks->length(); ++i) { |
| 1028 HBasicBlock* block = blocks->at(i); |
| 1029 MeetRegisterConstraints(block); |
| 1030 } |
| 1031 } |
| 1032 |
| 1033 |
| 1034 void LAllocator::ResolvePhis() { |
| 1035 HPhase phase("Resolve phis", chunk()); |
| 1036 |
| 1037 // Process the blocks in reverse order. |
| 1038 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); |
| 1039 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { |
| 1040 HBasicBlock* block = blocks->at(block_id); |
| 1041 ResolvePhis(block); |
| 1042 } |
| 1043 } |
| 1044 |
| 1045 |
| 1046 void LAllocator::ResolveControlFlow(LiveRange* range, |
| 1047 HBasicBlock* block, |
| 1048 HBasicBlock* pred) { |
| 1049 LifetimePosition pred_end = |
| 1050 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()). |
| 1051 PrevInstruction(); |
| 1052 |
| 1053 LifetimePosition cur_start = |
| 1054 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); |
| 1055 LiveRange* pred_cover = NULL; |
| 1056 LiveRange* cur_cover = NULL; |
| 1057 LiveRange* cur_range = range; |
| 1058 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { |
| 1059 if (cur_range->CanCover(cur_start)) { |
| 1060 ASSERT(cur_cover == NULL); |
| 1061 cur_cover = cur_range; |
| 1062 } |
| 1063 if (cur_range->CanCover(pred_end)) { |
| 1064 ASSERT(pred_cover == NULL); |
| 1065 pred_cover = cur_range; |
| 1066 } |
| 1067 cur_range = cur_range->next(); |
| 1068 } |
| 1069 |
| 1070 if (cur_cover->IsSpilled()) return; |
| 1071 ASSERT(pred_cover != NULL && cur_cover != NULL); |
| 1072 if (pred_cover != cur_cover) { |
| 1073 LOperand* pred_op = pred_cover->CreateAssignedOperand(); |
| 1074 LOperand* cur_op = cur_cover->CreateAssignedOperand(); |
| 1075 if (!pred_op->Equals(cur_op)) { |
| 1076 LGap* gap = NULL; |
| 1077 if (block->predecessors()->length() == 1) { |
| 1078 gap = chunk_->GetGapAt(block->first_instruction_index()); |
| 1079 } else { |
| 1080 ASSERT(pred->end()->SecondSuccessor() == NULL); |
| 1081 gap = GetLastGap(pred); |
| 1082 } |
| 1083 gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op); |
| 1084 } |
| 1085 } |
| 1086 } |
| 1087 |
| 1088 |
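| // Find the parallel move into which a move at pos should be inserted: the |
| // gap at that index if there is one, otherwise the BEFORE or AFTER move |
| // slot of the neighboring gap. |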
| 1089 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) { |
| 1090 int index = pos.InstructionIndex(); |
| 1091 if (chunk_->IsGapAt(index)) { |
| 1092 LGap* gap = chunk_->GetGapAt(index); |
| 1093 return gap->GetOrCreateParallelMove( |
| 1094 pos.IsInstructionStart() ? LGap::START : LGap::END); |
| 1095 } |
| 1096 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); |
| 1097 return chunk_->GetGapAt(gap_pos)->GetOrCreateParallelMove( |
| 1098 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE); |
| 1099 } |
| 1100 |
| 1101 |
| 1102 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) { |
| 1103 LGap* gap = chunk_->GetGapAt(chunk_->NearestGapPos(pos.InstructionIndex())); |
| 1104 return gap->block(); |
| 1105 } |
| 1106 |
| 1107 |
| 1108 void LAllocator::ConnectRanges() { |
| 1109 HPhase phase("Connect ranges", this); |
| 1110 for (int i = 0; i < live_ranges()->length(); ++i) { |
| 1111 LiveRange* first_range = live_ranges()->at(i); |
| 1112 if (first_range == NULL || first_range->parent() != NULL) continue; |
| 1113 |
| 1114 LiveRange* second_range = first_range->next(); |
| 1115 while (second_range != NULL) { |
| 1116 LifetimePosition pos = second_range->Start(); |
| 1117 |
| 1118 if (!second_range->IsSpilled()) { |
| 1119 // Add a gap move if the two live ranges touch and there is no block |
| 1120 // boundary. |
| 1121 if (first_range->End().Value() == pos.Value()) { |
| 1122 bool should_insert = true; |
| 1123 if (IsBlockBoundary(pos)) { |
| 1124 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos)); |
| 1125 } |
| 1126 if (should_insert) { |
| 1127 LParallelMove* move = GetConnectingParallelMove(pos); |
| 1128 LOperand* prev_operand = first_range->CreateAssignedOperand(); |
| 1129 LOperand* cur_operand = second_range->CreateAssignedOperand(); |
| 1130 move->AddMove(prev_operand, cur_operand); |
| 1131 } |
| 1132 } |
| 1133 } |
| 1134 |
| 1135 first_range = second_range; |
| 1136 second_range = second_range->next(); |
| 1137 } |
| 1138 } |
| 1139 } |
| 1140 |
| 1141 |
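| // Control flow needs no explicit resolution when the block's single |
| // predecessor is also its immediate predecessor in the linear order, since |
| // ConnectRanges can insert the needed moves directly. |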
| 1142 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const { |
| 1143 if (block->predecessors()->length() != 1) return false; |
| 1144 return block->predecessors()->first()->block_id() == block->block_id() - 1; |
| 1145 } |
| 1146 |
| 1147 |
| 1148 void LAllocator::ResolveControlFlow() { |
| 1149 HPhase phase("Resolve control flow", this); |
| 1150 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); |
| 1151 for (int block_id = 1; block_id < blocks->length(); ++block_id) { |
| 1152 HBasicBlock* block = blocks->at(block_id); |
| 1153 if (CanEagerlyResolveControlFlow(block)) continue; |
| 1154 BitVector* live = live_in_sets_[block->block_id()]; |
| 1155 BitVector::Iterator iterator(live); |
| 1156 while (!iterator.Done()) { |
| 1157 int operand_index = iterator.Current(); |
| 1158 for (int i = 0; i < block->predecessors()->length(); ++i) { |
| 1159 HBasicBlock* cur = block->predecessors()->at(i); |
| 1160 LiveRange* cur_range = LiveRangeFor(operand_index); |
| 1161 ResolveControlFlow(cur_range, block, cur); |
| 1162 } |
| 1163 iterator.Advance(); |
| 1164 } |
| 1165 } |
| 1166 } |
| 1167 |
| 1168 |
| 1169 void LAllocator::BuildLiveRanges() { |
| 1170 HPhase phase("Build live ranges", this); |
| 1171 InitializeLivenessAnalysis(); |
| 1172 // Process the blocks in reverse order. |
| 1173 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); |
| 1174 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { |
| 1175 HBasicBlock* block = blocks->at(block_id); |
| 1176 BitVector* live = ComputeLiveOut(block); |
| 1177 // Initially consider all live_out values live for the entire block. We |
| 1178 // will shorten these intervals if necessary. |
| 1179 AddInitialIntervals(block, live); |
| 1180 |
| 1181 // Process the instructions in reverse order, generating and killing |
| 1182 // live values. |
| 1183 ProcessInstructions(block, live); |
| 1184 // All phi output operands are killed by this block. |
| 1185 const ZoneList<HPhi*>* phis = block->phis(); |
| 1186 for (int i = 0; i < phis->length(); ++i) { |
| 1187 // The live range interval already ends at the first instruction of the |
| 1188 // block. |
| 1189 HPhi* phi = phis->at(i); |
| 1190 live->Remove(phi->id()); |
| 1191 |
| 1192 LOperand* hint = NULL; |
| 1193 LOperand* phi_operand = NULL; |
| 1194 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0)); |
| 1195 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START); |
| 1196 for (int j = 0; j < move->move_operands()->length(); ++j) { |
| 1197 LOperand* to = move->move_operands()->at(j).to(); |
| 1198 if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) { |
| 1199 hint = move->move_operands()->at(j).from(); |
| 1200 phi_operand = to; |
| 1201 break; |
| 1202 } |
| 1203 } |
| 1204 ASSERT(hint != NULL); |
| 1205 |
| 1206 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( |
| 1207 block->first_instruction_index()); |
| 1208 Define(block_start, phi_operand, hint); |
| 1209 } |
| 1210 |
| 1211 // Now live is the live_in set for this block, excluding values that are |
| 1212 // live out on backward successor edges. |
| 1213 live_in_sets_[block_id] = live; |
| 1214 |
| 1215 // If this block is a loop header, go back and patch up the necessary |
| 1216 // predecessor blocks. |
| 1217 if (block->IsLoopHeader()) { |
| 1218 // TODO(kmillikin): Need to be able to get the last block of the loop |
| 1219 // in the loop information. Add a live range stretching from the first |
| 1220 // loop instruction to the last for each value live on entry to the |
| 1221 // header. |
| 1222 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge(); |
| 1223 BitVector::Iterator iterator(live); |
| 1224 LifetimePosition start = LifetimePosition::FromInstructionIndex( |
| 1225 block->first_instruction_index()); |
| 1226 LifetimePosition end = LifetimePosition::FromInstructionIndex( |
| 1227 back_edge->last_instruction_index()); |
| 1228 while (!iterator.Done()) { |
| 1229 int operand_index = iterator.Current(); |
| 1230 LiveRange* range = LiveRangeFor(operand_index); |
| 1231 range->EnsureInterval(start, end); |
| 1232 iterator.Advance(); |
| 1233 } |
| 1234 |
| 1235 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) { |
| 1236 live_in_sets_[i]->Union(*live); |
| 1237 } |
| 1238 } |
| 1239 |
| 1240 #ifdef DEBUG |
| 1241 if (block_id == 0) { |
| 1242 BitVector::Iterator iterator(live); |
| 1243 bool found = false; |
| 1244 while (!iterator.Done()) { |
| 1245 found = true; |
| 1246 int operand_index = iterator.Current(); |
| 1247 PrintF("Function: %s\n", |
| 1248 *graph()->info()->function()->debug_name()->ToCString()); |
| 1249 PrintF("Value %d used before first definition!\n", operand_index); |
| 1250 LiveRange* range = LiveRangeFor(operand_index); |
| 1251 PrintF("First use is at %d\n", range->first_pos()->pos().Value()); |
| 1252 iterator.Advance(); |
| 1253 } |
| 1254 ASSERT(!found); |
| 1255 } |
| 1256 #endif |
| 1257 } |
| 1258 } |
| 1259 |
| 1260 |
| 1261 void LAllocator::AllocateGeneralRegisters() { |
| 1262 HPhase phase("Allocate general registers", this); |
| 1263 num_registers_ = Register::kNumAllocatableRegisters; |
| 1264 mode_ = CPU_REGISTERS; |
| 1265 AllocateRegisters(); |
| 1266 } |
| 1267 |
| 1268 |
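| // Check that the pointer maps are sorted by lithium position; |
| // PopulatePointerMaps relies on this to scan them in a single pass. |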
| 1269 bool LAllocator::SafePointsAreInOrder() const { |
| 1270 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); |
| 1271 int safe_point = 0; |
| 1272 for (int i = 0; i < pointer_maps->length(); ++i) { |
| 1273 LPointerMap* map = pointer_maps->at(i); |
| 1274 if (safe_point > map->lithium_position()) return false; |
| 1275 safe_point = map->lithium_position(); |
| 1276 } |
| 1277 return true; |
| 1278 } |
| 1279 |
| 1280 |
| 1281 void LAllocator::PopulatePointerMaps() { |
| 1282 HPhase phase("Populate pointer maps", this); |
| 1283 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps(); |
| 1284 |
| 1285 ASSERT(SafePointsAreInOrder()); |
| 1286 |
| 1287 // Iterate over all safe point positions and record a pointer |
| 1288 // for all spilled live ranges at this point. |
| 1289 int first_safe_point_index = 0; |
| 1290 int last_range_start = 0; |
| 1291 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) { |
| 1292 LiveRange* range = live_ranges()->at(range_idx); |
| 1293 if (range == NULL) continue; |
| 1294 // Iterate over the first parts of multi-part live ranges. |
| 1295 if (range->parent() != NULL) continue; |
| 1296 // Skip non-pointer values. |
| 1297 if (!HasTaggedValue(range->id())) continue; |
| 1298 // Skip empty live ranges. |
| 1299 if (range->IsEmpty()) continue; |
| 1300 |
| 1301 // Find the extent of the range and its children. |
| 1302 int start = range->Start().InstructionIndex(); |
| 1303 int end = 0; |
| 1304 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) { |
| 1305 LifetimePosition this_end = cur->End(); |
| 1306 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex(); |
| 1307 ASSERT(cur->Start().InstructionIndex() >= start); |
| 1308 } |
| 1309 |
| 1310 // Most of the ranges are in order, but not all. Keep an eye on when |
| 1311 // they step backwards and reset the first_safe_point_index so we don't |
| 1312 // miss any safe points. |
| 1313 if (start < last_range_start) { |
| 1314 first_safe_point_index = 0; |
| 1315 } |
| 1316 last_range_start = start; |
| 1317 |
| 1318 // Step across all the safe points that are before the start of this range, |
| 1319 // recording how far we step so the work is not repeated for the next range. |
| 1320 while (first_safe_point_index < pointer_maps->length()) { |
| 1321 LPointerMap* map = pointer_maps->at(first_safe_point_index); |
| 1322 int safe_point = map->lithium_position(); |
| 1323 if (safe_point >= start) break; |
| 1324 first_safe_point_index++; |
| 1325 } |
| 1326 |
| 1327 // Step through the safe points to see whether they are in the range. |
| 1328 for (int safe_point_index = first_safe_point_index; |
| 1329 safe_point_index < pointer_maps->length(); |
| 1330 ++safe_point_index) { |
| 1331 LPointerMap* map = pointer_maps->at(safe_point_index); |
| 1332 int safe_point = map->lithium_position(); |
| 1333 |
| 1334 // The safe points are sorted so we can stop searching here. |
| 1335 if (safe_point - 1 > end) break; |
| 1336 |
| 1337 // Advance to the next active range that covers the current |
| 1338 // safe point position. |
| 1339 LifetimePosition safe_point_pos = |
| 1340 LifetimePosition::FromInstructionIndex(safe_point); |
| 1341 LiveRange* cur = range; |
| 1342 while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) { |
| 1343 cur = cur->next(); |
| 1344 } |
| 1345 if (cur == NULL) continue; |
| 1346 |
| 1347 // Check if the live range is spilled and the safe point is after |
| 1348 // the spill position. |
| 1349 if (range->HasAllocatedSpillOperand() && |
| 1350 safe_point >= range->spill_start_index()) { |
| 1351 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n", |
| 1352 range->id(), range->spill_start_index(), safe_point); |
| 1353 map->RecordPointer(range->GetSpillOperand()); |
| 1354 } |
| 1355 |
| 1356 if (!cur->IsSpilled()) { |
| 1357 TraceAlloc("Pointer in register for range %d (start at %d) " |
| 1358 "at safe point %d\n", |
| 1359 cur->id(), cur->Start().Value(), safe_point); |
| 1360 LOperand* operand = cur->CreateAssignedOperand(); |
| 1361 ASSERT(!operand->IsStackSlot()); |
| 1362 map->RecordPointer(operand); |
| 1363 } |
| 1364 } |
| 1365 } |
| 1366 } |
| 1367 |
| 1368 |
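| // For every range that covers the OSR entry, is in a register, and has a |
| // spill slot, record the register-to-spill-slot mapping on the LOsrEntry |
| // instruction. |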
| 1369 void LAllocator::ProcessOsrEntry() { |
| 1370 const ZoneList<LInstruction*>* instrs = chunk_->instructions(); |
| 1371 |
| 1372 // Linear search for the OSR entry instruction in the chunk. |
| 1373 int index = -1; |
| 1374 while (++index < instrs->length() && |
| 1375 !instrs->at(index)->IsOsrEntry()) { |
| 1376 } |
| 1377 ASSERT(index < instrs->length()); |
| 1378 LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index)); |
| 1379 |
| 1380 LifetimePosition position = LifetimePosition::FromInstructionIndex(index); |
| 1381 for (int i = 0; i < live_ranges()->length(); ++i) { |
| 1382 LiveRange* range = live_ranges()->at(i); |
| 1383 if (range != NULL) { |
| 1384 if (range->Covers(position) && |
| 1385 range->HasRegisterAssigned() && |
| 1386 range->TopLevel()->HasAllocatedSpillOperand()) { |
| 1387 int reg_index = range->assigned_register(); |
| 1388 LOperand* spill_operand = range->TopLevel()->GetSpillOperand(); |
| 1389 if (range->IsDouble()) { |
| 1390 instruction->MarkSpilledDoubleRegister(reg_index, spill_operand); |
| 1391 } else { |
| 1392 instruction->MarkSpilledRegister(reg_index, spill_operand); |
| 1393 } |
| 1394 } |
| 1395 } |
| 1396 } |
| 1397 } |
| 1398 |
| 1399 |
| 1400 void LAllocator::AllocateDoubleRegisters() { |
| 1401 HPhase phase("Allocate double registers", this); |
| 1402 num_registers_ = DoubleRegister::kNumAllocatableRegisters; |
| 1403 mode_ = XMM_REGISTERS; |
| 1404 AllocateRegisters(); |
| 1405 } |
| 1406 |
| 1407 |
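| // Main linear scan loop: process live ranges in order of increasing start |
| // position, retiring ranges from the active and inactive lists as the |
| // position advances, then try a free register and fall back to evicting or |
| // spilling when none is available. |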
| 1408 void LAllocator::AllocateRegisters() { |
| 1409 ASSERT(mode_ != NONE); |
| 1410 reusable_slots_.Clear(); |
| 1411 |
| 1412 for (int i = 0; i < live_ranges_.length(); ++i) { |
| 1413 if (live_ranges_[i] != NULL) { |
| 1414 if (HasDoubleValue(live_ranges_[i]->id()) == (mode_ == XMM_REGISTERS)) { |
| 1415 AddToUnhandledUnsorted(live_ranges_[i]); |
| 1416 } |
| 1417 } |
| 1418 } |
| 1419 SortUnhandled(); |
| 1420 ASSERT(UnhandledIsSorted()); |
| 1421 |
| 1422 ASSERT(active_live_ranges_.is_empty()); |
| 1423 ASSERT(inactive_live_ranges_.is_empty()); |
| 1424 |
| 1425 if (mode_ == XMM_REGISTERS) { |
| 1426 for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) { |
| 1427 LiveRange* current = fixed_double_live_ranges_.at(i); |
| 1428 if (current != NULL) { |
| 1429 AddToInactive(current); |
| 1430 } |
| 1431 } |
| 1432 } else { |
| 1433 for (int i = 0; i < fixed_live_ranges_.length(); ++i) { |
| 1434 LiveRange* current = fixed_live_ranges_.at(i); |
| 1435 if (current != NULL) { |
| 1436 AddToInactive(current); |
| 1437 } |
| 1438 } |
| 1439 } |
| 1440 |
| 1441 while (!unhandled_live_ranges_.is_empty()) { |
| 1442 ASSERT(UnhandledIsSorted()); |
| 1443 LiveRange* current = unhandled_live_ranges_.RemoveLast(); |
| 1444 ASSERT(UnhandledIsSorted()); |
| 1445 LifetimePosition position = current->Start(); |
| 1446 TraceAlloc("Processing interval %d start=%d\n", |
| 1447 current->id(), |
| 1448 position.Value()); |
| 1449 |
| 1450 if (current->HasAllocatedSpillOperand()) { |
| 1451 TraceAlloc("Live range %d already has a spill operand\n", current->id()); |
| 1452 LifetimePosition next_pos = position; |
| 1453 if (chunk_->IsGapAt(next_pos.InstructionIndex())) { |
| 1454 next_pos = next_pos.NextInstruction(); |
| 1455 } |
| 1456 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); |
| 1457 // If the range already has a spill operand and it doesn't need a |
| 1458 // register immediately, split it and spill the first part of the range. |
| 1459 if (pos == NULL) { |
| 1460 Spill(current); |
| 1461 continue; |
| 1462 } else if (pos->pos().Value() > |
| 1463 current->Start().NextInstruction().Value()) { |
| 1464 // Do not spill the live range eagerly if the use position that can |
| 1465 // benefit from the register is too close to the start of the range. |
        LiveRange* part = Split(current,
                                current->Start().NextInstruction(),
                                pos->pos());
        Spill(current);
        AddToUnhandledSorted(part);
        ASSERT(UnhandledIsSorted());
        continue;
      }
    }

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!result) {
      AllocateBlockedReg(current);
    }

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  active_live_ranges_.Clear();
  inactive_live_ranges_.Clear();
}

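// Illustrative note (a sketch, not part of the allocator): the loop above is
// a classic linear scan. Ranges leave unhandled_live_ranges_ in order of
// increasing start position, and at each start the active/inactive lists are
// refreshed before the range competes for a register. With hypothetical
// intervals v1:[0, 10[, v2:[4, 6[ and v3:[8, 12[, processing v3 at position 8
// first retires v2 (its end, 6, is <= 8) via ActiveToHandled, while v1 stays
// active because it still covers position 8.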

void LAllocator::Setup() {
  LConstantOperand::SetupCache();
  LStackSlot::SetupCache();
  LDoubleStackSlot::SetupCache();
  LRegister::SetupCache();
  LDoubleRegister::SetupCache();
}


void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}
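
// Illustrative note: FLAG_trace_alloc is assumed to correspond to the
// --trace_alloc command-line flag. When enabled, the TraceAlloc calls in this
// file produce an interleaved log such as
//   Processing interval 41 start=12
//   Add live range 41 to active
// (ids and positions vary per compilation), which makes the allocator's
// per-range decisions easy to follow.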


void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
  operand->set_virtual_register(value->id());
  current_summary()->AddInput(operand);
}


bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph()->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged();
}


bool LAllocator::HasDoubleValue(int virtual_register) const {
  HValue* value = graph()->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsDouble();
}


void LAllocator::MarkAsCall() {
  current_summary()->MarkAsCall();
}


void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
  operand->set_virtual_register(instr->id());
  current_summary()->SetOutput(operand);
}


void LAllocator::RecordTemporary(LUnallocated* operand) {
  ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters);
  if (!operand->HasFixedPolicy()) {
    operand->set_virtual_register(next_virtual_register_++);
  }
  current_summary()->AddTemp(operand);
}


int LAllocator::max_initial_value_ids() {
  return LUnallocated::kMaxVirtualRegisters / 32;
}


void LAllocator::BeginInstruction() {
  if (next_summary_ == NULL) {
    next_summary_ = new InstructionSummary();
  }
  summary_stack_.Add(next_summary_);
  next_summary_ = NULL;
}


void LAllocator::SummarizeInstruction(int index) {
  InstructionSummary* sum = summary_stack_.RemoveLast();
  if (summaries_.length() <= index) {
    summaries_.AddBlock(NULL, index + 1 - summaries_.length());
  }
  ASSERT(summaries_[index] == NULL);
  if (sum->Output() != NULL || sum->InputCount() > 0 || sum->TempCount() > 0) {
    summaries_[index] = sum;
  } else {
    next_summary_ = sum;
  }
}
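
// Sketch of the summary lifecycle (names taken from this file only):
//
//   BeginInstruction();            // push a (possibly recycled) summary
//   RecordDefinition(instr, out);  // SetOutput on the current summary
//   RecordUse(value, in);          // AddInput on the current summary
//   SummarizeInstruction(index);   // pop; keep the summary only if non-empty
//
// Empty summaries are stashed in next_summary_ and reused by the next
// BeginInstruction call, avoiding an allocation per trivial instruction.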


void LAllocator::OmitInstruction() {
  summary_stack_.RemoveLast();
}


void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range);
}


void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range);
}


void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range);
      ASSERT(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range);
  ASSERT(UnhandledIsSorted());
}
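
// Note on the ordering invariant (illustration with hypothetical starts):
// unhandled_live_ranges_ keeps the range to process next at the *end* of the
// array, i.e. start positions descend. With starts 16, 8 and 2 the list is
// stored as [16, 8, 2], so RemoveLast() yields the range starting at 2 first.
// AddToUnhandledSorted scans from the end because a freshly split range
// usually starts near the current allocation position.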


void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range);
}


static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}


// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list. This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}


void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    reusable_slots_.Add(range);
  }
}


LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
  return result;
}
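
// Illustrative sketch of slot reuse (hypothetical ranges): FreeSpillSlot only
// records a slot once the last piece of a split chain is retired, and
// TryReuseSpillSlot hands it out only if the previous occupant has already
// ended. If range v7, owning a slot, ends at position 20, a range starting at
// position 24 may inherit that slot, while a range starting at 18 falls
// through to chunk_->GetNextSpillSlot and gets a fresh one.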


void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}


void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}


bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  LifetimePosition max_pos = LifetimePosition::FromInstructionIndex(
      chunk_->instructions()->length() + 1);
  ASSERT(DoubleRegister::kNumAllocatableRegisters >=
         Register::kNumAllocatableRegisters);
  EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters>
      free_pos(max_pos);
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_pos[cur_reg] = Min(free_pos[cur_reg], next_intersection);
  }

  UsePosition* pos = current->FirstPosWithHint();
  if (pos != NULL) {
    LOperand* hint = pos->hint();
    if (hint->IsRegister() || hint->IsDoubleRegister()) {
      int register_index = hint->index();
      TraceAlloc("Found reg hint %d for live range %d (free [%d, end %d[)\n",
                 register_index,
                 current->id(),
                 free_pos[register_index].Value(),
                 current->End().Value());
      if (free_pos[register_index].Value() >= current->End().Value()) {
        TraceAlloc("Assigning preferred reg %d to live range %d\n",
                   register_index,
                   current->id());
        current->set_assigned_register(register_index, mode_ == XMM_REGISTERS);
        return true;
      }
    }
  }

  int max_reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_pos[i].Value() > free_pos[max_reg].Value()) {
      max_reg = i;
    }
  }

  if (free_pos[max_reg].InstructionIndex() == 0) {
    return false;
  } else if (free_pos[max_reg].Value() >= current->End().Value()) {
    TraceAlloc("Assigning reg %d to live range %d\n", max_reg, current->id());
    current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS);
  } else {
    // Split the interval at the nearest gap position, but never split an
    // interval at its start position.
    LifetimePosition pos =
        LifetimePosition::FromInstructionIndex(
            chunk_->NearestGapPos(free_pos[max_reg].InstructionIndex()));
    if (pos.Value() <= current->Start().Value()) return false;
    LiveRange* second_range = Split(current, pos);
    AddToUnhandledSorted(second_range);
    current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS);
  }

  return true;
}
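
// Worked example for TryAllocateFreeReg (hypothetical numbers): free_pos[r]
// is the position at which register r stops being free for current. With
// three registers and current covering [10, 30[, suppose
//   free_pos = { r0: 0, r1: 18, r2: 40 }
// r0 is held by an active range, r1 becomes busy at 18, and r2 stays free
// past current's end, so max_reg picks r2 and assigns it outright. Had r2
// been free only until 24, current would instead be split at the gap nearest
// to 24, the tail requeued, and the head [10, 24[ would still receive r2.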


void LAllocator::AllocateBlockedReg(LiveRange* current) {
  LifetimePosition max_pos =
      LifetimePosition::FromInstructionIndex(
          chunk_->instructions()->length() + 1);
  ASSERT(DoubleRegister::kNumAllocatableRegisters >=
         Register::kNumAllocatableRegisters);
  EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters>
      use_pos(max_pos);
  EmbeddedVector<LifetimePosition, DoubleRegister::kNumAllocatableRegisters>
      block_pos(max_pos);

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int max_reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[max_reg].Value()) {
      max_reg = i;
    }
  }

  UsePosition* first_usage = current->NextRegisterPosition(current->Start());
  if (first_usage == NULL) {
    Spill(current);
  } else if (use_pos[max_reg].Value() < first_usage->pos().Value()) {
    SplitAndSpill(current, current->Start(), first_usage->pos());
  } else {
    if (block_pos[max_reg].Value() < current->End().Value()) {
      // Split current before blocked position.
      LiveRange* second_range = Split(current,
                                      current->Start(),
                                      block_pos[max_reg]);
      AddToUnhandledSorted(second_range);
    }

    current->set_assigned_register(max_reg, mode_ == XMM_REGISTERS);
    SplitAndSpillIntersecting(current);
  }
}
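
// Note on the decision above (a reading of the code, not normative):
// use_pos[r] estimates how long r's current occupants can live without the
// register, while block_pos[r] marks hard conflicts with fixed ranges. Three
// outcomes follow:
//   1. current has no register use at all -> just spill it;
//   2. every register is needed again before current's first register use
//      -> spill current up to that use and requeue the rest;
//   3. otherwise take the register whose occupants can wait longest,
//      splitting current before any fixed-range conflict, and evict the
//      intersecting ranges via SplitAndSpillIntersecting.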


void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos =
      LifetimePosition::FromInstructionIndex(
          chunk_->NearestGapPos(current->Start().InstructionIndex()));
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == NULL) {
        SplitAndSpill(range, split_pos);
      } else {
        SplitAndSpill(range, split_pos, next_pos->pos());
      }
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SplitAndSpill(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SplitAndSpill(range, split_pos, next_intersection);
        }
        InactiveToHandled(range);
        --i;
      }
    }
  }
}
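
// Sketch of an eviction (hypothetical numbers): if current was just given r1
// starting at position 14 and an active range v9 also holds r1, v9 is split
// at the gap nearest to 14. If v9 next needs a register at position 22, the
// piece from the split up to (at most) 22 is spilled and the remainder is
// requeued, so v9 competes again for a register exactly where it needs one.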


LiveRange* LAllocator::Split(LiveRange* range,
                             LifetimePosition start,
                             LifetimePosition end) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d[\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(
      start, end.PrevInstruction().InstructionEnd());
  ASSERT(split_pos.Value() >= start.Value());
  return Split(range, split_pos);
}


LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  // We have no choice: both positions are within the same instruction.
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The whole interval lies inside one basic block, so split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Move to the outermost loop header.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  if (block == end_block) {
    return end;
  }

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}
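
// Worked example (hypothetical block ids): suppose a range must be split
// somewhere in [B2, B7[, where B7 sits inside a loop headed by B4, itself
// nested in a loop headed by B3, and both headers come after B2. The walk
// above climbs B7 -> B4 -> B3 and stops, so the split lands at B3's first
// instruction. This keeps the spill/reload code out of both loop bodies
// instead of executing it on every iteration inside B7.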


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
      chunk_->instructions()->at(pos.InstructionIndex())->IsLabel();
}


void LAllocator::AddGapMove(int pos, LiveRange* prev, LiveRange* next) {
  UsePosition* prev_pos = prev->AddUsePosition(
      LifetimePosition::FromInstructionIndex(pos));
  UsePosition* next_pos = next->AddUsePosition(
      LifetimePosition::FromInstructionIndex(pos));
  LOperand* prev_operand = prev_pos->operand();
  LOperand* next_operand = next_pos->operand();
  LGap* gap = chunk_->GetGapAt(pos);
  gap->GetOrCreateParallelMove(LGap::START)->
      AddMove(prev_operand, next_operand);
  next_pos->set_hint(prev_operand);
}
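
// Note: the two use positions registered above give the parallel move a
// source and a destination operand that the allocator fills in later. The
// hint biases next toward the same location as prev; if both do end up in
// the same operand, the move has identical source and destination and can
// presumably be recognized as redundant when the gap's moves are resolved.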


LiveRange* LAllocator::Split(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
  if (pos.Value() <= range->Start().Value()) {
    return range;
  }
  LiveRange* result = LiveRangeFor(next_virtual_register_++);
  range->SplitAt(pos, result);
  return result;
}


void LAllocator::SplitAndSpill(LiveRange* range,
                               LifetimePosition start,
                               LifetimePosition end) {
  // Spill the given range from start onwards, but keep it spilled no further
  // than end; whatever lies beyond end is requeued for allocation.
  ASSERT(start.Value() < end.Value());
  LiveRange* tail_part = Split(range, start);
  if (tail_part->Start().Value() < end.Value()) {
    LiveRange* third_part = Split(tail_part,
                                  tail_part->Start().NextInstruction(),
                                  end);
    Spill(tail_part);
    ASSERT(third_part != tail_part);
    AddToUnhandledSorted(third_part);
  } else {
    AddToUnhandledSorted(tail_part);
  }
}
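
// Sketch with hypothetical positions: SplitAndSpill(v5, 10, 26) on a range
// covering [4, 40[ first splits off the tail [10, 40[. Because 10 < 26 the
// tail is split once more at an optimal position no later than 26; the
// middle piece is spilled, and the remainder goes back on the unhandled list
// to compete for a register of its own.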


void LAllocator::SplitAndSpill(LiveRange* range, LifetimePosition at) {
  at = LifetimePosition::FromInstructionIndex(
      chunk_->NearestGapPos(at.InstructionIndex()));
  LiveRange* second_part = Split(range, at);
  Spill(second_part);
}


void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == XMM_REGISTERS);
    first->SetSpillOperand(op);
  }
  range->MakeSpilled();
}


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif


} }  // namespace v8::internal