Chromium Code Reviews

Side by Side Diff: src/compiler/register-allocator.cc

Issue 1540453002: [turbofan] Drop 'auto' from register-allocator.cc (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years ago
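The change is mechanical: every local variable in register-allocator.cc that was declared with 'auto' now spells out its concrete type, both in plain declarations (for example, GetContainingLoop's "auto index = block->loop_header();" becomes "RpoNumber index = block->loop_header();") and in range-based for loops (for example, "for (auto move_op : *move)" becomes "for (MoveOperands* move_op : *move)"). A minimal standalone sketch of the same before/after pattern, using hypothetical Node/NodeList types rather than anything from this file:

    #include <vector>

    struct Node {
      int value;
      Node* parent;
    };
    using NodeList = std::vector<Node*>;

    int SumRoots(const NodeList& nodes) {
      int sum = 0;
      // Before the cleanup this would read: for (auto node : nodes) { ... }
      // Afterwards the element type is written out explicitly.
      for (Node* node : nodes) {
        // Likewise "auto parent = node->parent;" becomes an explicit pointer type.
        Node* parent = node->parent;
        if (parent == nullptr) sum += node->value;
      }
      return sum;
    }

Behavior is unchanged throughout the patch; only the spelled-out types make the code easier to read outside an IDE.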
OLD | NEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/base/adapters.h" 5 #include "src/base/adapters.h"
6 #include "src/compiler/linkage.h" 6 #include "src/compiler/linkage.h"
7 #include "src/compiler/register-allocator.h" 7 #include "src/compiler/register-allocator.h"
8 #include "src/string-stream.h" 8 #include "src/string-stream.h"
9 9
10 namespace v8 { 10 namespace v8 {
(...skipping 31 matching lines...)
42 42
43 const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg, 43 const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
44 RegisterKind kind) { 44 RegisterKind kind) {
45 return kind == DOUBLE_REGISTERS ? cfg->allocatable_double_codes() 45 return kind == DOUBLE_REGISTERS ? cfg->allocatable_double_codes()
46 : cfg->allocatable_general_codes(); 46 : cfg->allocatable_general_codes();
47 } 47 }
48 48
49 49
50 const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence, 50 const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
51 const InstructionBlock* block) { 51 const InstructionBlock* block) {
52 auto index = block->loop_header(); 52 RpoNumber index = block->loop_header();
53 if (!index.IsValid()) return nullptr; 53 if (!index.IsValid()) return nullptr;
54 return sequence->InstructionBlockAt(index); 54 return sequence->InstructionBlockAt(index);
55 } 55 }
56 56
57 57
58 const InstructionBlock* GetInstructionBlock(const InstructionSequence* code, 58 const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
59 LifetimePosition pos) { 59 LifetimePosition pos) {
60 return code->GetInstructionBlock(pos.ToInstructionIndex()); 60 return code->GetInstructionBlock(pos.ToInstructionIndex());
61 } 61 }
62 62
63 63
64 Instruction* GetLastInstruction(InstructionSequence* code, 64 Instruction* GetLastInstruction(InstructionSequence* code,
65 const InstructionBlock* block) { 65 const InstructionBlock* block) {
66 return code->InstructionAt(block->last_instruction_index()); 66 return code->InstructionAt(block->last_instruction_index());
67 } 67 }
68 68
69 69
70 bool IsOutputRegisterOf(Instruction* instr, Register reg) { 70 bool IsOutputRegisterOf(Instruction* instr, Register reg) {
71 for (size_t i = 0; i < instr->OutputCount(); i++) { 71 for (size_t i = 0; i < instr->OutputCount(); i++) {
72 auto output = instr->OutputAt(i); 72 InstructionOperand* output = instr->OutputAt(i);
73 if (output->IsRegister() && 73 if (output->IsRegister() &&
74 LocationOperand::cast(output)->GetRegister().is(reg)) { 74 LocationOperand::cast(output)->GetRegister().is(reg)) {
75 return true; 75 return true;
76 } 76 }
77 } 77 }
78 return false; 78 return false;
79 } 79 }
80 80
81 81
82 bool IsOutputDoubleRegisterOf(Instruction* instr, DoubleRegister reg) { 82 bool IsOutputDoubleRegisterOf(Instruction* instr, DoubleRegister reg) {
83 for (size_t i = 0; i < instr->OutputCount(); i++) { 83 for (size_t i = 0; i < instr->OutputCount(); i++) {
84 auto output = instr->OutputAt(i); 84 InstructionOperand* output = instr->OutputAt(i);
85 if (output->IsDoubleRegister() && 85 if (output->IsDoubleRegister() &&
86 LocationOperand::cast(output)->GetDoubleRegister().is(reg)) { 86 LocationOperand::cast(output)->GetDoubleRegister().is(reg)) {
87 return true; 87 return true;
88 } 88 }
89 } 89 }
90 return false; 90 return false;
91 } 91 }
92 92
93 93
94 // TODO(dcarney): fix frame to allow frame accesses to half size location. 94 // TODO(dcarney): fix frame to allow frame accesses to half size location.
(...skipping 48 matching lines...)
143 } 143 }
144 144
145 145
146 bool UsePosition::HintRegister(int* register_code) const { 146 bool UsePosition::HintRegister(int* register_code) const {
147 if (hint_ == nullptr) return false; 147 if (hint_ == nullptr) return false;
148 switch (HintTypeField::decode(flags_)) { 148 switch (HintTypeField::decode(flags_)) {
149 case UsePositionHintType::kNone: 149 case UsePositionHintType::kNone:
150 case UsePositionHintType::kUnresolved: 150 case UsePositionHintType::kUnresolved:
151 return false; 151 return false;
152 case UsePositionHintType::kUsePos: { 152 case UsePositionHintType::kUsePos: {
153 auto use_pos = reinterpret_cast<UsePosition*>(hint_); 153 UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
154 int assigned_register = AssignedRegisterField::decode(use_pos->flags_); 154 int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
155 if (assigned_register == kUnassignedRegister) return false; 155 if (assigned_register == kUnassignedRegister) return false;
156 *register_code = assigned_register; 156 *register_code = assigned_register;
157 return true; 157 return true;
158 } 158 }
159 case UsePositionHintType::kOperand: { 159 case UsePositionHintType::kOperand: {
160 auto operand = reinterpret_cast<InstructionOperand*>(hint_); 160-161 InstructionOperand* operand = reinterpret_cast<InstructionOperand*>(hint_);
161 int assigned_register = 162 int assigned_register =
162 operand->IsRegister() 163 operand->IsRegister()
163 ? LocationOperand::cast(operand)->GetRegister().code() 164 ? LocationOperand::cast(operand)->GetRegister().code()
164 : LocationOperand::cast(operand)->GetDoubleRegister().code(); 165 : LocationOperand::cast(operand)->GetDoubleRegister().code();
165 *register_code = assigned_register; 166 *register_code = assigned_register;
166 return true; 167 return true;
167 } 168 }
168 case UsePositionHintType::kPhi: { 169 case UsePositionHintType::kPhi: {
169 auto phi = reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_); 170-171 RegisterAllocationData::PhiMapValue* phi = reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
170 int assigned_register = phi->assigned_register(); 172 int assigned_register = phi->assigned_register();
171 if (assigned_register == kUnassignedRegister) return false; 173 if (assigned_register == kUnassignedRegister) return false;
172 *register_code = assigned_register; 174 *register_code = assigned_register;
173 return true; 175 return true;
174 } 176 }
175 } 177 }
176 UNREACHABLE(); 178 UNREACHABLE();
177 return false; 179 return false;
178 } 180 }
179 181
(...skipping 35 matching lines...)
215 DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_)); 217 DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
216 flags_ = TypeField::encode(type) | 218 flags_ = TypeField::encode(type) |
217 RegisterBeneficialField::encode(register_beneficial) | 219 RegisterBeneficialField::encode(register_beneficial) |
218 HintTypeField::encode(HintTypeField::decode(flags_)) | 220 HintTypeField::encode(HintTypeField::decode(flags_)) |
219 AssignedRegisterField::encode(kUnassignedRegister); 221 AssignedRegisterField::encode(kUnassignedRegister);
220 } 222 }
221 223
222 224
223 UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) { 225 UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
224 DCHECK(Contains(pos) && pos != start()); 226 DCHECK(Contains(pos) && pos != start());
225 auto after = new (zone) UseInterval(pos, end_); 227 UseInterval* after = new (zone) UseInterval(pos, end_);
226 after->next_ = next_; 228 after->next_ = next_;
227 next_ = nullptr; 229 next_ = nullptr;
228 end_ = pos; 230 end_ = pos;
229 return after; 231 return after;
230 } 232 }
231 233
232 234
233 void LifetimePosition::Print() const { 235 void LifetimePosition::Print() const {
234 OFStream os(stdout); 236 OFStream os(stdout);
235 os << *this << std::endl; 237 os << *this << std::endl;
(...skipping 37 matching lines...)
273 weight_(kInvalidWeight), 275 weight_(kInvalidWeight),
274 group_(nullptr) { 276 group_(nullptr) {
275 DCHECK(AllocatedOperand::IsSupportedRepresentation(rep)); 277 DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
276 bits_ = AssignedRegisterField::encode(kUnassignedRegister) | 278 bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
277 RepresentationField::encode(rep); 279 RepresentationField::encode(rep);
278 } 280 }
279 281
280 282
281 void LiveRange::VerifyPositions() const { 283 void LiveRange::VerifyPositions() const {
282 // Walk the positions, verifying that each is in an interval. 284 // Walk the positions, verifying that each is in an interval.
283 auto interval = first_interval_; 285 UseInterval* interval = first_interval_;
284 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { 286 for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
285 CHECK(Start() <= pos->pos()); 287 CHECK(Start() <= pos->pos());
286 CHECK(pos->pos() <= End()); 288 CHECK(pos->pos() <= End());
287 CHECK(interval != nullptr); 289 CHECK(interval != nullptr);
288 while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) { 290 while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
289 interval = interval->next(); 291 interval = interval->next();
290 CHECK(interval != nullptr); 292 CHECK(interval != nullptr);
291 } 293 }
292 } 294 }
293 } 295 }
294 296
(...skipping 30 matching lines...)
325 } 327 }
326 328
327 329
328 RegisterKind LiveRange::kind() const { 330 RegisterKind LiveRange::kind() const {
329 return IsFloatingPoint(representation()) ? DOUBLE_REGISTERS 331 return IsFloatingPoint(representation()) ? DOUBLE_REGISTERS
330 : GENERAL_REGISTERS; 332 : GENERAL_REGISTERS;
331 } 333 }
332 334
333 335
334 UsePosition* LiveRange::FirstHintPosition(int* register_index) const { 336 UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
335 for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { 337 for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
336 if (pos->HintRegister(register_index)) return pos; 338 if (pos->HintRegister(register_index)) return pos;
337 } 339 }
338 return nullptr; 340 return nullptr;
339 } 341 }
340 342
341 343
342 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const { 344 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
343 UsePosition* use_pos = last_processed_use_; 345 UsePosition* use_pos = last_processed_use_;
344 if (use_pos == nullptr || use_pos->pos() > start) { 346 if (use_pos == nullptr || use_pos->pos() > start) {
345 use_pos = first_pos(); 347 use_pos = first_pos();
(...skipping 11 matching lines...)
357 UsePosition* pos = NextUsePosition(start); 359 UsePosition* pos = NextUsePosition(start);
358 while (pos != nullptr && !pos->RegisterIsBeneficial()) { 360 while (pos != nullptr && !pos->RegisterIsBeneficial()) {
359 pos = pos->next(); 361 pos = pos->next();
360 } 362 }
361 return pos; 363 return pos;
362 } 364 }
363 365
364 366
365 UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial( 367 UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
366 LifetimePosition start) const { 368 LifetimePosition start) const {
367 auto pos = first_pos(); 369 UsePosition* pos = first_pos();
368 UsePosition* prev = nullptr; 370 UsePosition* prev = nullptr;
369 while (pos != nullptr && pos->pos() < start) { 371 while (pos != nullptr && pos->pos() < start) {
370 if (pos->RegisterIsBeneficial()) prev = pos; 372 if (pos->RegisterIsBeneficial()) prev = pos;
371 pos = pos->next(); 373 pos = pos->next();
372 } 374 }
373 return prev; 375 return prev;
374 } 376 }
375 377
376 378
377 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const { 379 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
(...skipping 11 matching lines...)
389 if (pos->type() != UsePositionType::kRequiresSlot) continue; 391 if (pos->type() != UsePositionType::kRequiresSlot) continue;
390 return pos; 392 return pos;
391 } 393 }
392 return nullptr; 394 return nullptr;
393 } 395 }
394 396
395 397
396 bool LiveRange::CanBeSpilled(LifetimePosition pos) const { 398 bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
397 // We cannot spill a live range that has a use requiring a register 399 // We cannot spill a live range that has a use requiring a register
398 // at the current or the immediate next position. 400 // at the current or the immediate next position.
399 auto use_pos = NextRegisterPosition(pos); 401 UsePosition* use_pos = NextRegisterPosition(pos);
400 if (use_pos == nullptr) return true; 402 if (use_pos == nullptr) return true;
401 return use_pos->pos() > pos.NextStart().End(); 403 return use_pos->pos() > pos.NextStart().End();
402 } 404 }
403 405
404 406
405 bool LiveRange::IsTopLevel() const { return top_level_ == this; } 407 bool LiveRange::IsTopLevel() const { return top_level_ == this; }
406 408
407 409
408 InstructionOperand LiveRange::GetAssignedOperand() const { 410 InstructionOperand LiveRange::GetAssignedOperand() const {
409 if (HasRegisterAssigned()) { 411 if (HasRegisterAssigned()) {
410 DCHECK(!spilled()); 412 DCHECK(!spilled());
411 return AllocatedOperand(LocationOperand::REGISTER, representation(), 413 return AllocatedOperand(LocationOperand::REGISTER, representation(),
412 assigned_register()); 414 assigned_register());
413 } 415 }
414 DCHECK(spilled()); 416 DCHECK(spilled());
415 DCHECK(!HasRegisterAssigned()); 417 DCHECK(!HasRegisterAssigned());
416 if (TopLevel()->HasSpillOperand()) { 418 if (TopLevel()->HasSpillOperand()) {
417 auto op = TopLevel()->GetSpillOperand(); 419 InstructionOperand* op = TopLevel()->GetSpillOperand();
418 DCHECK(!op->IsUnallocated()); 420 DCHECK(!op->IsUnallocated());
419 return *op; 421 return *op;
420 } 422 }
421 return TopLevel()->GetSpillRangeOperand(); 423 return TopLevel()->GetSpillRangeOperand();
422 } 424 }
423 425
424 426
425 UseInterval* LiveRange::FirstSearchIntervalForPosition( 427 UseInterval* LiveRange::FirstSearchIntervalForPosition(
426 LifetimePosition position) const { 428 LifetimePosition position) const {
427 if (current_interval_ == nullptr) return first_interval_; 429 if (current_interval_ == nullptr) return first_interval_;
428 if (current_interval_->start() > position) { 430 if (current_interval_->start() > position) {
429 current_interval_ = nullptr; 431 current_interval_ = nullptr;
430 return first_interval_; 432 return first_interval_;
431 } 433 }
432 return current_interval_; 434 return current_interval_;
433 } 435 }
434 436
435 437
436 void LiveRange::AdvanceLastProcessedMarker( 438 void LiveRange::AdvanceLastProcessedMarker(
437 UseInterval* to_start_of, LifetimePosition but_not_past) const { 439 UseInterval* to_start_of, LifetimePosition but_not_past) const {
438 if (to_start_of == nullptr) return; 440 if (to_start_of == nullptr) return;
439 if (to_start_of->start() > but_not_past) return; 441 if (to_start_of->start() > but_not_past) return;
440-441 auto start = current_interval_ == nullptr ? LifetimePosition::Invalid() : current_interval_->start(); 442-444 LifetimePosition start = current_interval_ == nullptr ? LifetimePosition::Invalid() : current_interval_->start();
442 if (to_start_of->start() > start) { 445 if (to_start_of->start() > start) {
443 current_interval_ = to_start_of; 446 current_interval_ = to_start_of;
444 } 447 }
445 } 448 }
446 449
447 450
448 LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) { 451 LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
449 int new_id = TopLevel()->GetNextChildId(); 452 int new_id = TopLevel()->GetNextChildId();
450 LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel()); 453 LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
451 DetachAt(position, child, zone); 454 DetachAt(position, child, zone);
452 455
453 child->top_level_ = TopLevel(); 456 child->top_level_ = TopLevel();
454 child->next_ = next_; 457 child->next_ = next_;
455 next_ = child; 458 next_ = child;
456 return child; 459 return child;
457 } 460 }
458 461
459 462
460 UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result, 463 UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
461 Zone* zone) { 464 Zone* zone) {
462 DCHECK(Start() < position); 465 DCHECK(Start() < position);
463 DCHECK(End() > position); 466 DCHECK(End() > position);
464 DCHECK(result->IsEmpty()); 467 DCHECK(result->IsEmpty());
465 // Find the last interval that ends before the position. If the 468 // Find the last interval that ends before the position. If the
466 // position is contained in one of the intervals in the chain, we 469 // position is contained in one of the intervals in the chain, we
467 // split that interval and use the first part. 470 // split that interval and use the first part.
468 auto current = FirstSearchIntervalForPosition(position); 471 UseInterval* current = FirstSearchIntervalForPosition(position);
469 472
470 // If the split position coincides with the beginning of a use interval 473 // If the split position coincides with the beginning of a use interval
471 // we need to split use positons in a special way. 474 // we need to split use positons in a special way.
472 bool split_at_start = false; 475 bool split_at_start = false;
473 476
474 if (current->start() == position) { 477 if (current->start() == position) {
475 // When splitting at start we need to locate the previous use interval. 478 // When splitting at start we need to locate the previous use interval.
476 current = first_interval_; 479 current = first_interval_;
477 } 480 }
478 481
479 UseInterval* after = nullptr; 482 UseInterval* after = nullptr;
480 while (current != nullptr) { 483 while (current != nullptr) {
481 if (current->Contains(position)) { 484 if (current->Contains(position)) {
482 after = current->SplitAt(position, zone); 485 after = current->SplitAt(position, zone);
483 break; 486 break;
484 } 487 }
485 auto next = current->next(); 488 UseInterval* next = current->next();
486 if (next->start() >= position) { 489 if (next->start() >= position) {
487 split_at_start = (next->start() == position); 490 split_at_start = (next->start() == position);
488 after = next; 491 after = next;
489 current->set_next(nullptr); 492 current->set_next(nullptr);
490 break; 493 break;
491 } 494 }
492 current = next; 495 current = next;
493 } 496 }
494 DCHECK(nullptr != after); 497 DCHECK(nullptr != after);
495 498
496 // Partition original use intervals to the two live ranges. 499 // Partition original use intervals to the two live ranges.
497 auto before = current; 500 UseInterval* before = current;
498 result->last_interval_ = 501 result->last_interval_ =
499 (last_interval_ == before) 502 (last_interval_ == before)
500 ? after // Only interval in the range after split. 503 ? after // Only interval in the range after split.
501 : last_interval_; // Last interval of the original range. 504 : last_interval_; // Last interval of the original range.
502 result->first_interval_ = after; 505 result->first_interval_ = after;
503 last_interval_ = before; 506 last_interval_ = before;
504 507
505 // Find the last use position before the split and the first use 508 // Find the last use position before the split and the first use
506 // position after it. 509 // position after it.
507 auto use_after = 510 UsePosition* use_after =
508 splitting_pointer_ == nullptr || splitting_pointer_->pos() > position 511 splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
509 ? first_pos() 512 ? first_pos()
510 : splitting_pointer_; 513 : splitting_pointer_;
511 UsePosition* use_before = nullptr; 514 UsePosition* use_before = nullptr;
512 if (split_at_start) { 515 if (split_at_start) {
513 // The split position coincides with the beginning of a use interval (the 516 // The split position coincides with the beginning of a use interval (the
514 // end of a lifetime hole). Use at this position should be attributed to 517 // end of a lifetime hole). Use at this position should be attributed to
515 // the split child because split child owns use interval covering it. 518 // the split child because split child owns use interval covering it.
516 while (use_after != nullptr && use_after->pos() < position) { 519 while (use_after != nullptr && use_after->pos() < position) {
517 use_before = use_after; 520 use_before = use_after;
(...skipping 34 matching lines...)
552 void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) { 555 void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
553 LiveRange* child = this; 556 LiveRange* child = this;
554 for (; child != nullptr; child = child->next()) { 557 for (; child != nullptr; child = child->next()) {
555 child->top_level_ = new_top_level; 558 child->top_level_ = new_top_level;
556 } 559 }
557 } 560 }
558 561
559 562
560 void LiveRange::ConvertUsesToOperand(const InstructionOperand& op, 563 void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
561 const InstructionOperand& spill_op) { 564 const InstructionOperand& spill_op) {
562 for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) { 565 for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
563 DCHECK(Start() <= pos->pos() && pos->pos() <= End()); 566 DCHECK(Start() <= pos->pos() && pos->pos() <= End());
564 if (!pos->HasOperand()) continue; 567 if (!pos->HasOperand()) continue;
565 switch (pos->type()) { 568 switch (pos->type()) {
566 case UsePositionType::kRequiresSlot: 569 case UsePositionType::kRequiresSlot:
567 DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot()); 570 DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot());
568 InstructionOperand::ReplaceWith(pos->operand(), &spill_op); 571 InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
569 break; 572 break;
570 case UsePositionType::kRequiresRegister: 573 case UsePositionType::kRequiresRegister:
571 DCHECK(op.IsRegister() || op.IsDoubleRegister()); 574 DCHECK(op.IsRegister() || op.IsDoubleRegister());
572 // Fall through. 575 // Fall through.
(...skipping 18 matching lines...)
591 if (pos == nullptr) return false; 594 if (pos == nullptr) return false;
592 UsePosition* other_pos = other->first_pos(); 595 UsePosition* other_pos = other->first_pos();
593 if (other_pos == nullptr) return true; 596 if (other_pos == nullptr) return true;
594 return pos->pos() < other_pos->pos(); 597 return pos->pos() < other_pos->pos();
595 } 598 }
596 return start < other_start; 599 return start < other_start;
597 } 600 }
598 601
599 602
600 void LiveRange::SetUseHints(int register_index) { 603 void LiveRange::SetUseHints(int register_index) {
601 for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) { 604 for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
602 if (!pos->HasOperand()) continue; 605 if (!pos->HasOperand()) continue;
603 switch (pos->type()) { 606 switch (pos->type()) {
604 case UsePositionType::kRequiresSlot: 607 case UsePositionType::kRequiresSlot:
605 break; 608 break;
606 case UsePositionType::kRequiresRegister: 609 case UsePositionType::kRequiresRegister:
607 case UsePositionType::kAny: 610 case UsePositionType::kAny:
608 pos->set_assigned_register(register_index); 611 pos->set_assigned_register(register_index);
609 break; 612 break;
610 } 613 }
611 } 614 }
612 } 615 }
613 616
614 617
615 bool LiveRange::CanCover(LifetimePosition position) const { 618 bool LiveRange::CanCover(LifetimePosition position) const {
616 if (IsEmpty()) return false; 619 if (IsEmpty()) return false;
617 return Start() <= position && position < End(); 620 return Start() <= position && position < End();
618 } 621 }
619 622
620 623
621 bool LiveRange::Covers(LifetimePosition position) const { 624 bool LiveRange::Covers(LifetimePosition position) const {
622 if (!CanCover(position)) return false; 625 if (!CanCover(position)) return false;
623 auto start_search = FirstSearchIntervalForPosition(position); 626 UseInterval* start_search = FirstSearchIntervalForPosition(position);
624 for (auto interval = start_search; interval != nullptr; 627 for (UseInterval* interval = start_search; interval != nullptr;
625 interval = interval->next()) { 628 interval = interval->next()) {
626 DCHECK(interval->next() == nullptr || 629 DCHECK(interval->next() == nullptr ||
627 interval->next()->start() >= interval->start()); 630 interval->next()->start() >= interval->start());
628 AdvanceLastProcessedMarker(interval, position); 631 AdvanceLastProcessedMarker(interval, position);
629 if (interval->Contains(position)) return true; 632 if (interval->Contains(position)) return true;
630 if (interval->start() > position) return false; 633 if (interval->start() > position) return false;
631 } 634 }
632 return false; 635 return false;
633 } 636 }
634 637
635 638
636 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const { 639 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
637 auto b = other->first_interval(); 640 UseInterval* b = other->first_interval();
638 if (b == nullptr) return LifetimePosition::Invalid(); 641 if (b == nullptr) return LifetimePosition::Invalid();
639 auto advance_last_processed_up_to = b->start(); 642 LifetimePosition advance_last_processed_up_to = b->start();
640 auto a = FirstSearchIntervalForPosition(b->start()); 643 UseInterval* a = FirstSearchIntervalForPosition(b->start());
641 while (a != nullptr && b != nullptr) { 644 while (a != nullptr && b != nullptr) {
642 if (a->start() > other->End()) break; 645 if (a->start() > other->End()) break;
643 if (b->start() > End()) break; 646 if (b->start() > End()) break;
644 auto cur_intersection = a->Intersect(b); 647 LifetimePosition cur_intersection = a->Intersect(b);
645 if (cur_intersection.IsValid()) { 648 if (cur_intersection.IsValid()) {
646 return cur_intersection; 649 return cur_intersection;
647 } 650 }
648 if (a->start() < b->start()) { 651 if (a->start() < b->start()) {
649 a = a->next(); 652 a = a->next();
650 if (a == nullptr || a->start() > other->End()) break; 653 if (a == nullptr || a->start() > other->End()) break;
651 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); 654 AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
652 } else { 655 } else {
653 b = b->next(); 656 b = b->next();
654 } 657 }
655 } 658 }
656 return LifetimePosition::Invalid(); 659 return LifetimePosition::Invalid();
657 } 660 }
658 661
659 662
660 unsigned LiveRange::GetSize() { 663 unsigned LiveRange::GetSize() {
661 if (size_ == kInvalidSize) { 664 if (size_ == kInvalidSize) {
662 size_ = 0; 665 size_ = 0;
663 for (auto interval = first_interval(); interval != nullptr; 666 for (const UseInterval* interval = first_interval(); interval != nullptr;
664 interval = interval->next()) { 667 interval = interval->next()) {
665 size_ += (interval->end().value() - interval->start().value()); 668 size_ += (interval->end().value() - interval->start().value());
666 } 669 }
667 } 670 }
668 671
669 return static_cast<unsigned>(size_); 672 return static_cast<unsigned>(size_);
670 } 673 }
671 674
672 675
673 void LiveRange::Print(const RegisterConfiguration* config, 676 void LiveRange::Print(const RegisterConfiguration* config,
(...skipping 60 matching lines...)
734 bool TopLevelLiveRange::TryCommitSpillInDeferredBlock( 737 bool TopLevelLiveRange::TryCommitSpillInDeferredBlock(
735 InstructionSequence* code, const InstructionOperand& spill_operand) { 738 InstructionSequence* code, const InstructionOperand& spill_operand) {
736 if (!IsSpilledOnlyInDeferredBlocks()) return false; 739 if (!IsSpilledOnlyInDeferredBlocks()) return false;
737 740
738 TRACE("Live Range %d will be spilled only in deferred blocks.\n", vreg()); 741 TRACE("Live Range %d will be spilled only in deferred blocks.\n", vreg());
739 // If we have ranges that aren't spilled but require the operand on the stack, 742 // If we have ranges that aren't spilled but require the operand on the stack,
740 // make sure we insert the spill. 743 // make sure we insert the spill.
741 for (const LiveRange* child = this; child != nullptr; child = child->next()) { 744 for (const LiveRange* child = this; child != nullptr; child = child->next()) {
742 if (!child->spilled() && 745 if (!child->spilled() &&
743 child->NextSlotPosition(child->Start()) != nullptr) { 746 child->NextSlotPosition(child->Start()) != nullptr) {
744 auto instr = code->InstructionAt(child->Start().ToInstructionIndex()); 747-748 Instruction* instr = code->InstructionAt(child->Start().ToInstructionIndex());
745 // Insert spill at the end to let live range connections happen at START. 749 // Insert spill at the end to let live range connections happen at START.
746 auto move = 750 ParallelMove* move =
747 instr->GetOrCreateParallelMove(Instruction::END, code->zone()); 751 instr->GetOrCreateParallelMove(Instruction::END, code->zone());
748 InstructionOperand assigned = child->GetAssignedOperand(); 752 InstructionOperand assigned = child->GetAssignedOperand();
749 if (TopLevel()->has_slot_use()) { 753 if (TopLevel()->has_slot_use()) {
750 bool found = false; 754 bool found = false;
751 for (auto move_op : *move) { 755 for (MoveOperands* move_op : *move) {
752 if (move_op->IsEliminated()) continue; 756 if (move_op->IsEliminated()) continue;
753 if (move_op->source().Equals(assigned) && 757 if (move_op->source().Equals(assigned) &&
754 move_op->destination().Equals(spill_operand)) { 758 move_op->destination().Equals(spill_operand)) {
755 found = true; 759 found = true;
756 break; 760 break;
757 } 761 }
758 } 762 }
759 if (found) continue; 763 if (found) continue;
760 } 764 }
761 765
762 move->AddMove(assigned, spill_operand); 766 move->AddMove(assigned, spill_operand);
763 } 767 }
764 } 768 }
765 769
766 return true; 770 return true;
767 } 771 }
768 772
769 773
770 void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence, 774 void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
771 const InstructionOperand& op, 775 const InstructionOperand& op,
772 bool might_be_duplicated) { 776 bool might_be_duplicated) {
773 DCHECK_IMPLIES(op.IsConstant(), spill_move_insertion_locations() == nullptr); 777 DCHECK_IMPLIES(op.IsConstant(), spill_move_insertion_locations() == nullptr);
774 auto zone = sequence->zone(); 778 Zone* zone = sequence->zone();
775 779
776 for (auto to_spill = spill_move_insertion_locations(); to_spill != nullptr; 780 for (SpillMoveInsertionList* to_spill = spill_move_insertion_locations();
777 to_spill = to_spill->next) { 781 to_spill != nullptr; to_spill = to_spill->next) {
778 auto instr = sequence->InstructionAt(to_spill->gap_index); 782 Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
779 auto move = instr->GetOrCreateParallelMove(Instruction::START, zone); 783-784 ParallelMove* move = instr->GetOrCreateParallelMove(Instruction::START, zone);
780 // Skip insertion if it's possible that the move exists already as a 785 // Skip insertion if it's possible that the move exists already as a
781 // constraint move from a fixed output register to a slot. 786 // constraint move from a fixed output register to a slot.
782 if (might_be_duplicated || has_preassigned_slot()) { 787 if (might_be_duplicated || has_preassigned_slot()) {
783 bool found = false; 788 bool found = false;
784 for (auto move_op : *move) { 789 for (MoveOperands* move_op : *move) {
785 if (move_op->IsEliminated()) continue; 790 if (move_op->IsEliminated()) continue;
786 if (move_op->source().Equals(*to_spill->operand) && 791 if (move_op->source().Equals(*to_spill->operand) &&
787 move_op->destination().Equals(op)) { 792 move_op->destination().Equals(op)) {
788 found = true; 793 found = true;
789 if (has_preassigned_slot()) move_op->Eliminate(); 794 if (has_preassigned_slot()) move_op->Eliminate();
790 break; 795 break;
791 } 796 }
792 } 797 }
793 if (found) continue; 798 if (found) continue;
794 } 799 }
(...skipping 13 matching lines...)
808 813
809 814
810 void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) { 815 void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
811 DCHECK(!HasSpillOperand()); 816 DCHECK(!HasSpillOperand());
812 DCHECK(spill_range); 817 DCHECK(spill_range);
813 spill_range_ = spill_range; 818 spill_range_ = spill_range;
814 } 819 }
815 820
816 821
817 AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const { 822 AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
818 auto spill_range = GetSpillRange(); 823 SpillRange* spill_range = GetSpillRange();
819 int index = spill_range->assigned_slot(); 824 int index = spill_range->assigned_slot();
820 return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index); 825 return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
821 } 826 }
822 827
823 828
824 void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end, 829 void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
825 Zone* zone) { 830 Zone* zone) {
826 DCHECK(start != Start() || end != End()); 831 DCHECK(start != Start() || end != End());
827 DCHECK(start < end); 832 DCHECK(start < end);
828 833
(...skipping 166 matching lines...)
995 DCHECK(first_interval_->start() <= start); 1000 DCHECK(first_interval_->start() <= start);
996 DCHECK(start < first_interval_->end()); 1001 DCHECK(start < first_interval_->end());
997 first_interval_->set_start(start); 1002 first_interval_->set_start(start);
998 } 1003 }
999 1004
1000 1005
1001 void TopLevelLiveRange::EnsureInterval(LifetimePosition start, 1006 void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
1002 LifetimePosition end, Zone* zone) { 1007 LifetimePosition end, Zone* zone) {
1003 TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(), 1008 TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
1004 end.value()); 1009 end.value());
1005 auto new_end = end; 1010 LifetimePosition new_end = end;
1006 while (first_interval_ != nullptr && first_interval_->start() <= end) { 1011 while (first_interval_ != nullptr && first_interval_->start() <= end) {
1007 if (first_interval_->end() > end) { 1012 if (first_interval_->end() > end) {
1008 new_end = first_interval_->end(); 1013 new_end = first_interval_->end();
1009 } 1014 }
1010 first_interval_ = first_interval_->next(); 1015 first_interval_ = first_interval_->next();
1011 } 1016 }
1012 1017
1013 auto new_interval = new (zone) UseInterval(start, new_end); 1018 UseInterval* new_interval = new (zone) UseInterval(start, new_end);
1014 new_interval->set_next(first_interval_); 1019 new_interval->set_next(first_interval_);
1015 first_interval_ = new_interval; 1020 first_interval_ = new_interval;
1016 if (new_interval->next() == nullptr) { 1021 if (new_interval->next() == nullptr) {
1017 last_interval_ = new_interval; 1022 last_interval_ = new_interval;
1018 } 1023 }
1019 } 1024 }
1020 1025
1021 1026
1022 void TopLevelLiveRange::AddUseInterval(LifetimePosition start, 1027 void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
1023 LifetimePosition end, Zone* zone) { 1028 LifetimePosition end, Zone* zone) {
1024 TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(), 1029 TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
1025 end.value()); 1030 end.value());
1026 if (first_interval_ == nullptr) { 1031 if (first_interval_ == nullptr) {
1027 auto interval = new (zone) UseInterval(start, end); 1032 UseInterval* interval = new (zone) UseInterval(start, end);
1028 first_interval_ = interval; 1033 first_interval_ = interval;
1029 last_interval_ = interval; 1034 last_interval_ = interval;
1030 } else { 1035 } else {
1031 if (end == first_interval_->start()) { 1036 if (end == first_interval_->start()) {
1032 first_interval_->set_start(start); 1037 first_interval_->set_start(start);
1033 } else if (end < first_interval_->start()) { 1038 } else if (end < first_interval_->start()) {
1034 auto interval = new (zone) UseInterval(start, end); 1039 UseInterval* interval = new (zone) UseInterval(start, end);
1035 interval->set_next(first_interval_); 1040 interval->set_next(first_interval_);
1036 first_interval_ = interval; 1041 first_interval_ = interval;
1037 } else { 1042 } else {
1038 // Order of instruction's processing (see ProcessInstructions) guarantees 1043 // Order of instruction's processing (see ProcessInstructions) guarantees
1039 // that each new use interval either precedes or intersects with 1044 // that each new use interval either precedes or intersects with
1040 // last added interval. 1045 // last added interval.
1041 DCHECK(start < first_interval_->end()); 1046 DCHECK(start < first_interval_->end());
1042 first_interval_->set_start(Min(start, first_interval_->start())); 1047 first_interval_->set_start(Min(start, first_interval_->start()));
1043 first_interval_->set_end(Max(end, first_interval_->end())); 1048 first_interval_->set_end(Max(end, first_interval_->end()));
1044 } 1049 }
1045 } 1050 }
1046 } 1051 }
1047 1052
1048 1053
1049 void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) { 1054 void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
1050 auto pos = use_pos->pos(); 1055 LifetimePosition pos = use_pos->pos();
1051 TRACE("Add to live range %d use position %d\n", vreg(), pos.value()); 1056 TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
1052 UsePosition* prev_hint = nullptr; 1057 UsePosition* prev_hint = nullptr;
1053 UsePosition* prev = nullptr; 1058 UsePosition* prev = nullptr;
1054 auto current = first_pos_; 1059 UsePosition* current = first_pos_;
1055 while (current != nullptr && current->pos() < pos) { 1060 while (current != nullptr && current->pos() < pos) {
1056 prev_hint = current->HasHint() ? current : prev_hint; 1061 prev_hint = current->HasHint() ? current : prev_hint;
1057 prev = current; 1062 prev = current;
1058 current = current->next(); 1063 current = current->next();
1059 } 1064 }
1060 1065
1061 if (prev == nullptr) { 1066 if (prev == nullptr) {
1062 use_pos->set_next(first_pos_); 1067 use_pos->set_next(first_pos_);
1063 first_pos_ = use_pos; 1068 first_pos_ = use_pos;
1064 } else { 1069 } else {
(...skipping 28 matching lines...)
1093 1098
1094 std::ostream& operator<<(std::ostream& os, 1099 std::ostream& operator<<(std::ostream& os,
1095 const PrintableLiveRange& printable_range) { 1100 const PrintableLiveRange& printable_range) {
1096 const LiveRange* range = printable_range.range_; 1101 const LiveRange* range = printable_range.range_;
1097 os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id() 1102 os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
1098 << " "; 1103 << " ";
1099 if (range->TopLevel()->is_phi()) os << "phi "; 1104 if (range->TopLevel()->is_phi()) os << "phi ";
1100 if (range->TopLevel()->is_non_loop_phi()) os << "nlphi "; 1105 if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";
1101 1106
1102 os << "{" << std::endl; 1107 os << "{" << std::endl;
1103 auto interval = range->first_interval(); 1108 UseInterval* interval = range->first_interval();
1104 auto use_pos = range->first_pos(); 1109 UsePosition* use_pos = range->first_pos();
1105 PrintableInstructionOperand pio; 1110 PrintableInstructionOperand pio;
1106 pio.register_configuration_ = printable_range.register_configuration_; 1111 pio.register_configuration_ = printable_range.register_configuration_;
1107 while (use_pos != nullptr) { 1112 while (use_pos != nullptr) {
1108 if (use_pos->HasOperand()) { 1113 if (use_pos->HasOperand()) {
1109 pio.op_ = *use_pos->operand(); 1114 pio.op_ = *use_pos->operand();
1110 os << pio << use_pos->pos() << " "; 1115 os << pio << use_pos->pos() << " ";
1111 } 1116 }
1112 use_pos = use_pos->next(); 1117 use_pos = use_pos->next();
1113 } 1118 }
1114 os << std::endl; 1119 os << std::endl;
(...skipping 14 matching lines...)
1129 byte_width_(GetByteWidth(parent->representation())), 1134 byte_width_(GetByteWidth(parent->representation())),
1130 kind_(parent->kind()) { 1135 kind_(parent->kind()) {
1131 // Spill ranges are created for top level, non-splintered ranges. This is so 1136 // Spill ranges are created for top level, non-splintered ranges. This is so
1132 // that, when merging decisions are made, we consider the full extent of the 1137 // that, when merging decisions are made, we consider the full extent of the
1133 // virtual register, and avoid clobbering it. 1138 // virtual register, and avoid clobbering it.
1134 DCHECK(!parent->IsSplinter()); 1139 DCHECK(!parent->IsSplinter());
1135 UseInterval* result = nullptr; 1140 UseInterval* result = nullptr;
1136 UseInterval* node = nullptr; 1141 UseInterval* node = nullptr;
1137 // Copy the intervals for all ranges. 1142 // Copy the intervals for all ranges.
1138 for (LiveRange* range = parent; range != nullptr; range = range->next()) { 1143 for (LiveRange* range = parent; range != nullptr; range = range->next()) {
1139 auto src = range->first_interval(); 1144 UseInterval* src = range->first_interval();
1140 while (src != nullptr) { 1145 while (src != nullptr) {
1141 auto new_node = new (zone) UseInterval(src->start(), src->end()); 1146 UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
1142 if (result == nullptr) { 1147 if (result == nullptr) {
1143 result = new_node; 1148 result = new_node;
1144 } else { 1149 } else {
1145 node->set_next(new_node); 1150 node->set_next(new_node);
1146 } 1151 }
1147 node = new_node; 1152 node = new_node;
1148 src = src->next(); 1153 src = src->next();
1149 } 1154 }
1150 } 1155 }
1151 use_interval_ = result; 1156 use_interval_ = result;
(...skipping 19 matching lines...)
1171 1176
1172 1177
1173 bool SpillRange::TryMerge(SpillRange* other) { 1178 bool SpillRange::TryMerge(SpillRange* other) {
1174 if (HasSlot() || other->HasSlot()) return false; 1179 if (HasSlot() || other->HasSlot()) return false;
1175 // TODO(dcarney): byte widths should be compared here not kinds. 1180 // TODO(dcarney): byte widths should be compared here not kinds.
1176 if (live_ranges_[0]->kind() != other->live_ranges_[0]->kind() || 1181 if (live_ranges_[0]->kind() != other->live_ranges_[0]->kind() ||
1177 IsIntersectingWith(other)) { 1182 IsIntersectingWith(other)) {
1178 return false; 1183 return false;
1179 } 1184 }
1180 1185
1181 auto max = LifetimePosition::MaxPosition(); 1186 LifetimePosition max = LifetimePosition::MaxPosition();
1182 if (End() < other->End() && other->End() != max) { 1187 if (End() < other->End() && other->End() != max) {
1183 end_position_ = other->End(); 1188 end_position_ = other->End();
1184 } 1189 }
1185 other->end_position_ = max; 1190 other->end_position_ = max;
1186 1191
1187 MergeDisjointIntervals(other->use_interval_); 1192 MergeDisjointIntervals(other->use_interval_);
1188 other->use_interval_ = nullptr; 1193 other->use_interval_ = nullptr;
1189 1194
1190 for (auto range : other->live_ranges()) { 1195 for (TopLevelLiveRange* range : other->live_ranges()) {
1191 DCHECK(range->GetSpillRange() == other); 1196 DCHECK(range->GetSpillRange() == other);
1192 range->SetSpillRange(this); 1197 range->SetSpillRange(this);
1193 } 1198 }
1194 1199
1195 live_ranges().insert(live_ranges().end(), other->live_ranges().begin(), 1200 live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
1196 other->live_ranges().end()); 1201 other->live_ranges().end());
1197 other->live_ranges().clear(); 1202 other->live_ranges().clear();
1198 1203
1199 return true; 1204 return true;
1200 } 1205 }
1201 1206
1202 1207
1203 void SpillRange::MergeDisjointIntervals(UseInterval* other) { 1208 void SpillRange::MergeDisjointIntervals(UseInterval* other) {
1204 UseInterval* tail = nullptr; 1209 UseInterval* tail = nullptr;
1205 auto current = use_interval_; 1210 UseInterval* current = use_interval_;
1206 while (other != nullptr) { 1211 while (other != nullptr) {
1207 // Make sure the 'current' list starts first 1212 // Make sure the 'current' list starts first
1208 if (current == nullptr || current->start() > other->start()) { 1213 if (current == nullptr || current->start() > other->start()) {
1209 std::swap(current, other); 1214 std::swap(current, other);
1210 } 1215 }
1211 // Check disjointness 1216 // Check disjointness
1212 DCHECK(other == nullptr || current->end() <= other->start()); 1217 DCHECK(other == nullptr || current->end() <= other->start());
1213 // Append the 'current' node to the result accumulator and move forward 1218 // Append the 'current' node to the result accumulator and move forward
1214 if (tail == nullptr) { 1219 if (tail == nullptr) {
1215 use_interval_ = current; 1220 use_interval_ = current;
(...skipping 34 matching lines...)
1250 1255
1251 1256
1252 void RegisterAllocationData::PhiMapValue::AddOperand( 1257 void RegisterAllocationData::PhiMapValue::AddOperand(
1253 InstructionOperand* operand) { 1258 InstructionOperand* operand) {
1254 incoming_operands_.push_back(operand); 1259 incoming_operands_.push_back(operand);
1255 } 1260 }
1256 1261
1257 1262
1258 void RegisterAllocationData::PhiMapValue::CommitAssignment( 1263 void RegisterAllocationData::PhiMapValue::CommitAssignment(
1259 const InstructionOperand& assigned) { 1264 const InstructionOperand& assigned) {
1260 for (auto operand : incoming_operands_) { 1265 for (InstructionOperand* operand : incoming_operands_) {
1261 InstructionOperand::ReplaceWith(operand, &assigned); 1266 InstructionOperand::ReplaceWith(operand, &assigned);
1262 } 1267 }
1263 } 1268 }
1264 1269
1265 1270
1266 RegisterAllocationData::RegisterAllocationData( 1271 RegisterAllocationData::RegisterAllocationData(
1267 const RegisterConfiguration* config, Zone* zone, Frame* frame, 1272 const RegisterConfiguration* config, Zone* zone, Frame* frame,
1268 InstructionSequence* code, const char* debug_name) 1273 InstructionSequence* code, const char* debug_name)
1269 : allocation_zone_(zone), 1274 : allocation_zone_(zone),
1270 frame_(frame), 1275 frame_(frame),
(...skipping 28 matching lines...)
1299 assigned_double_registers_ = new (code_zone()) 1304 assigned_double_registers_ = new (code_zone())
1300 BitVector(this->config()->num_double_registers(), code_zone()); 1305 BitVector(this->config()->num_double_registers(), code_zone());
1301 this->frame()->SetAllocatedRegisters(assigned_registers_); 1306 this->frame()->SetAllocatedRegisters(assigned_registers_);
1302 this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_); 1307 this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1303 } 1308 }
1304 1309
1305 1310
1306 MoveOperands* RegisterAllocationData::AddGapMove( 1311 MoveOperands* RegisterAllocationData::AddGapMove(
1307 int index, Instruction::GapPosition position, 1312 int index, Instruction::GapPosition position,
1308 const InstructionOperand& from, const InstructionOperand& to) { 1313 const InstructionOperand& from, const InstructionOperand& to) {
1309 auto instr = code()->InstructionAt(index); 1314 Instruction* instr = code()->InstructionAt(index);
1310 auto moves = instr->GetOrCreateParallelMove(position, code_zone()); 1315 ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
1311 return moves->AddMove(from, to); 1316 return moves->AddMove(from, to);
1312 } 1317 }
1313 1318
1314 1319
1315 MachineRepresentation RegisterAllocationData::RepresentationFor( 1320 MachineRepresentation RegisterAllocationData::RepresentationFor(
1316 int virtual_register) { 1321 int virtual_register) {
1317 DCHECK_LT(virtual_register, code()->VirtualRegisterCount()); 1322 DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
1318 return code()->GetRepresentation(virtual_register); 1323 return code()->GetRepresentation(virtual_register);
1319 } 1324 }
1320 1325
1321 1326
1322 TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) { 1327 TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
1323 if (index >= static_cast<int>(live_ranges().size())) { 1328 if (index >= static_cast<int>(live_ranges().size())) {
1324 live_ranges().resize(index + 1, nullptr); 1329 live_ranges().resize(index + 1, nullptr);
1325 } 1330 }
1326 auto result = live_ranges()[index]; 1331 TopLevelLiveRange* result = live_ranges()[index];
1327 if (result == nullptr) { 1332 if (result == nullptr) {
1328 result = NewLiveRange(index, RepresentationFor(index)); 1333 result = NewLiveRange(index, RepresentationFor(index));
1329 live_ranges()[index] = result; 1334 live_ranges()[index] = result;
1330 } 1335 }
1331 return result; 1336 return result;
1332 } 1337 }
1333 1338
1334 1339
1335 TopLevelLiveRange* RegisterAllocationData::NewLiveRange( 1340 TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
1336 int index, MachineRepresentation rep) { 1341 int index, MachineRepresentation rep) {
(...skipping 13 matching lines...)
1350 TopLevelLiveRange* RegisterAllocationData::NextLiveRange( 1355 TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
1351 MachineRepresentation rep) { 1356 MachineRepresentation rep) {
1352 int vreg = GetNextLiveRangeId(); 1357 int vreg = GetNextLiveRangeId();
1353 TopLevelLiveRange* ret = NewLiveRange(vreg, rep); 1358 TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
1354 return ret; 1359 return ret;
1355 } 1360 }
1356 1361
1357 1362
1358 RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap( 1363 RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
1359 const InstructionBlock* block, PhiInstruction* phi) { 1364 const InstructionBlock* block, PhiInstruction* phi) {
1360 auto map_value = new (allocation_zone()) 1365 RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
1361 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone()); 1366 RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
1362 auto res = 1367 auto res =
1363 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value)); 1368 phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
1364 DCHECK(res.second); 1369 DCHECK(res.second);
1365 USE(res); 1370 USE(res);
1366 return map_value; 1371 return map_value;
1367 } 1372 }
1368 1373
1369 1374
1370 RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor( 1375 RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
(...skipping 79 matching lines...)
1450 spill_ranges()[spill_range_index] = spill_range; 1455 spill_ranges()[spill_range_index] = spill_range;
1451 1456
1452 return spill_range; 1457 return spill_range;
1453 } 1458 }
1454 1459
1455 1460
1456 SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange( 1461 SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
1457 TopLevelLiveRange* range) { 1462 TopLevelLiveRange* range) {
1458 DCHECK(!range->HasSpillOperand()); 1463 DCHECK(!range->HasSpillOperand());
1459 DCHECK(!range->IsSplinter()); 1464 DCHECK(!range->IsSplinter());
1460 auto spill_range = 1465 SpillRange* spill_range =
1461 new (allocation_zone()) SpillRange(range, allocation_zone()); 1466 new (allocation_zone()) SpillRange(range, allocation_zone());
1462 return spill_range; 1467 return spill_range;
1463 } 1468 }
1464 1469
1465 1470
1466 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) { 1471 void RegisterAllocationData::MarkAllocated(RegisterKind kind, int index) {
1467 if (kind == DOUBLE_REGISTERS) { 1472 if (kind == DOUBLE_REGISTERS) {
1468 assigned_double_registers_->Add(index); 1473 assigned_double_registers_->Add(index);
1469 } else { 1474 } else {
1470 DCHECK(kind == GENERAL_REGISTERS); 1475 DCHECK(kind == GENERAL_REGISTERS);
(...skipping 34 matching lines...)
1505 DCHECK(IsFloatingPoint(rep)); 1510 DCHECK(IsFloatingPoint(rep));
1506 DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register); 1511 DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
1507 allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep, 1512 allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1508 operand->fixed_register_index()); 1513 operand->fixed_register_index());
1509 } else { 1514 } else {
1510 UNREACHABLE(); 1515 UNREACHABLE();
1511 } 1516 }
1512 InstructionOperand::ReplaceWith(operand, &allocated); 1517 InstructionOperand::ReplaceWith(operand, &allocated);
1513 if (is_tagged) { 1518 if (is_tagged) {
1514 TRACE("Fixed reg is tagged at %d\n", pos); 1519 TRACE("Fixed reg is tagged at %d\n", pos);
1515 auto instr = code()->InstructionAt(pos); 1520 Instruction* instr = code()->InstructionAt(pos);
1516 if (instr->HasReferenceMap()) { 1521 if (instr->HasReferenceMap()) {
1517 instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand)); 1522 instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
1518 } 1523 }
1519 } 1524 }
1520 return operand; 1525 return operand;
1521 } 1526 }
1522 1527
1523 1528
1524 void ConstraintBuilder::MeetRegisterConstraints() { 1529 void ConstraintBuilder::MeetRegisterConstraints() {
1525 for (auto block : code()->instruction_blocks()) { 1530 for (InstructionBlock* block : code()->instruction_blocks()) {
1526 MeetRegisterConstraints(block); 1531 MeetRegisterConstraints(block);
1527 } 1532 }
1528 } 1533 }
1529 1534
1530 1535
1531 void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) { 1536 void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
1532 int start = block->first_instruction_index(); 1537 int start = block->first_instruction_index();
1533 int end = block->last_instruction_index(); 1538 int end = block->last_instruction_index();
1534 DCHECK_NE(-1, start); 1539 DCHECK_NE(-1, start);
1535 for (int i = start; i <= end; ++i) { 1540 for (int i = start; i <= end; ++i) {
1536 MeetConstraintsBefore(i); 1541 MeetConstraintsBefore(i);
1537 if (i != end) MeetConstraintsAfter(i); 1542 if (i != end) MeetConstraintsAfter(i);
1538 } 1543 }
1539 // Meet register constraints for the instruction in the end. 1544 // Meet register constraints for the instruction in the end.
1540 MeetRegisterConstraintsForLastInstructionInBlock(block); 1545 MeetRegisterConstraintsForLastInstructionInBlock(block);
1541 } 1546 }
1542 1547
1543 1548
1544 void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock( 1549 void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
1545 const InstructionBlock* block) { 1550 const InstructionBlock* block) {
1546 int end = block->last_instruction_index(); 1551 int end = block->last_instruction_index();
1547 auto last_instruction = code()->InstructionAt(end); 1552 Instruction* last_instruction = code()->InstructionAt(end);
1548 for (size_t i = 0; i < last_instruction->OutputCount(); i++) { 1553 for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
1549 auto output_operand = last_instruction->OutputAt(i); 1554 InstructionOperand* output_operand = last_instruction->OutputAt(i);
1550 DCHECK(!output_operand->IsConstant()); 1555 DCHECK(!output_operand->IsConstant());
1551 auto output = UnallocatedOperand::cast(output_operand); 1556 UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
1552 int output_vreg = output->virtual_register(); 1557 int output_vreg = output->virtual_register();
1553 auto range = data()->GetOrCreateLiveRangeFor(output_vreg); 1558 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1554 bool assigned = false; 1559 bool assigned = false;
1555 if (output->HasFixedPolicy()) { 1560 if (output->HasFixedPolicy()) {
1556 AllocateFixed(output, -1, false); 1561 AllocateFixed(output, -1, false);
1557 // This value is produced on the stack, we never need to spill it. 1562 // This value is produced on the stack, we never need to spill it.
1558 if (output->IsStackSlot()) { 1563 if (output->IsStackSlot()) {
1559 DCHECK(LocationOperand::cast(output)->index() < 1564 DCHECK(LocationOperand::cast(output)->index() <
1560 data()->frame()->GetSpillSlotCount()); 1565 data()->frame()->GetSpillSlotCount());
1561 range->SetSpillOperand(LocationOperand::cast(output)); 1566 range->SetSpillOperand(LocationOperand::cast(output));
1562 range->SetSpillStartIndex(end); 1567 range->SetSpillStartIndex(end);
1563 assigned = true; 1568 assigned = true;
1564 } 1569 }
1565 1570
1566 for (auto succ : block->successors()) { 1571 for (const RpoNumber& succ : block->successors()) {
1567 const InstructionBlock* successor = code()->InstructionBlockAt(succ); 1572 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1568 DCHECK(successor->PredecessorCount() == 1); 1573 DCHECK(successor->PredecessorCount() == 1);
1569 int gap_index = successor->first_instruction_index(); 1574 int gap_index = successor->first_instruction_index();
1570 // Create an unconstrained operand for the same virtual register 1575 // Create an unconstrained operand for the same virtual register
1571 // and insert a gap move from the fixed output to the operand. 1576 // and insert a gap move from the fixed output to the operand.
1572 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); 1577 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1573 data()->AddGapMove(gap_index, Instruction::START, *output, output_copy); 1578 data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
1574 } 1579 }
1575 } 1580 }
1576 1581
1577 if (!assigned) { 1582 if (!assigned) {
1578 for (auto succ : block->successors()) { 1583 for (const RpoNumber& succ : block->successors()) {
1579 const InstructionBlock* successor = code()->InstructionBlockAt(succ); 1584 const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1580 DCHECK(successor->PredecessorCount() == 1); 1585 DCHECK(successor->PredecessorCount() == 1);
1581 int gap_index = successor->first_instruction_index(); 1586 int gap_index = successor->first_instruction_index();
1582 range->RecordSpillLocation(allocation_zone(), gap_index, output); 1587 range->RecordSpillLocation(allocation_zone(), gap_index, output);
1583 range->SetSpillStartIndex(gap_index); 1588 range->SetSpillStartIndex(gap_index);
1584 } 1589 }
1585 } 1590 }
1586 } 1591 }
1587 } 1592 }
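For a fixed output produced by the last instruction of a block, the function above forwards the value to every successor (each has exactly one predecessor here) through a gap move into an unconstrained copy of the same virtual register, which the allocator can then place freely. A compressed illustration of the per-successor walk, with invented types rather than V8's AddGapMove API:

#include <cstdio>
#include <vector>

// Invented stand-in; not V8's InstructionBlock.
struct Block { int first_instruction_index; };

// Record a move from the fixed location of `vreg` into an "ANY"-policy copy of
// the same virtual register at the successor's first gap position.
void AddGapMoveFromFixedOutput(int gap_index, int vreg) {
  std::printf("gap %d: fixed v%d -> unconstrained copy of v%d\n",
              gap_index, vreg, vreg);
}

void ForwardFixedOutput(int output_vreg, const std::vector<Block>& successors) {
  for (const Block& succ : successors) {
    AddGapMoveFromFixedOutput(succ.first_instruction_index, output_vreg);
  }
}

int main() {
  ForwardFixedOutput(7, {{10}, {14}});  // two successors starting at gaps 10 and 14
}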
1588 1593
1589 1594
1590 void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { 1595 void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
1591 auto first = code()->InstructionAt(instr_index); 1596 Instruction* first = code()->InstructionAt(instr_index);
1592 // Handle fixed temporaries. 1597 // Handle fixed temporaries.
1593 for (size_t i = 0; i < first->TempCount(); i++) { 1598 for (size_t i = 0; i < first->TempCount(); i++) {
1594 auto temp = UnallocatedOperand::cast(first->TempAt(i)); 1599 UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
1595 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false); 1600 if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
1596 } 1601 }
1597 // Handle constant/fixed output operands. 1602 // Handle constant/fixed output operands.
1598 for (size_t i = 0; i < first->OutputCount(); i++) { 1603 for (size_t i = 0; i < first->OutputCount(); i++) {
1599 InstructionOperand* output = first->OutputAt(i); 1604 InstructionOperand* output = first->OutputAt(i);
1600 if (output->IsConstant()) { 1605 if (output->IsConstant()) {
1601 int output_vreg = ConstantOperand::cast(output)->virtual_register(); 1606 int output_vreg = ConstantOperand::cast(output)->virtual_register();
1602 auto range = data()->GetOrCreateLiveRangeFor(output_vreg); 1607 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1603 range->SetSpillStartIndex(instr_index + 1); 1608 range->SetSpillStartIndex(instr_index + 1);
1604 range->SetSpillOperand(output); 1609 range->SetSpillOperand(output);
1605 continue; 1610 continue;
1606 } 1611 }
1607 auto first_output = UnallocatedOperand::cast(output); 1612 UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
1608 auto range = 1613 TopLevelLiveRange* range =
1609 data()->GetOrCreateLiveRangeFor(first_output->virtual_register()); 1614 data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
1610 bool assigned = false; 1615 bool assigned = false;
1611 if (first_output->HasFixedPolicy()) { 1616 if (first_output->HasFixedPolicy()) {
1612 int output_vreg = first_output->virtual_register(); 1617 int output_vreg = first_output->virtual_register();
1613 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); 1618 UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1614 bool is_tagged = code()->IsReference(output_vreg); 1619 bool is_tagged = code()->IsReference(output_vreg);
1615 if (first_output->HasSecondaryStorage()) { 1620 if (first_output->HasSecondaryStorage()) {
1616 range->MarkHasPreassignedSlot(); 1621 range->MarkHasPreassignedSlot();
1617 data()->preassigned_slot_ranges().push_back( 1622 data()->preassigned_slot_ranges().push_back(
1618 std::make_pair(range, first_output->GetSecondaryStorage())); 1623 std::make_pair(range, first_output->GetSecondaryStorage()));
(...skipping 16 matching lines...)
1635 if (!assigned) { 1640 if (!assigned) {
1636 range->RecordSpillLocation(allocation_zone(), instr_index + 1, 1641 range->RecordSpillLocation(allocation_zone(), instr_index + 1,
1637 first_output); 1642 first_output);
1638 range->SetSpillStartIndex(instr_index + 1); 1643 range->SetSpillStartIndex(instr_index + 1);
1639 } 1644 }
1640 } 1645 }
1641 } 1646 }
1642 1647
1643 1648
1644 void ConstraintBuilder::MeetConstraintsBefore(int instr_index) { 1649 void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
1645 auto second = code()->InstructionAt(instr_index); 1650 Instruction* second = code()->InstructionAt(instr_index);
1646 // Handle fixed input operands of second instruction. 1651 // Handle fixed input operands of second instruction.
1647 for (size_t i = 0; i < second->InputCount(); i++) { 1652 for (size_t i = 0; i < second->InputCount(); i++) {
1648 auto input = second->InputAt(i); 1653 InstructionOperand* input = second->InputAt(i);
1649 if (input->IsImmediate() || input->IsExplicit()) { 1654 if (input->IsImmediate() || input->IsExplicit()) {
1650 continue; // Ignore immediates and explicitly reserved registers. 1655 continue; // Ignore immediates and explicitly reserved registers.
1651 } 1656 }
1652 auto cur_input = UnallocatedOperand::cast(input); 1657 UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
1653 if (cur_input->HasFixedPolicy()) { 1658 if (cur_input->HasFixedPolicy()) {
1654 int input_vreg = cur_input->virtual_register(); 1659 int input_vreg = cur_input->virtual_register();
1655 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); 1660 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1656 bool is_tagged = code()->IsReference(input_vreg); 1661 bool is_tagged = code()->IsReference(input_vreg);
1657 AllocateFixed(cur_input, instr_index, is_tagged); 1662 AllocateFixed(cur_input, instr_index, is_tagged);
1658 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input); 1663 data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
1659 } 1664 }
1660 } 1665 }
1661 // Handle "output same as input" for second instruction. 1666 // Handle "output same as input" for second instruction.
1662 for (size_t i = 0; i < second->OutputCount(); i++) { 1667 for (size_t i = 0; i < second->OutputCount(); i++) {
1663 auto output = second->OutputAt(i); 1668 InstructionOperand* output = second->OutputAt(i);
1664 if (!output->IsUnallocated()) continue; 1669 if (!output->IsUnallocated()) continue;
1665 auto second_output = UnallocatedOperand::cast(output); 1670 UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
1666 if (!second_output->HasSameAsInputPolicy()) continue; 1671 if (!second_output->HasSameAsInputPolicy()) continue;
1667 DCHECK(i == 0); // Only valid for first output. 1672 DCHECK(i == 0); // Only valid for first output.
1668 UnallocatedOperand* cur_input = 1673 UnallocatedOperand* cur_input =
1669 UnallocatedOperand::cast(second->InputAt(0)); 1674 UnallocatedOperand::cast(second->InputAt(0));
1670 int output_vreg = second_output->virtual_register(); 1675 int output_vreg = second_output->virtual_register();
1671 int input_vreg = cur_input->virtual_register(); 1676 int input_vreg = cur_input->virtual_register();
1672 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); 1677 UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1673 cur_input->set_virtual_register(second_output->virtual_register()); 1678 cur_input->set_virtual_register(second_output->virtual_register());
1674 auto gap_move = data()->AddGapMove(instr_index, Instruction::END, 1679 MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
1675 input_copy, *cur_input); 1680 input_copy, *cur_input);
1676 if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) { 1681 if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
1677 if (second->HasReferenceMap()) { 1682 if (second->HasReferenceMap()) {
1678 RegisterAllocationData::DelayedReference delayed_reference = { 1683 RegisterAllocationData::DelayedReference delayed_reference = {
1679 second->reference_map(), &gap_move->source()}; 1684 second->reference_map(), &gap_move->source()};
1680 data()->delayed_references().push_back(delayed_reference); 1685 data()->delayed_references().push_back(delayed_reference);
1681 } 1686 }
1682 } else if (!code()->IsReference(input_vreg) && 1687 } else if (!code()->IsReference(input_vreg) &&
1683 code()->IsReference(output_vreg)) { 1688 code()->IsReference(output_vreg)) {
1684 // The input is assumed to immediately have a tagged representation, 1689 // The input is assumed to immediately have a tagged representation,
1685 // before the pointer map can be used. I.e. the pointer map at the 1690 // before the pointer map can be used. I.e. the pointer map at the
1686 // instruction will include the output operand (whose value at the 1691 // instruction will include the output operand (whose value at the
1687 // beginning of the instruction is equal to the input operand). If 1692 // beginning of the instruction is equal to the input operand). If
1688 // this is not desired, then the pointer map at this instruction needs 1693 // this is not desired, then the pointer map at this instruction needs
1689 // to be adjusted manually. 1694 // to be adjusted manually.
1690 } 1695 }
1691 } 1696 }
1692 } 1697 }
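The second loop above is the classic two-address fixup: when output 0 must reuse input 0's location, the input operand is renamed to the output's virtual register and a gap move just before the instruction feeds the original input value into that location. A minimal sketch of the renaming and the move it introduces (simplified types for illustration only):

#include <cstdio>

// Simplified stand-ins for illustration.
struct UnallocatedOp { int vreg; };
struct GapMove { int from_vreg; int to_vreg; };

// Output 0 must live in the same location as input 0: rename the input to the
// output's vreg and emit a move that feeds the original value into it.
GapMove FixupSameAsInput(UnallocatedOp* input, const UnallocatedOp& output) {
  GapMove move{input->vreg, output.vreg};  // copy old input value into new name
  input->vreg = output.vreg;               // instruction now reads/writes one vreg
  return move;
}

int main() {
  UnallocatedOp input{5}, output{9};
  GapMove m = FixupSameAsInput(&input, output);
  std::printf("gap move v%d -> v%d; input operand now names v%d\n",
              m.from_vreg, m.to_vreg, input.vreg);
}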
1693 1698
1694 1699
1695 void ConstraintBuilder::ResolvePhis() { 1700 void ConstraintBuilder::ResolvePhis() {
1696 // Process the blocks in reverse order. 1701 // Process the blocks in reverse order.
1697 for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) { 1702 for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
1698 ResolvePhis(block); 1703 ResolvePhis(block);
1699 } 1704 }
1700 } 1705 }
1701 1706
1702 1707
1703 void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) { 1708 void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
1704 for (auto phi : block->phis()) { 1709 for (PhiInstruction* phi : block->phis()) {
1705 int phi_vreg = phi->virtual_register(); 1710 int phi_vreg = phi->virtual_register();
1706 auto map_value = data()->InitializePhiMap(block, phi); 1711 RegisterAllocationData::PhiMapValue* map_value =
1707 auto& output = phi->output(); 1712 data()->InitializePhiMap(block, phi);
1713 InstructionOperand& output = phi->output();
1708 // Map the destination operands, so the commitment phase can find them. 1714 // Map the destination operands, so the commitment phase can find them.
1709 for (size_t i = 0; i < phi->operands().size(); ++i) { 1715 for (size_t i = 0; i < phi->operands().size(); ++i) {
1710 InstructionBlock* cur_block = 1716 InstructionBlock* cur_block =
1711 code()->InstructionBlockAt(block->predecessors()[i]); 1717 code()->InstructionBlockAt(block->predecessors()[i]);
1712 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]); 1718 UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]);
1713 auto move = data()->AddGapMove(cur_block->last_instruction_index(), 1719 MoveOperands* move = data()->AddGapMove(
1714 Instruction::END, input, output); 1720 cur_block->last_instruction_index(), Instruction::END, input, output);
1715 map_value->AddOperand(&move->destination()); 1721 map_value->AddOperand(&move->destination());
1716 DCHECK(!code() 1722 DCHECK(!code()
1717 ->InstructionAt(cur_block->last_instruction_index()) 1723 ->InstructionAt(cur_block->last_instruction_index())
1718 ->HasReferenceMap()); 1724 ->HasReferenceMap());
1719 } 1725 }
1720 auto live_range = data()->GetOrCreateLiveRangeFor(phi_vreg); 1726 TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
1721 int gap_index = block->first_instruction_index(); 1727 int gap_index = block->first_instruction_index();
1722 live_range->RecordSpillLocation(allocation_zone(), gap_index, &output); 1728 live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
1723 live_range->SetSpillStartIndex(gap_index); 1729 live_range->SetSpillStartIndex(gap_index);
1724 // We use the phi-ness of some nodes in some later heuristics. 1730 // We use the phi-ness of some nodes in some later heuristics.
1725 live_range->set_is_phi(true); 1731 live_range->set_is_phi(true);
1726 live_range->set_is_non_loop_phi(!block->IsLoopHeader()); 1732 live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1727 } 1733 }
1728 } 1734 }
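ResolvePhis turns each phi into explicit data flow: operand i is copied into the phi's output by a gap move at the end of predecessor i, and the phi's live range is tagged so later spill heuristics know its origin. A self-contained sketch of the per-predecessor move generation, with plain ints standing in for V8 operands:

#include <cstdio>
#include <vector>

// Simplified stand-ins: a phi merges one virtual register per predecessor.
struct Phi {
  int output_vreg;
  std::vector<int> operand_vregs;  // one per predecessor, same order
};

struct Pred { int last_instruction_index; };

// Emit one move per incoming edge: predecessor i's value flows into the phi.
void ResolvePhiSketch(const Phi& phi, const std::vector<Pred>& preds) {
  for (size_t i = 0; i < phi.operand_vregs.size(); ++i) {
    std::printf("end of instr %d: move v%d -> v%d\n",
                preds[i].last_instruction_index, phi.operand_vregs[i],
                phi.output_vreg);
  }
}

int main() {
  Phi phi{12, {3, 7}};
  ResolvePhiSketch(phi, {{20}, {31}});  // two predecessors
}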
1729 1735
1730 1736
(...skipping 16 matching lines...)
1747 1753
1748 // Process all successor blocks. 1754 // Process all successor blocks.
1749 for (const RpoNumber& succ : block->successors()) { 1755 for (const RpoNumber& succ : block->successors()) {
1750 // Add values live on entry to the successor. 1756 // Add values live on entry to the successor.
1751 if (succ <= block->rpo_number()) continue; 1757 if (succ <= block->rpo_number()) continue;
1752 BitVector* live_in = data->live_in_sets()[succ.ToSize()]; 1758 BitVector* live_in = data->live_in_sets()[succ.ToSize()];
1753 if (live_in != nullptr) live_out->Union(*live_in); 1759 if (live_in != nullptr) live_out->Union(*live_in);
1754 1760
1755 // All phi input operands corresponding to this successor edge are live 1761 // All phi input operands corresponding to this successor edge are live
1756 // out from this block. 1762 // out from this block.
1757 auto successor = code->InstructionBlockAt(succ); 1763 const InstructionBlock* successor = code->InstructionBlockAt(succ);
1758 size_t index = successor->PredecessorIndexOf(block->rpo_number()); 1764 size_t index = successor->PredecessorIndexOf(block->rpo_number());
1759 DCHECK(index < successor->PredecessorCount()); 1765 DCHECK(index < successor->PredecessorCount());
1760 for (PhiInstruction* phi : successor->phis()) { 1766 for (PhiInstruction* phi : successor->phis()) {
1761 live_out->Add(phi->operands()[index]); 1767 live_out->Add(phi->operands()[index]);
1762 } 1768 }
1763 } 1769 }
1764 data->live_out_sets()[block_index] = live_out; 1770 data->live_out_sets()[block_index] = live_out;
1765 } 1771 }
1766 return live_out; 1772 return live_out;
1767 } 1773 }
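ComputeLiveOut (its opening lines are elided above) unions, over each forward successor, that successor's live-in set plus the phi inputs flowing along this particular edge; back edges are skipped here, and loop liveness is patched up later by ProcessLoopHeader. A small sketch of the union with std::set standing in for BitVector:

#include <cstdio>
#include <set>
#include <vector>

// Simplified: a successor exposes its live-in vregs and the phi inputs that
// arrive from a given predecessor index. Not V8's data structures.
struct Successor {
  std::set<int> live_in;
  std::vector<std::vector<int>> phi_inputs_per_pred;  // [pred_index] -> vregs
};

std::set<int> ComputeLiveOutSketch(const std::vector<Successor>& succs,
                                   const std::vector<int>& my_pred_index) {
  std::set<int> live_out;
  for (size_t s = 0; s < succs.size(); ++s) {
    live_out.insert(succs[s].live_in.begin(), succs[s].live_in.end());
    for (int vreg : succs[s].phi_inputs_per_pred[my_pred_index[s]]) {
      live_out.insert(vreg);  // phi input used on this edge is live out here
    }
  }
  return live_out;
}

int main() {
  Successor a{{1, 2}, {{5}, {6}}};   // its phi takes v5 from pred 0, v6 from pred 1
  Successor b{{2, 3}, {{}, {}}};
  std::set<int> out = ComputeLiveOutSketch({a, b}, {0, 1});
  for (int v : out) std::printf("v%d ", v);   // prints v1 v2 v3 v5
  std::printf("\n");
}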
1768 1774
1769 1775
1770 void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block, 1776 void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
1771 BitVector* live_out) { 1777 BitVector* live_out) {
1772 // Add an interval that includes the entire block to the live range for 1778 // Add an interval that includes the entire block to the live range for
1773 // each live_out value. 1779 // each live_out value.
1774 auto start = LifetimePosition::GapFromInstructionIndex( 1780 LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
1775 block->first_instruction_index()); 1781 block->first_instruction_index());
1776 auto end = LifetimePosition::InstructionFromInstructionIndex( 1782 LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
1777 block->last_instruction_index()).NextStart(); 1783 block->last_instruction_index())
1784 .NextStart();
1778 BitVector::Iterator iterator(live_out); 1785 BitVector::Iterator iterator(live_out);
1779 while (!iterator.Done()) { 1786 while (!iterator.Done()) {
1780 int operand_index = iterator.Current(); 1787 int operand_index = iterator.Current();
1781 auto range = data()->GetOrCreateLiveRangeFor(operand_index); 1788 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
1782 range->AddUseInterval(start, end, allocation_zone()); 1789 range->AddUseInterval(start, end, allocation_zone());
1783 iterator.Advance(); 1790 iterator.Advance();
1784 } 1791 }
1785 } 1792 }
1786 1793
1787 1794
1788 int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) { 1795 int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) {
1789 return -index - 1 - config()->num_general_registers(); 1796 return -index - 1 - config()->num_general_registers();
1790 } 1797 }
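Fixed live ranges get negative IDs so they can never collide with real virtual registers: double register i is offset past all general registers by the formula above (the general-register variant, not shown in this hunk, is presumably -index - 1). With an assumed 16 general registers, double register 2 maps to -2 - 1 - 16 = -19. A tiny check of the encoding and its inverse under that assumption:

#include <cassert>

// Assumed configuration for the example; the real count comes from
// RegisterConfiguration at runtime.
constexpr int kNumGeneralRegisters = 16;

constexpr int FixedLiveRangeID(int index) { return -index - 1; }
constexpr int FixedDoubleLiveRangeID(int index) {
  return -index - 1 - kNumGeneralRegisters;
}

// Inverse mappings, handy for checking that the two encodings never overlap.
constexpr int GeneralIndexFromID(int id) { return -id - 1; }
constexpr int DoubleIndexFromID(int id) { return -id - 1 - kNumGeneralRegisters; }

int main() {
  static_assert(FixedLiveRangeID(0) == -1, "");
  static_assert(FixedLiveRangeID(15) == -16, "");
  static_assert(FixedDoubleLiveRangeID(0) == -17, "");
  static_assert(FixedDoubleLiveRangeID(2) == -19, "");
  assert(GeneralIndexFromID(FixedLiveRangeID(7)) == 7);
  assert(DoubleIndexFromID(FixedDoubleLiveRangeID(7)) == 7);
}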
1791 1798
1792 1799
1793 TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) { 1800 TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
1794 DCHECK(index < config()->num_general_registers()); 1801 DCHECK(index < config()->num_general_registers());
1795 auto result = data()->fixed_live_ranges()[index]; 1802 TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
1796 if (result == nullptr) { 1803 if (result == nullptr) {
1797 result = data()->NewLiveRange(FixedLiveRangeID(index), 1804 result = data()->NewLiveRange(FixedLiveRangeID(index),
1798 InstructionSequence::DefaultRepresentation()); 1805 InstructionSequence::DefaultRepresentation());
1799 DCHECK(result->IsFixed()); 1806 DCHECK(result->IsFixed());
1800 result->set_assigned_register(index); 1807 result->set_assigned_register(index);
1801 data()->MarkAllocated(GENERAL_REGISTERS, index); 1808 data()->MarkAllocated(GENERAL_REGISTERS, index);
1802 data()->fixed_live_ranges()[index] = result; 1809 data()->fixed_live_ranges()[index] = result;
1803 } 1810 }
1804 return result; 1811 return result;
1805 } 1812 }
1806 1813
1807 1814
1808 TopLevelLiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) { 1815 TopLevelLiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
1809 DCHECK(index < config()->num_double_registers()); 1816 DCHECK(index < config()->num_double_registers());
1810 auto result = data()->fixed_double_live_ranges()[index]; 1817 TopLevelLiveRange* result = data()->fixed_double_live_ranges()[index];
1811 if (result == nullptr) { 1818 if (result == nullptr) {
1812 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index), 1819 result = data()->NewLiveRange(FixedDoubleLiveRangeID(index),
1813 MachineRepresentation::kFloat64); 1820 MachineRepresentation::kFloat64);
1814 DCHECK(result->IsFixed()); 1821 DCHECK(result->IsFixed());
1815 result->set_assigned_register(index); 1822 result->set_assigned_register(index);
1816 data()->MarkAllocated(DOUBLE_REGISTERS, index); 1823 data()->MarkAllocated(DOUBLE_REGISTERS, index);
1817 data()->fixed_double_live_ranges()[index] = result; 1824 data()->fixed_double_live_ranges()[index] = result;
1818 } 1825 }
1819 return result; 1826 return result;
1820 } 1827 }
(...skipping 22 matching lines...)
1843 InstructionOperand* operand, 1850 InstructionOperand* operand,
1844 void* hint, 1851 void* hint,
1845 UsePositionHintType hint_type) { 1852 UsePositionHintType hint_type) {
1846 return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type); 1853 return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
1847 } 1854 }
1848 1855
1849 1856
1850 UsePosition* LiveRangeBuilder::Define(LifetimePosition position, 1857 UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
1851 InstructionOperand* operand, void* hint, 1858 InstructionOperand* operand, void* hint,
1852 UsePositionHintType hint_type) { 1859 UsePositionHintType hint_type) {
1853 auto range = LiveRangeFor(operand); 1860 TopLevelLiveRange* range = LiveRangeFor(operand);
1854 if (range == nullptr) return nullptr; 1861 if (range == nullptr) return nullptr;
1855 1862
1856 if (range->IsEmpty() || range->Start() > position) { 1863 if (range->IsEmpty() || range->Start() > position) {
1857 // Can happen if there is a definition without use. 1864 // Can happen if there is a definition without use.
1858 range->AddUseInterval(position, position.NextStart(), allocation_zone()); 1865 range->AddUseInterval(position, position.NextStart(), allocation_zone());
1859 range->AddUsePosition(NewUsePosition(position.NextStart())); 1866 range->AddUsePosition(NewUsePosition(position.NextStart()));
1860 } else { 1867 } else {
1861 range->ShortenTo(position); 1868 range->ShortenTo(position);
1862 } 1869 }
1863 if (!operand->IsUnallocated()) return nullptr; 1870 if (!operand->IsUnallocated()) return nullptr;
1864 auto unalloc_operand = UnallocatedOperand::cast(operand); 1871 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
1865 auto use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type); 1872 UsePosition* use_pos =
1873 NewUsePosition(position, unalloc_operand, hint, hint_type);
1866 range->AddUsePosition(use_pos); 1874 range->AddUsePosition(use_pos);
1867 return use_pos; 1875 return use_pos;
1868 } 1876 }
1869 1877
1870 1878
1871 UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start, 1879 UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
1872 LifetimePosition position, 1880 LifetimePosition position,
1873 InstructionOperand* operand, void* hint, 1881 InstructionOperand* operand, void* hint,
1874 UsePositionHintType hint_type) { 1882 UsePositionHintType hint_type) {
1875 auto range = LiveRangeFor(operand); 1883 TopLevelLiveRange* range = LiveRangeFor(operand);
1876 if (range == nullptr) return nullptr; 1884 if (range == nullptr) return nullptr;
1877 UsePosition* use_pos = nullptr; 1885 UsePosition* use_pos = nullptr;
1878 if (operand->IsUnallocated()) { 1886 if (operand->IsUnallocated()) {
1879 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand); 1887 UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
1880 use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type); 1888 use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
1881 range->AddUsePosition(use_pos); 1889 range->AddUsePosition(use_pos);
1882 } 1890 }
1883 range->AddUseInterval(block_start, position, allocation_zone()); 1891 range->AddUseInterval(block_start, position, allocation_zone());
1884 return use_pos; 1892 return use_pos;
1885 } 1893 }
1886 1894
1887 1895
1888 void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block, 1896 void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
1889 BitVector* live) { 1897 BitVector* live) {
1890 int block_start = block->first_instruction_index(); 1898 int block_start = block->first_instruction_index();
1891 auto block_start_position = 1899 LifetimePosition block_start_position =
1892 LifetimePosition::GapFromInstructionIndex(block_start); 1900 LifetimePosition::GapFromInstructionIndex(block_start);
1893 1901
1894 for (int index = block->last_instruction_index(); index >= block_start; 1902 for (int index = block->last_instruction_index(); index >= block_start;
1895 index--) { 1903 index--) {
1896 auto curr_position = 1904 LifetimePosition curr_position =
1897 LifetimePosition::InstructionFromInstructionIndex(index); 1905 LifetimePosition::InstructionFromInstructionIndex(index);
1898 auto instr = code()->InstructionAt(index); 1906 Instruction* instr = code()->InstructionAt(index);
1899 DCHECK(instr != nullptr); 1907 DCHECK(instr != nullptr);
1900 DCHECK(curr_position.IsInstructionPosition()); 1908 DCHECK(curr_position.IsInstructionPosition());
1901 // Process output, inputs, and temps of this instruction. 1909 // Process output, inputs, and temps of this instruction.
1902 for (size_t i = 0; i < instr->OutputCount(); i++) { 1910 for (size_t i = 0; i < instr->OutputCount(); i++) {
1903 auto output = instr->OutputAt(i); 1911 InstructionOperand* output = instr->OutputAt(i);
1904 if (output->IsUnallocated()) { 1912 if (output->IsUnallocated()) {
1905 // Unsupported. 1913 // Unsupported.
1906 DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy()); 1914 DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
1907 int out_vreg = UnallocatedOperand::cast(output)->virtual_register(); 1915 int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
1908 live->Remove(out_vreg); 1916 live->Remove(out_vreg);
1909 } else if (output->IsConstant()) { 1917 } else if (output->IsConstant()) {
1910 int out_vreg = ConstantOperand::cast(output)->virtual_register(); 1918 int out_vreg = ConstantOperand::cast(output)->virtual_register();
1911 live->Remove(out_vreg); 1919 live->Remove(out_vreg);
1912 } 1920 }
1913 if (block->IsHandler() && index == block_start) { 1921 if (block->IsHandler() && index == block_start) {
1914 // The register defined here is blocked from gap start - it is the 1922 // The register defined here is blocked from gap start - it is the
1915 // exception value. 1923 // exception value.
1916 // TODO(mtrofin): should we explore an explicit opcode for 1924 // TODO(mtrofin): should we explore an explicit opcode for
1917 // the first instruction in the handler? 1925 // the first instruction in the handler?
1918 Define(LifetimePosition::GapFromInstructionIndex(index), output); 1926 Define(LifetimePosition::GapFromInstructionIndex(index), output);
1919 } else { 1927 } else {
1920 Define(curr_position, output); 1928 Define(curr_position, output);
1921 } 1929 }
1922 } 1930 }
1923 1931
1924 if (instr->ClobbersRegisters()) { 1932 if (instr->ClobbersRegisters()) {
1925 for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) { 1933 for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
1926 int code = config()->GetAllocatableGeneralCode(i); 1934 int code = config()->GetAllocatableGeneralCode(i);
1927 if (!IsOutputRegisterOf(instr, Register::from_code(code))) { 1935 if (!IsOutputRegisterOf(instr, Register::from_code(code))) {
1928 auto range = FixedLiveRangeFor(code); 1936 TopLevelLiveRange* range = FixedLiveRangeFor(code);
1929 range->AddUseInterval(curr_position, curr_position.End(), 1937 range->AddUseInterval(curr_position, curr_position.End(),
1930 allocation_zone()); 1938 allocation_zone());
1931 } 1939 }
1932 } 1940 }
1933 } 1941 }
1934 1942
1935 if (instr->ClobbersDoubleRegisters()) { 1943 if (instr->ClobbersDoubleRegisters()) {
1936 for (int i = 0; i < config()->num_allocatable_aliased_double_registers(); 1944 for (int i = 0; i < config()->num_allocatable_aliased_double_registers();
1937 ++i) { 1945 ++i) {
1938 int code = config()->GetAllocatableDoubleCode(i); 1946 int code = config()->GetAllocatableDoubleCode(i);
1939 if (!IsOutputDoubleRegisterOf(instr, DoubleRegister::from_code(code))) { 1947 if (!IsOutputDoubleRegisterOf(instr, DoubleRegister::from_code(code))) {
1940 auto range = FixedDoubleLiveRangeFor(code); 1948 TopLevelLiveRange* range = FixedDoubleLiveRangeFor(code);
1941 range->AddUseInterval(curr_position, curr_position.End(), 1949 range->AddUseInterval(curr_position, curr_position.End(),
1942 allocation_zone()); 1950 allocation_zone());
1943 } 1951 }
1944 } 1952 }
1945 } 1953 }
1946 1954
1947 for (size_t i = 0; i < instr->InputCount(); i++) { 1955 for (size_t i = 0; i < instr->InputCount(); i++) {
1948 auto input = instr->InputAt(i); 1956 InstructionOperand* input = instr->InputAt(i);
1949 if (input->IsImmediate() || input->IsExplicit()) { 1957 if (input->IsImmediate() || input->IsExplicit()) {
1950 continue; // Ignore immediates and explicitly reserved registers. 1958 continue; // Ignore immediates and explicitly reserved registers.
1951 } 1959 }
1952 LifetimePosition use_pos; 1960 LifetimePosition use_pos;
1953 if (input->IsUnallocated() && 1961 if (input->IsUnallocated() &&
1954 UnallocatedOperand::cast(input)->IsUsedAtStart()) { 1962 UnallocatedOperand::cast(input)->IsUsedAtStart()) {
1955 use_pos = curr_position; 1963 use_pos = curr_position;
1956 } else { 1964 } else {
1957 use_pos = curr_position.End(); 1965 use_pos = curr_position.End();
1958 } 1966 }
1959 1967
1960 if (input->IsUnallocated()) { 1968 if (input->IsUnallocated()) {
1961 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input); 1969 UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
1962 int vreg = unalloc->virtual_register(); 1970 int vreg = unalloc->virtual_register();
1963 live->Add(vreg); 1971 live->Add(vreg);
1964 if (unalloc->HasSlotPolicy()) { 1972 if (unalloc->HasSlotPolicy()) {
1965 data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true); 1973 data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
1966 } 1974 }
1967 } 1975 }
1968 Use(block_start_position, use_pos, input); 1976 Use(block_start_position, use_pos, input);
1969 } 1977 }
1970 1978
1971 for (size_t i = 0; i < instr->TempCount(); i++) { 1979 for (size_t i = 0; i < instr->TempCount(); i++) {
1972 auto temp = instr->TempAt(i); 1980 InstructionOperand* temp = instr->TempAt(i);
1973 // Unsupported. 1981 // Unsupported.
1974 DCHECK_IMPLIES(temp->IsUnallocated(), 1982 DCHECK_IMPLIES(temp->IsUnallocated(),
1975 !UnallocatedOperand::cast(temp)->HasSlotPolicy()); 1983 !UnallocatedOperand::cast(temp)->HasSlotPolicy());
1976 if (instr->ClobbersTemps()) { 1984 if (instr->ClobbersTemps()) {
1977 if (temp->IsRegister()) continue; 1985 if (temp->IsRegister()) continue;
1978 if (temp->IsUnallocated()) { 1986 if (temp->IsUnallocated()) {
1979 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp); 1987 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
1980 if (temp_unalloc->HasFixedPolicy()) { 1988 if (temp_unalloc->HasFixedPolicy()) {
1981 continue; 1989 continue;
1982 } 1990 }
1983 } 1991 }
1984 } 1992 }
1985 Use(block_start_position, curr_position.End(), temp); 1993 Use(block_start_position, curr_position.End(), temp);
1986 Define(curr_position, temp); 1994 Define(curr_position, temp);
1987 } 1995 }
1988 1996
1989 // Process the moves of the instruction's gaps, making their sources live. 1997 // Process the moves of the instruction's gaps, making their sources live.
1990 const Instruction::GapPosition kPositions[] = {Instruction::END, 1998 const Instruction::GapPosition kPositions[] = {Instruction::END,
1991 Instruction::START}; 1999 Instruction::START};
1992 curr_position = curr_position.PrevStart(); 2000 curr_position = curr_position.PrevStart();
1993 DCHECK(curr_position.IsGapPosition()); 2001 DCHECK(curr_position.IsGapPosition());
1994 for (auto position : kPositions) { 2002 for (const Instruction::GapPosition& position : kPositions) {
1995 auto move = instr->GetParallelMove(position); 2003 ParallelMove* move = instr->GetParallelMove(position);
1996 if (move == nullptr) continue; 2004 if (move == nullptr) continue;
1997 if (position == Instruction::END) { 2005 if (position == Instruction::END) {
1998 curr_position = curr_position.End(); 2006 curr_position = curr_position.End();
1999 } else { 2007 } else {
2000 curr_position = curr_position.Start(); 2008 curr_position = curr_position.Start();
2001 } 2009 }
2002 for (auto cur : *move) { 2010 for (MoveOperands* cur : *move) {
2003 auto& from = cur->source(); 2011 InstructionOperand& from = cur->source();
2004 auto& to = cur->destination(); 2012 InstructionOperand& to = cur->destination();
2005 void* hint = &to; 2013 void* hint = &to;
2006 UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to); 2014 UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
2007 UsePosition* to_use = nullptr; 2015 UsePosition* to_use = nullptr;
2008 int phi_vreg = -1; 2016 int phi_vreg = -1;
2009 if (to.IsUnallocated()) { 2017 if (to.IsUnallocated()) {
2010 int to_vreg = UnallocatedOperand::cast(to).virtual_register(); 2018 int to_vreg = UnallocatedOperand::cast(to).virtual_register();
2011 auto to_range = data()->GetOrCreateLiveRangeFor(to_vreg); 2019 TopLevelLiveRange* to_range =
2020 data()->GetOrCreateLiveRangeFor(to_vreg);
2012 if (to_range->is_phi()) { 2021 if (to_range->is_phi()) {
2013 phi_vreg = to_vreg; 2022 phi_vreg = to_vreg;
2014 if (to_range->is_non_loop_phi()) { 2023 if (to_range->is_non_loop_phi()) {
2015 hint = to_range->current_hint_position(); 2024 hint = to_range->current_hint_position();
2016 hint_type = hint == nullptr ? UsePositionHintType::kNone 2025 hint_type = hint == nullptr ? UsePositionHintType::kNone
2017 : UsePositionHintType::kUsePos; 2026 : UsePositionHintType::kUsePos;
2018 } else { 2027 } else {
2019 hint_type = UsePositionHintType::kPhi; 2028 hint_type = UsePositionHintType::kPhi;
2020 hint = data()->GetPhiMapValueFor(to_vreg); 2029 hint = data()->GetPhiMapValueFor(to_vreg);
2021 } 2030 }
2022 } else { 2031 } else {
2023 if (live->Contains(to_vreg)) { 2032 if (live->Contains(to_vreg)) {
2024 to_use = Define(curr_position, &to, &from, 2033 to_use = Define(curr_position, &to, &from,
2025 UsePosition::HintTypeForOperand(from)); 2034 UsePosition::HintTypeForOperand(from));
2026 live->Remove(to_vreg); 2035 live->Remove(to_vreg);
2027 } else { 2036 } else {
2028 cur->Eliminate(); 2037 cur->Eliminate();
2029 continue; 2038 continue;
2030 } 2039 }
2031 } 2040 }
2032 } else { 2041 } else {
2033 Define(curr_position, &to); 2042 Define(curr_position, &to);
2034 } 2043 }
2035 auto from_use = 2044 UsePosition* from_use =
2036 Use(block_start_position, curr_position, &from, hint, hint_type); 2045 Use(block_start_position, curr_position, &from, hint, hint_type);
2037 // Mark range live. 2046 // Mark range live.
2038 if (from.IsUnallocated()) { 2047 if (from.IsUnallocated()) {
2039 live->Add(UnallocatedOperand::cast(from).virtual_register()); 2048 live->Add(UnallocatedOperand::cast(from).virtual_register());
2040 } 2049 }
2041 // Resolve use position hints just created. 2050 // Resolve use position hints just created.
2042 if (to_use != nullptr && from_use != nullptr) { 2051 if (to_use != nullptr && from_use != nullptr) {
2043 to_use->ResolveHint(from_use); 2052 to_use->ResolveHint(from_use);
2044 from_use->ResolveHint(to_use); 2053 from_use->ResolveHint(to_use);
2045 } 2054 }
2046 DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved()); 2055 DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
2047 DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved()); 2056 DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
2048 // Potentially resolve phi hint. 2057 // Potentially resolve phi hint.
2049 if (phi_vreg != -1) ResolvePhiHint(&from, from_use); 2058 if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
2050 } 2059 }
2051 } 2060 }
2052 } 2061 }
2053 } 2062 }
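ProcessInstructions walks each block backwards: an output kills a value and pins the start of its range at the definition, an input makes the value live back toward the block start, and call-like instructions add short blocking intervals on every fixed register they may clobber. A toy version of the kill/gen backward scan over one block, with ranges kept as [start, end) instruction indices (not V8's LifetimePosition granularity):

#include <cstdio>
#include <map>
#include <set>
#include <utility>
#include <vector>

// A toy instruction: which vregs it defines and which it uses.
struct Instr {
  std::vector<int> defs;
  std::vector<int> uses;
};

// Backward scan: values live at block end stay live until killed by a def;
// each use conservatively extends liveness back to the block start.
std::map<int, std::pair<int, int>> BuildIntervals(const std::vector<Instr>& block,
                                                  const std::set<int>& live_out) {
  const int block_start = 0;
  const int block_end = static_cast<int>(block.size());
  std::map<int, std::pair<int, int>> interval;  // vreg -> [start, end)
  std::set<int> live = live_out;
  for (int v : live) interval[v] = {block_start, block_end};
  for (int i = block_end - 1; i >= block_start; --i) {
    for (int d : block[i].defs) {
      if (live.erase(d)) interval[d].first = i;  // the definition starts the range
    }
    for (int u : block[i].uses) {
      if (live.insert(u).second) interval[u] = {block_start, i + 1};
    }
  }
  return interval;
}

int main() {
  // instr 0: v1 = ...; instr 1: v2 = use(v1); live out of the block: v2
  std::vector<Instr> block = {{{1}, {}}, {{2}, {1}}};
  for (auto& kv : BuildIntervals(block, {2}))
    std::printf("v%d: [%d, %d)\n", kv.first, kv.second.first, kv.second.second);
}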
2054 2063
2055 2064
2056 void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block, 2065 void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
2057 BitVector* live) { 2066 BitVector* live) {
2058 for (auto phi : block->phis()) { 2067 for (PhiInstruction* phi : block->phis()) {
2059 // The live range interval already ends at the first instruction of the 2068 // The live range interval already ends at the first instruction of the
2060 // block. 2069 // block.
2061 int phi_vreg = phi->virtual_register(); 2070 int phi_vreg = phi->virtual_register();
2062 live->Remove(phi_vreg); 2071 live->Remove(phi_vreg);
2063 InstructionOperand* hint = nullptr; 2072 InstructionOperand* hint = nullptr;
2064 auto instr = GetLastInstruction( 2073 Instruction* instr = GetLastInstruction(
2065 code(), code()->InstructionBlockAt(block->predecessors()[0])); 2074 code(), code()->InstructionBlockAt(block->predecessors()[0]));
2066 for (auto move : *instr->GetParallelMove(Instruction::END)) { 2075 for (MoveOperands* move : *instr->GetParallelMove(Instruction::END)) {
2067 auto& to = move->destination(); 2076 InstructionOperand& to = move->destination();
2068 if (to.IsUnallocated() && 2077 if (to.IsUnallocated() &&
2069 UnallocatedOperand::cast(to).virtual_register() == phi_vreg) { 2078 UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
2070 hint = &move->source(); 2079 hint = &move->source();
2071 break; 2080 break;
2072 } 2081 }
2073 } 2082 }
2074 DCHECK(hint != nullptr); 2083 DCHECK(hint != nullptr);
2075 auto block_start = LifetimePosition::GapFromInstructionIndex( 2084 LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
2076 block->first_instruction_index()); 2085 block->first_instruction_index());
2077 auto use_pos = Define(block_start, &phi->output(), hint, 2086 UsePosition* use_pos = Define(block_start, &phi->output(), hint,
2078 UsePosition::HintTypeForOperand(*hint)); 2087 UsePosition::HintTypeForOperand(*hint));
2079 MapPhiHint(hint, use_pos); 2088 MapPhiHint(hint, use_pos);
2080 } 2089 }
2081 } 2090 }
2082 2091
2083 2092
2084 void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block, 2093 void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
2085 BitVector* live) { 2094 BitVector* live) {
2086 DCHECK(block->IsLoopHeader()); 2095 DCHECK(block->IsLoopHeader());
2087 // Add a live range stretching from the first loop instruction to the last 2096 // Add a live range stretching from the first loop instruction to the last
2088 // for each value live on entry to the header. 2097 // for each value live on entry to the header.
2089 BitVector::Iterator iterator(live); 2098 BitVector::Iterator iterator(live);
2090 auto start = LifetimePosition::GapFromInstructionIndex( 2099 LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
2091 block->first_instruction_index()); 2100 block->first_instruction_index());
2092 auto end = LifetimePosition::GapFromInstructionIndex( 2101 LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
2093 code()->LastLoopInstructionIndex(block)).NextFullStart(); 2102 code()->LastLoopInstructionIndex(block))
2103 .NextFullStart();
2094 while (!iterator.Done()) { 2104 while (!iterator.Done()) {
2095 int operand_index = iterator.Current(); 2105 int operand_index = iterator.Current();
2096 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index); 2106 TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
2097 range->EnsureInterval(start, end, allocation_zone()); 2107 range->EnsureInterval(start, end, allocation_zone());
2098 iterator.Advance(); 2108 iterator.Advance();
2099 } 2109 }
2100 // Insert all values into the live in sets of all blocks in the loop. 2110 // Insert all values into the live in sets of all blocks in the loop.
2101 for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt(); 2111 for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
2102 ++i) { 2112 ++i) {
2103 live_in_sets()[i]->Union(*live); 2113 live_in_sets()[i]->Union(*live);
2104 } 2114 }
2105 } 2115 }
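For a loop header, every value live on entry is kept live across the entire loop, since a back edge can reach the header again after any body block; the ranges are widened to cover the loop and the values are also added to the live-in sets of all blocks inside it. A compact sketch of the widening step with an invented interval map:

#include <algorithm>
#include <cstdio>
#include <map>
#include <set>
#include <utility>

// vreg -> [start, end) in instruction indices; invented representation.
using Intervals = std::map<int, std::pair<int, int>>;

// Every value live at the loop header must survive the whole loop body.
void ExtendLiveOverLoop(Intervals* intervals, const std::set<int>& live_at_header,
                        int loop_start, int loop_end) {
  for (int v : live_at_header) {
    auto it = intervals->find(v);
    if (it == intervals->end()) {
      (*intervals)[v] = {loop_start, loop_end};   // no interval yet: cover the loop
    } else {
      it->second.first = std::min(it->second.first, loop_start);
      it->second.second = std::max(it->second.second, loop_end);
    }
  }
}

int main() {
  Intervals iv{{4, {2, 5}}};
  ExtendLiveOverLoop(&iv, {4}, 3, 12);  // the loop spans instructions [3, 12)
  std::printf("v4: [%d, %d)\n", iv[4].first, iv[4].second);  // -> [2, 12)
}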
2106 2116
2107 2117
2108 void LiveRangeBuilder::BuildLiveRanges() { 2118 void LiveRangeBuilder::BuildLiveRanges() {
2109 // Process the blocks in reverse order. 2119 // Process the blocks in reverse order.
2110 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0; 2120 for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
2111 --block_id) { 2121 --block_id) {
2112 auto block = code()->InstructionBlockAt(RpoNumber::FromInt(block_id)); 2122 InstructionBlock* block =
2113 auto live = ComputeLiveOut(block, data()); 2123 code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
2124 BitVector* live = ComputeLiveOut(block, data());
2114 // Initially consider all live_out values live for the entire block. We 2125 // Initially consider all live_out values live for the entire block. We
2115 // will shorten these intervals if necessary. 2126 // will shorten these intervals if necessary.
2116 AddInitialIntervals(block, live); 2127 AddInitialIntervals(block, live);
2117 // Process the instructions in reverse order, generating and killing 2128 // Process the instructions in reverse order, generating and killing
2118 // live values. 2129 // live values.
2119 ProcessInstructions(block, live); 2130 ProcessInstructions(block, live);
2120 // All phi output operands are killed by this block. 2131 // All phi output operands are killed by this block.
2121 ProcessPhis(block, live); 2132 ProcessPhis(block, live);
2122 // Now live is live_in for this block except not including values live 2133 // Now live is live_in for this block except not including values live
2123 // out on backward successor edges. 2134 // out on backward successor edges.
2124 if (block->IsLoopHeader()) ProcessLoopHeader(block, live); 2135 if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
2125 live_in_sets()[block_id] = live; 2136 live_in_sets()[block_id] = live;
2126 } 2137 }
2127 // Postprocess the ranges. 2138 // Postprocess the ranges.
2128 for (auto range : data()->live_ranges()) { 2139 for (TopLevelLiveRange* range : data()->live_ranges()) {
2129 if (range == nullptr) continue; 2140 if (range == nullptr) continue;
2130 // Give slots to all ranges with a non fixed slot use. 2141 // Give slots to all ranges with a non fixed slot use.
2131 if (range->has_slot_use() && range->HasNoSpillType()) { 2142 if (range->has_slot_use() && range->HasNoSpillType()) {
2132 data()->AssignSpillRangeToLiveRange(range); 2143 data()->AssignSpillRangeToLiveRange(range);
2133 } 2144 }
2134 // TODO(bmeurer): This is a horrible hack to make sure that for constant 2145 // TODO(bmeurer): This is a horrible hack to make sure that for constant
2135 // live ranges, every use requires the constant to be in a register. 2146 // live ranges, every use requires the constant to be in a register.
2136 // Without this hack, all uses with "any" policy would get the constant 2147 // Without this hack, all uses with "any" policy would get the constant
2137 // operand assigned. 2148 // operand assigned.
2138 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) { 2149 if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
2139 for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next()) { 2150 for (UsePosition* pos = range->first_pos(); pos != nullptr;
2151 pos = pos->next()) {
2140 if (pos->type() == UsePositionType::kRequiresSlot) continue; 2152 if (pos->type() == UsePositionType::kRequiresSlot) continue;
2141 UsePositionType new_type = UsePositionType::kAny; 2153 UsePositionType new_type = UsePositionType::kAny;
2142 // Can't mark phis as needing a register. 2154 // Can't mark phis as needing a register.
2143 if (!pos->pos().IsGapPosition()) { 2155 if (!pos->pos().IsGapPosition()) {
2144 new_type = UsePositionType::kRequiresRegister; 2156 new_type = UsePositionType::kRequiresRegister;
2145 } 2157 }
2146 pos->set_type(new_type, true); 2158 pos->set_type(new_type, true);
2147 } 2159 }
2148 } 2160 }
2149 } 2161 }
(...skipping 122 matching lines...)
2272 2284
2273 2285
2274 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, 2286 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
2275 LifetimePosition start, 2287 LifetimePosition start,
2276 LifetimePosition end) { 2288 LifetimePosition end) {
2277 DCHECK(!range->TopLevel()->IsFixed()); 2289 DCHECK(!range->TopLevel()->IsFixed());
2278 TRACE("Splitting live range %d:%d in position between [%d, %d]\n", 2290 TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
2279 range->TopLevel()->vreg(), range->relative_id(), start.value(), 2291 range->TopLevel()->vreg(), range->relative_id(), start.value(),
2280 end.value()); 2292 end.value());
2281 2293
2282 auto split_pos = FindOptimalSplitPos(start, end); 2294 LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2283 DCHECK(split_pos >= start); 2295 DCHECK(split_pos >= start);
2284 return SplitRangeAt(range, split_pos); 2296 return SplitRangeAt(range, split_pos);
2285 } 2297 }
2286 2298
2287 2299
2288 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start, 2300 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2289 LifetimePosition end) { 2301 LifetimePosition end) {
2290 int start_instr = start.ToInstructionIndex(); 2302 int start_instr = start.ToInstructionIndex();
2291 int end_instr = end.ToInstructionIndex(); 2303 int end_instr = end.ToInstructionIndex();
2292 DCHECK(start_instr <= end_instr); 2304 DCHECK(start_instr <= end_instr);
2293 2305
2294 // We have no choice 2306 // We have no choice
2295 if (start_instr == end_instr) return end; 2307 if (start_instr == end_instr) return end;
2296 2308
2297 auto start_block = GetInstructionBlock(code(), start); 2309 const InstructionBlock* start_block = GetInstructionBlock(code(), start);
2298 auto end_block = GetInstructionBlock(code(), end); 2310 const InstructionBlock* end_block = GetInstructionBlock(code(), end);
2299 2311
2300 if (end_block == start_block) { 2312 if (end_block == start_block) {
2301 // The interval is split in the same basic block. Split at the latest 2313 // The interval is split in the same basic block. Split at the latest
2302 // possible position. 2314 // possible position.
2303 return end; 2315 return end;
2304 } 2316 }
2305 2317
2306 auto block = end_block; 2318 const InstructionBlock* block = end_block;
2307 // Find header of outermost loop. 2319 // Find header of outermost loop.
2308 // TODO(titzer): fix redundancy below. 2320 // TODO(titzer): fix redundancy below.
2309 while (GetContainingLoop(code(), block) != nullptr && 2321 while (GetContainingLoop(code(), block) != nullptr &&
2310 GetContainingLoop(code(), block)->rpo_number().ToInt() > 2322 GetContainingLoop(code(), block)->rpo_number().ToInt() >
2311 start_block->rpo_number().ToInt()) { 2323 start_block->rpo_number().ToInt()) {
2312 block = GetContainingLoop(code(), block); 2324 block = GetContainingLoop(code(), block);
2313 } 2325 }
2314 2326
2315 // We did not find any suitable outer loop. Split at the latest possible 2327 // We did not find any suitable outer loop. Split at the latest possible
2316 // position unless end_block is a loop header itself. 2328 // position unless end_block is a loop header itself.
2317 if (block == end_block && !end_block->IsLoopHeader()) return end; 2329 if (block == end_block && !end_block->IsLoopHeader()) return end;
2318 2330
2319 return LifetimePosition::GapFromInstructionIndex( 2331 return LifetimePosition::GapFromInstructionIndex(
2320 block->first_instruction_index()); 2332 block->first_instruction_index());
2321 } 2333 }
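FindOptimalSplitPos prefers the latest possible split, but when the end position sits inside a loop that does not also contain the start, the split is hoisted to the first instruction of the outermost such loop so spill traffic stays out of the loop body. A small sketch of that hoisting over a parent-pointer loop tree (hypothetical Block type, not V8's InstructionBlock):

#include <cstdio>

// Hypothetical loop-tree node: each block knows its containing loop header.
struct Block {
  int rpo;                       // reverse-postorder number
  int first_instruction_index;
  const Block* containing_loop;  // nullptr when not inside any loop
  bool is_loop_header;
};

// Walk outwards from end_block's loop while the loop still starts after
// start_block; the last such header is the outermost loop worth leaving.
int FindOptimalSplitPosSketch(const Block* start_block, const Block* end_block,
                              int end_pos) {
  const Block* block = end_block;
  while (block->containing_loop != nullptr &&
         block->containing_loop->rpo > start_block->rpo) {
    block = block->containing_loop;
  }
  if (block == end_block && !end_block->is_loop_header) return end_pos;
  return block->first_instruction_index;  // split just before entering the loop
}

int main() {
  Block outer{5, 50, nullptr, true};
  Block inner{8, 80, &outer, true};
  Block body{9, 90, &inner, false};
  Block start{2, 20, nullptr, false};
  std::printf("split at %d\n", FindOptimalSplitPosSketch(&start, &body, 95));  // 50
}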
2322 2334
2323 2335
2324 LifetimePosition RegisterAllocator::FindOptimalSpillingPos( 2336 LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
2325 LiveRange* range, LifetimePosition pos) { 2337 LiveRange* range, LifetimePosition pos) {
2326 auto block = GetInstructionBlock(code(), pos.Start()); 2338 const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
2327 auto loop_header = 2339 const InstructionBlock* loop_header =
2328 block->IsLoopHeader() ? block : GetContainingLoop(code(), block); 2340 block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2329 2341
2330 if (loop_header == nullptr) return pos; 2342 if (loop_header == nullptr) return pos;
2331 2343
2332 auto prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos); 2344 const UsePosition* prev_use =
2345 range->PreviousUsePositionRegisterIsBeneficial(pos);
2333 2346
2334 while (loop_header != nullptr) { 2347 while (loop_header != nullptr) {
2335 // We are going to spill live range inside the loop. 2348 // We are going to spill live range inside the loop.
2336 // If possible try to move spilling position backwards to loop header. 2349 // If possible try to move spilling position backwards to loop header.
2337 // This will reduce number of memory moves on the back edge. 2350 // This will reduce number of memory moves on the back edge.
2338 auto loop_start = LifetimePosition::GapFromInstructionIndex( 2351 LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
2339 loop_header->first_instruction_index()); 2352 loop_header->first_instruction_index());
2340 2353
2341 if (range->Covers(loop_start)) { 2354 if (range->Covers(loop_start)) {
2342 if (prev_use == nullptr || prev_use->pos() < loop_start) { 2355 if (prev_use == nullptr || prev_use->pos() < loop_start) {
2343 // No register beneficial use inside the loop before the pos. 2356 // No register beneficial use inside the loop before the pos.
2344 pos = loop_start; 2357 pos = loop_start;
2345 } 2358 }
2346 } 2359 }
2347 2360
2348 // Try hoisting out to an outer loop. 2361 // Try hoisting out to an outer loop.
(...skipping 63 matching lines...)
2412 to_add = to_add->next()) { 2425 to_add = to_add->next()) {
2413 if (!to_add->spilled()) { 2426 if (!to_add->spilled()) {
2414 AddToUnhandledUnsorted(to_add); 2427 AddToUnhandledUnsorted(to_add);
2415 } 2428 }
2416 } 2429 }
2417 } 2430 }
2418 SortUnhandled(); 2431 SortUnhandled();
2419 DCHECK(UnhandledIsSorted()); 2432 DCHECK(UnhandledIsSorted());
2420 2433
2421 auto& fixed_ranges = GetFixedRegisters(); 2434 auto& fixed_ranges = GetFixedRegisters();
2422 for (auto current : fixed_ranges) { 2435 for (TopLevelLiveRange* current : fixed_ranges) {
2423 if (current != nullptr) { 2436 if (current != nullptr) {
2424 DCHECK_EQ(mode(), current->kind()); 2437 DCHECK_EQ(mode(), current->kind());
2425 AddToInactive(current); 2438 AddToInactive(current);
2426 } 2439 }
2427 } 2440 }
2428 2441
2429 while (!unhandled_live_ranges().empty()) { 2442 while (!unhandled_live_ranges().empty()) {
2430 DCHECK(UnhandledIsSorted()); 2443 DCHECK(UnhandledIsSorted());
2431 auto current = unhandled_live_ranges().back(); 2444 LiveRange* current = unhandled_live_ranges().back();
2432 unhandled_live_ranges().pop_back(); 2445 unhandled_live_ranges().pop_back();
2433 DCHECK(UnhandledIsSorted()); 2446 DCHECK(UnhandledIsSorted());
2434 auto position = current->Start(); 2447 LifetimePosition position = current->Start();
2435 #ifdef DEBUG 2448 #ifdef DEBUG
2436 allocation_finger_ = position; 2449 allocation_finger_ = position;
2437 #endif 2450 #endif
2438 TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(), 2451 TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
2439 current->relative_id(), position.value()); 2452 current->relative_id(), position.value());
2440 2453
2441 if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel())) 2454 if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
2442 continue; 2455 continue;
2443 2456
2444 for (size_t i = 0; i < active_live_ranges().size(); ++i) { 2457 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2445 auto cur_active = active_live_ranges()[i]; 2458 LiveRange* cur_active = active_live_ranges()[i];
2446 if (cur_active->End() <= position) { 2459 if (cur_active->End() <= position) {
2447 ActiveToHandled(cur_active); 2460 ActiveToHandled(cur_active);
2448 --i; // The live range was removed from the list of active live ranges. 2461 --i; // The live range was removed from the list of active live ranges.
2449 } else if (!cur_active->Covers(position)) { 2462 } else if (!cur_active->Covers(position)) {
2450 ActiveToInactive(cur_active); 2463 ActiveToInactive(cur_active);
2451 --i; // The live range was removed from the list of active live ranges. 2464 --i; // The live range was removed from the list of active live ranges.
2452 } 2465 }
2453 } 2466 }
2454 2467
2455 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) { 2468 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
2456 auto cur_inactive = inactive_live_ranges()[i]; 2469 LiveRange* cur_inactive = inactive_live_ranges()[i];
2457 if (cur_inactive->End() <= position) { 2470 if (cur_inactive->End() <= position) {
2458 InactiveToHandled(cur_inactive); 2471 InactiveToHandled(cur_inactive);
2459 --i; // Live range was removed from the list of inactive live ranges. 2472 --i; // Live range was removed from the list of inactive live ranges.
2460 } else if (cur_inactive->Covers(position)) { 2473 } else if (cur_inactive->Covers(position)) {
2461 InactiveToActive(cur_inactive); 2474 InactiveToActive(cur_inactive);
2462 --i; // Live range was removed from the list of inactive live ranges. 2475 --i; // Live range was removed from the list of inactive live ranges.
2463 } 2476 }
2464 } 2477 }
2465 2478
2466 DCHECK(!current->HasRegisterAssigned() && !current->spilled()); 2479 DCHECK(!current->HasRegisterAssigned() && !current->spilled());
(...skipping 31 matching lines...)
2498 inactive_live_ranges().push_back(range); 2511 inactive_live_ranges().push_back(range);
2499 } 2512 }
2500 2513
2501 2514
2502 void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) { 2515 void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
2503 if (range == nullptr || range->IsEmpty()) return; 2516 if (range == nullptr || range->IsEmpty()) return;
2504 DCHECK(!range->HasRegisterAssigned() && !range->spilled()); 2517 DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2505 DCHECK(allocation_finger_ <= range->Start()); 2518 DCHECK(allocation_finger_ <= range->Start());
2506 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0; 2519 for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
2507 --i) { 2520 --i) {
2508 auto cur_range = unhandled_live_ranges().at(i); 2521 LiveRange* cur_range = unhandled_live_ranges().at(i);
2509 if (!range->ShouldBeAllocatedBefore(cur_range)) continue; 2522 if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
2510 TRACE("Add live range %d:%d to unhandled at %d\n", 2523 TRACE("Add live range %d:%d to unhandled at %d\n",
2511 range->TopLevel()->vreg(), range->relative_id(), i + 1); 2524 range->TopLevel()->vreg(), range->relative_id(), i + 1);
2512 auto it = unhandled_live_ranges().begin() + (i + 1); 2525 auto it = unhandled_live_ranges().begin() + (i + 1);
2513 unhandled_live_ranges().insert(it, range); 2526 unhandled_live_ranges().insert(it, range);
2514 DCHECK(UnhandledIsSorted()); 2527 DCHECK(UnhandledIsSorted());
2515 return; 2528 return;
2516 } 2529 }
2517 TRACE("Add live range %d:%d to unhandled at start\n", 2530 TRACE("Add live range %d:%d to unhandled at start\n",
2518 range->TopLevel()->vreg(), range->relative_id()); 2531 range->TopLevel()->vreg(), range->relative_id());
(...skipping 25 matching lines...)
2544 void LinearScanAllocator::SortUnhandled() { 2557 void LinearScanAllocator::SortUnhandled() {
2545 TRACE("Sort unhandled\n"); 2558 TRACE("Sort unhandled\n");
2546 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(), 2559 std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
2547 &UnhandledSortHelper); 2560 &UnhandledSortHelper);
2548 } 2561 }
2549 2562
2550 2563
2551 bool LinearScanAllocator::UnhandledIsSorted() { 2564 bool LinearScanAllocator::UnhandledIsSorted() {
2552 size_t len = unhandled_live_ranges().size(); 2565 size_t len = unhandled_live_ranges().size();
2553 for (size_t i = 1; i < len; i++) { 2566 for (size_t i = 1; i < len; i++) {
2554 auto a = unhandled_live_ranges().at(i - 1); 2567 LiveRange* a = unhandled_live_ranges().at(i - 1);
2555 auto b = unhandled_live_ranges().at(i); 2568 LiveRange* b = unhandled_live_ranges().at(i);
2556 if (a->Start() < b->Start()) return false; 2569 if (a->Start() < b->Start()) return false;
2557 } 2570 }
2558 return true; 2571 return true;
2559 } 2572 }
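The unhandled vector is kept ordered by decreasing start position, so the next range to process (the one with the smallest start) sits at the back and pop_back() stays O(1); UnhandledIsSorted checks exactly that invariant, allowing equal starts. The same idea on a plain vector of start positions, for illustration only:

#include <algorithm>
#include <cassert>
#include <vector>

// Keep pending items ordered by decreasing start so the next one to process
// (smallest start) is at the back and can be removed cheaply.
bool UnhandledIsSortedSketch(const std::vector<int>& starts) {
  return std::is_sorted(starts.begin(), starts.end(),
                        [](int a, int b) { return a > b; });  // non-increasing
}

int main() {
  std::vector<int> unhandled = {40, 25, 25, 10};  // starts; back() is next
  assert(UnhandledIsSortedSketch(unhandled));
  int next = unhandled.back();  // smallest start: 10
  unhandled.pop_back();
  assert(next == 10 && UnhandledIsSortedSketch(unhandled));
}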
2560 2573
2561 2574
2562 void LinearScanAllocator::ActiveToHandled(LiveRange* range) { 2575 void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
2563 RemoveElement(&active_live_ranges(), range); 2576 RemoveElement(&active_live_ranges(), range);
2564 TRACE("Moving live range %d:%d from active to handled\n", 2577 TRACE("Moving live range %d:%d from active to handled\n",
2565 range->TopLevel()->vreg(), range->relative_id()); 2578 range->TopLevel()->vreg(), range->relative_id());
(...skipping 23 matching lines...)
2589 } 2602 }
2590 2603
2591 2604
2592 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) { 2605 bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
2593 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters]; 2606 LifetimePosition free_until_pos[RegisterConfiguration::kMaxDoubleRegisters];
2594 2607
2595 for (int i = 0; i < num_registers(); i++) { 2608 for (int i = 0; i < num_registers(); i++) {
2596 free_until_pos[i] = LifetimePosition::MaxPosition(); 2609 free_until_pos[i] = LifetimePosition::MaxPosition();
2597 } 2610 }
2598 2611
2599 for (auto cur_active : active_live_ranges()) { 2612 for (LiveRange* cur_active : active_live_ranges()) {
2600 free_until_pos[cur_active->assigned_register()] = 2613 free_until_pos[cur_active->assigned_register()] =
2601 LifetimePosition::GapFromInstructionIndex(0); 2614 LifetimePosition::GapFromInstructionIndex(0);
2602 TRACE("Register %s is free until pos %d (1)\n", 2615 TRACE("Register %s is free until pos %d (1)\n",
2603 RegisterName(cur_active->assigned_register()), 2616 RegisterName(cur_active->assigned_register()),
2604 LifetimePosition::GapFromInstructionIndex(0).value()); 2617 LifetimePosition::GapFromInstructionIndex(0).value());
2605 } 2618 }
2606 2619
2607 for (auto cur_inactive : inactive_live_ranges()) { 2620 for (LiveRange* cur_inactive : inactive_live_ranges()) {
2608 DCHECK(cur_inactive->End() > current->Start()); 2621 DCHECK(cur_inactive->End() > current->Start());
2609 auto next_intersection = cur_inactive->FirstIntersection(current); 2622 LifetimePosition next_intersection =
2623 cur_inactive->FirstIntersection(current);
2610 if (!next_intersection.IsValid()) continue; 2624 if (!next_intersection.IsValid()) continue;
2611 int cur_reg = cur_inactive->assigned_register(); 2625 int cur_reg = cur_inactive->assigned_register();
2612 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); 2626 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
2613 TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg), 2627 TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
2614 Min(free_until_pos[cur_reg], next_intersection).value()); 2628 Min(free_until_pos[cur_reg], next_intersection).value());
2615 } 2629 }
2616 2630
2617 int hint_register; 2631 int hint_register;
2618 if (current->FirstHintPosition(&hint_register) != nullptr) { 2632 if (current->FirstHintPosition(&hint_register) != nullptr) {
2619 TRACE( 2633 TRACE(
(...skipping 14 matching lines...)
2634 2648
2635 // Find the register which stays free for the longest time. 2649 // Find the register which stays free for the longest time.
2636 int reg = allocatable_register_code(0); 2650 int reg = allocatable_register_code(0);
2637 for (int i = 1; i < num_allocatable_registers(); ++i) { 2651 for (int i = 1; i < num_allocatable_registers(); ++i) {
2638 int code = allocatable_register_code(i); 2652 int code = allocatable_register_code(i);
2639 if (free_until_pos[code] > free_until_pos[reg]) { 2653 if (free_until_pos[code] > free_until_pos[reg]) {
2640 reg = code; 2654 reg = code;
2641 } 2655 }
2642 } 2656 }
2643 2657
2644 auto pos = free_until_pos[reg]; 2658 LifetimePosition pos = free_until_pos[reg];
2645 2659
2646 if (pos <= current->Start()) { 2660 if (pos <= current->Start()) {
2647 // All registers are blocked. 2661 // All registers are blocked.
2648 return false; 2662 return false;
2649 } 2663 }
2650 2664
2651 if (pos < current->End()) { 2665 if (pos < current->End()) {
2652 // Register reg is available at the range start but becomes blocked before 2666 // Register reg is available at the range start but becomes blocked before
2653 // the range end. Split current at position where it becomes blocked. 2667 // the range end. Split current at position where it becomes blocked.
2654 auto tail = SplitRangeAt(current, pos); 2668 LiveRange* tail = SplitRangeAt(current, pos);
2655 AddToUnhandledSorted(tail); 2669 AddToUnhandledSorted(tail);
2656 } 2670 }
2657 2671
2658 // Register reg is available at the range start and is free until 2672 // Register reg is available at the range start and is free until
2659 // the range end. 2673 // the range end.
2660 DCHECK(pos >= current->End()); 2674 DCHECK(pos >= current->End());
2661 TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg), 2675 TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
2662 current->TopLevel()->vreg(), current->relative_id()); 2676 current->TopLevel()->vreg(), current->relative_id());
2663 SetLiveRangeAssignedRegister(current, reg); 2677 SetLiveRangeAssignedRegister(current, reg);
2664 2678
2665 return true; 2679 return true;
2666 } 2680 }
2667 2681
2668 2682
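For readers tracing the allocation logic rather than only the auto-to-explicit-type substitutions, here is a minimal standalone sketch of the selection step in TryAllocateFreeReg above. SimpleRange, kNumRegs and kMaxPos are made-up stand-ins, not the types from register-allocator.h, and the inactive-range and hint handling of the real function is omitted: every register starts out free forever, active ranges block their register immediately, the register that stays free longest wins, and the caller splits the range when that register is not free all the way to its end.

#include <algorithm>
#include <vector>

constexpr int kNumRegs = 4;       // hypothetical register count
constexpr int kMaxPos = 1 << 30;  // stands in for LifetimePosition::MaxPosition()

struct SimpleRange {
  int start;
  int end;
  int assigned_reg;  // -1 if none
};

// Returns the chosen register, or -1 when every register is already blocked
// at current.start. *free_until_out receives the position up to which the
// chosen register stays free; if it is smaller than current.end, the caller
// splits the range there (SplitRangeAt + AddToUnhandledSorted in the real code).
int TryAllocateFreeRegSketch(const SimpleRange& current,
                             const std::vector<SimpleRange>& active,
                             int* free_until_out) {
  int free_until[kNumRegs];
  std::fill(free_until, free_until + kNumRegs, kMaxPos);
  for (const SimpleRange& r : active) {
    free_until[r.assigned_reg] = 0;  // blocked from the very start
  }
  int reg = 0;
  for (int i = 1; i < kNumRegs; ++i) {
    if (free_until[i] > free_until[reg]) reg = i;
  }
  if (free_until[reg] <= current.start) return -1;  // all registers blocked
  *free_until_out = free_until[reg];
  return reg;
}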
2669 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { 2683 void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
2670 auto register_use = current->NextRegisterPosition(current->Start()); 2684 UsePosition* register_use = current->NextRegisterPosition(current->Start());
2671 if (register_use == nullptr) { 2685 if (register_use == nullptr) {
2672 // There is no use in the current live range that requires a register. 2686 // There is no use in the current live range that requires a register.
2673 // We can just spill it. 2687 // We can just spill it.
2674 Spill(current); 2688 Spill(current);
2675 return; 2689 return;
2676 } 2690 }
2677 2691
2678 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters]; 2692 LifetimePosition use_pos[RegisterConfiguration::kMaxDoubleRegisters];
2679 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters]; 2693 LifetimePosition block_pos[RegisterConfiguration::kMaxDoubleRegisters];
2680 2694
2681 for (int i = 0; i < num_registers(); i++) { 2695 for (int i = 0; i < num_registers(); i++) {
2682 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); 2696 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
2683 } 2697 }
2684 2698
2685 for (auto range : active_live_ranges()) { 2699 for (LiveRange* range : active_live_ranges()) {
2686 int cur_reg = range->assigned_register(); 2700 int cur_reg = range->assigned_register();
2687 if (range->TopLevel()->IsFixed() || 2701 if (range->TopLevel()->IsFixed() ||
2688 !range->CanBeSpilled(current->Start())) { 2702 !range->CanBeSpilled(current->Start())) {
2689 block_pos[cur_reg] = use_pos[cur_reg] = 2703 block_pos[cur_reg] = use_pos[cur_reg] =
2690 LifetimePosition::GapFromInstructionIndex(0); 2704 LifetimePosition::GapFromInstructionIndex(0);
2691 } else { 2705 } else {
2692 auto next_use = 2706 UsePosition* next_use =
2693 range->NextUsePositionRegisterIsBeneficial(current->Start()); 2707 range->NextUsePositionRegisterIsBeneficial(current->Start());
2694 if (next_use == nullptr) { 2708 if (next_use == nullptr) {
2695 use_pos[cur_reg] = range->End(); 2709 use_pos[cur_reg] = range->End();
2696 } else { 2710 } else {
2697 use_pos[cur_reg] = next_use->pos(); 2711 use_pos[cur_reg] = next_use->pos();
2698 } 2712 }
2699 } 2713 }
2700 } 2714 }
2701 2715
2702 for (auto range : inactive_live_ranges()) { 2716 for (LiveRange* range : inactive_live_ranges()) {
2703 DCHECK(range->End() > current->Start()); 2717 DCHECK(range->End() > current->Start());
2704 auto next_intersection = range->FirstIntersection(current); 2718 LifetimePosition next_intersection = range->FirstIntersection(current);
2705 if (!next_intersection.IsValid()) continue; 2719 if (!next_intersection.IsValid()) continue;
2706 int cur_reg = range->assigned_register(); 2720 int cur_reg = range->assigned_register();
2707 if (range->TopLevel()->IsFixed()) { 2721 if (range->TopLevel()->IsFixed()) {
2708 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); 2722 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
2709 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); 2723 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
2710 } else { 2724 } else {
2711 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection); 2725 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
2712 } 2726 }
2713 } 2727 }
2714 2728
2715 int reg = allocatable_register_code(0); 2729 int reg = allocatable_register_code(0);
2716 for (int i = 1; i < num_allocatable_registers(); ++i) { 2730 for (int i = 1; i < num_allocatable_registers(); ++i) {
2717 int code = allocatable_register_code(i); 2731 int code = allocatable_register_code(i);
2718 if (use_pos[code] > use_pos[reg]) { 2732 if (use_pos[code] > use_pos[reg]) {
2719 reg = code; 2733 reg = code;
2720 } 2734 }
2721 } 2735 }
2722 2736
2723 auto pos = use_pos[reg]; 2737 LifetimePosition pos = use_pos[reg];
2724 2738
2725 if (pos < register_use->pos()) { 2739 if (pos < register_use->pos()) {
2726 // All registers are blocked before the first use that requires a register. 2740 // All registers are blocked before the first use that requires a register.
2727 // Spill starting part of live range up to that use. 2741 // Spill starting part of live range up to that use.
2728 SpillBetween(current, current->Start(), register_use->pos()); 2742 SpillBetween(current, current->Start(), register_use->pos());
2729 return; 2743 return;
2730 } 2744 }
2731 2745
2732 if (block_pos[reg] < current->End()) { 2746 if (block_pos[reg] < current->End()) {
2733 // Register becomes blocked before the current range end. Split before that 2747 // Register becomes blocked before the current range end. Split before that
(...skipping 12 matching lines...)
2746 // This register was not free. Thus we need to find and spill 2760 // This register was not free. Thus we need to find and spill
2747 // parts of active and inactive live regions that use the same register 2761 // parts of active and inactive live regions that use the same register
2748 // at the same lifetime positions as current. 2762 // at the same lifetime positions as current.
2749 SplitAndSpillIntersecting(current); 2763 SplitAndSpillIntersecting(current);
2750 } 2764 }
2751 2765
2752 2766
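AllocateBlockedReg above follows the same shape but compares two arrays: use_pos (the next use of each register by a competing range) and block_pos (where a fixed range hard-blocks the register). A rough standalone sketch follows, again with hypothetical SimpleRange/kNumRegs/kMaxPos stand-ins and with the hint, inactive-range and can-be-spilled details left out: if even the best register's next competing use comes before current's first use that needs a register, current is spilled up to that use; otherwise current takes the register, splitting first when the register becomes hard-blocked before current ends, and SplitAndSpillIntersecting evicts the competitors.

#include <algorithm>
#include <vector>

constexpr int kNumRegs = 4;       // hypothetical
constexpr int kMaxPos = 1 << 30;  // hypothetical MaxPosition()

struct SimpleRange {
  int end;
  int assigned_reg;       // register currently held by this range
  bool fixed;             // fixed ranges can never be spilled
  int next_register_use;  // first use that must sit in a register
};

enum class Outcome { kSpillUpToFirstUse, kSplitThenTake, kTake };

Outcome AllocateBlockedRegSketch(const SimpleRange& current,
                                 const std::vector<SimpleRange>& active,
                                 int* chosen_reg) {
  int use_pos[kNumRegs], block_pos[kNumRegs];
  std::fill(use_pos, use_pos + kNumRegs, kMaxPos);
  std::fill(block_pos, block_pos + kNumRegs, kMaxPos);
  for (const SimpleRange& r : active) {
    if (r.fixed) {
      block_pos[r.assigned_reg] = use_pos[r.assigned_reg] = 0;
    } else {
      use_pos[r.assigned_reg] =
          std::min(use_pos[r.assigned_reg], r.next_register_use);
    }
  }
  int reg = 0;
  for (int i = 1; i < kNumRegs; ++i) {
    if (use_pos[i] > use_pos[reg]) reg = i;
  }
  *chosen_reg = reg;
  if (use_pos[reg] < current.next_register_use) {
    return Outcome::kSpillUpToFirstUse;  // SpillBetween(current, start, use)
  }
  if (block_pos[reg] < current.end) {
    return Outcome::kSplitThenTake;      // split before block_pos[reg] first
  }
  return Outcome::kTake;                 // then SplitAndSpillIntersecting
}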
2753 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) { 2767 void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
2754 DCHECK(current->HasRegisterAssigned()); 2768 DCHECK(current->HasRegisterAssigned());
2755 int reg = current->assigned_register(); 2769 int reg = current->assigned_register();
2756 auto split_pos = current->Start(); 2770 LifetimePosition split_pos = current->Start();
2757 for (size_t i = 0; i < active_live_ranges().size(); ++i) { 2771 for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2758 auto range = active_live_ranges()[i]; 2772 LiveRange* range = active_live_ranges()[i];
2759 if (range->assigned_register() == reg) { 2773 if (range->assigned_register() == reg) {
2760 auto next_pos = range->NextRegisterPosition(current->Start()); 2774 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2761 auto spill_pos = FindOptimalSpillingPos(range, split_pos); 2775 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
2762 if (next_pos == nullptr) { 2776 if (next_pos == nullptr) {
2763 SpillAfter(range, spill_pos); 2777 SpillAfter(range, spill_pos);
2764 } else { 2778 } else {
2765 // When spilling between spill_pos and next_pos ensure that the range 2779 // When spilling between spill_pos and next_pos ensure that the range
2766 // remains spilled at least until the start of the current live range. 2780 // remains spilled at least until the start of the current live range.
2767 // This guarantees that we will not introduce new unhandled ranges that 2781 // This guarantees that we will not introduce new unhandled ranges that
2768 // start before the current range as this violates allocation invariant 2782 // start before the current range as this violates allocation invariant
2769 // and will lead to an inconsistent state of active and inactive 2783 // and will lead to an inconsistent state of active and inactive
2770 // live-ranges: ranges are allocated in order of their start positions, 2784 // live-ranges: ranges are allocated in order of their start positions,
2771 // ranges are retired from active/inactive when the start of the 2785 // ranges are retired from active/inactive when the start of the
2772 // current live-range is larger than their end. 2786 // current live-range is larger than their end.
2773 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos()); 2787 SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
2774 } 2788 }
2775 ActiveToHandled(range); 2789 ActiveToHandled(range);
2776 --i; 2790 --i;
2777 } 2791 }
2778 } 2792 }
2779 2793
2780 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) { 2794 for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
2781 auto range = inactive_live_ranges()[i]; 2795 LiveRange* range = inactive_live_ranges()[i];
2782 DCHECK(range->End() > current->Start()); 2796 DCHECK(range->End() > current->Start());
2783 if (range->assigned_register() == reg && !range->TopLevel()->IsFixed()) { 2797 if (range->assigned_register() == reg && !range->TopLevel()->IsFixed()) {
2784 LifetimePosition next_intersection = range->FirstIntersection(current); 2798 LifetimePosition next_intersection = range->FirstIntersection(current);
2785 if (next_intersection.IsValid()) { 2799 if (next_intersection.IsValid()) {
2786 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); 2800 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2787 if (next_pos == nullptr) { 2801 if (next_pos == nullptr) {
2788 SpillAfter(range, split_pos); 2802 SpillAfter(range, split_pos);
2789 } else { 2803 } else {
2790 next_intersection = Min(next_intersection, next_pos->pos()); 2804 next_intersection = Min(next_intersection, next_pos->pos());
2791 SpillBetween(range, split_pos, next_intersection); 2805 SpillBetween(range, split_pos, next_intersection);
2792 } 2806 }
2793 InactiveToHandled(range); 2807 InactiveToHandled(range);
2794 --i; 2808 --i;
2795 } 2809 }
2796 } 2810 }
2797 } 2811 }
2798 } 2812 }
2799 2813
2800 2814
2801 bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) { 2815 bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
2802 if (!range->is_phi()) return false; 2816 if (!range->is_phi()) return false;
2803 2817
2804 DCHECK(!range->HasSpillOperand()); 2818 DCHECK(!range->HasSpillOperand());
2805 auto phi_map_value = data()->GetPhiMapValueFor(range); 2819 RegisterAllocationData::PhiMapValue* phi_map_value =
2806 auto phi = phi_map_value->phi(); 2820 data()->GetPhiMapValueFor(range);
2807 auto block = phi_map_value->block(); 2821 const PhiInstruction* phi = phi_map_value->phi();
2822 const InstructionBlock* block = phi_map_value->block();
2808 // Count the number of spilled operands. 2823 // Count the number of spilled operands.
2809 size_t spilled_count = 0; 2824 size_t spilled_count = 0;
2810 LiveRange* first_op = nullptr; 2825 LiveRange* first_op = nullptr;
2811 for (size_t i = 0; i < phi->operands().size(); i++) { 2826 for (size_t i = 0; i < phi->operands().size(); i++) {
2812 int op = phi->operands()[i]; 2827 int op = phi->operands()[i];
2813 LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op); 2828 LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
2814 if (!op_range->TopLevel()->HasSpillRange()) continue; 2829 if (!op_range->TopLevel()->HasSpillRange()) continue;
2815 auto pred = code()->InstructionBlockAt(block->predecessors()[i]); 2830 const InstructionBlock* pred =
2816 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( 2831 code()->InstructionBlockAt(block->predecessors()[i]);
2817 pred->last_instruction_index()); 2832 LifetimePosition pred_end =
2833 LifetimePosition::InstructionFromInstructionIndex(
2834 pred->last_instruction_index());
2818 while (op_range != nullptr && !op_range->CanCover(pred_end)) { 2835 while (op_range != nullptr && !op_range->CanCover(pred_end)) {
2819 op_range = op_range->next(); 2836 op_range = op_range->next();
2820 } 2837 }
2821 if (op_range != nullptr && op_range->spilled()) { 2838 if (op_range != nullptr && op_range->spilled()) {
2822 spilled_count++; 2839 spilled_count++;
2823 if (first_op == nullptr) { 2840 if (first_op == nullptr) {
2824 first_op = op_range->TopLevel(); 2841 first_op = op_range->TopLevel();
2825 } 2842 }
2826 } 2843 }
2827 } 2844 }
2828 2845
2829 // Only continue if more than half of the operands are spilled. 2846 // Only continue if more than half of the operands are spilled.
2830 if (spilled_count * 2 <= phi->operands().size()) { 2847 if (spilled_count * 2 <= phi->operands().size()) {
2831 return false; 2848 return false;
2832 } 2849 }
2833 2850
2834 // Try to merge the spilled operands and count the number of merged spilled 2851 // Try to merge the spilled operands and count the number of merged spilled
2835 // operands. 2852 // operands.
2836 DCHECK(first_op != nullptr); 2853 DCHECK(first_op != nullptr);
2837 auto first_op_spill = first_op->TopLevel()->GetSpillRange(); 2854 SpillRange* first_op_spill = first_op->TopLevel()->GetSpillRange();
2838 size_t num_merged = 1; 2855 size_t num_merged = 1;
2839 for (size_t i = 1; i < phi->operands().size(); i++) { 2856 for (size_t i = 1; i < phi->operands().size(); i++) {
2840 int op = phi->operands()[i]; 2857 int op = phi->operands()[i];
2841 auto op_range = data()->GetOrCreateLiveRangeFor(op); 2858 TopLevelLiveRange* op_range = data()->live_ranges()[op];
2842 if (!op_range->HasSpillRange()) continue; 2859 if (!op_range->HasSpillRange()) continue;
2843 auto op_spill = op_range->GetSpillRange(); 2860 SpillRange* op_spill = op_range->GetSpillRange();
2844 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) { 2861 if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
2845 num_merged++; 2862 num_merged++;
2846 } 2863 }
2847 } 2864 }
2848 2865
2849 // Only continue if enough operands could be merged to the 2866 // Only continue if enough operands could be merged to the
2850 // same spill slot. 2867 // same spill slot.
2851 if (num_merged * 2 <= phi->operands().size() || 2868 if (num_merged * 2 <= phi->operands().size() ||
2852 AreUseIntervalsIntersecting(first_op_spill->interval(), 2869 AreUseIntervalsIntersecting(first_op_spill->interval(),
2853 range->first_interval())) { 2870 range->first_interval())) {
2854 return false; 2871 return false;
2855 } 2872 }
2856 2873
2857 // If the range does not need register soon, spill it to the merged 2874 // If the range does not need register soon, spill it to the merged
2858 // spill range. 2875 // spill range.
2859 auto next_pos = range->Start(); 2876 LifetimePosition next_pos = range->Start();
2860 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart(); 2877 if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
2861 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); 2878 UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
2862 if (pos == nullptr) { 2879 if (pos == nullptr) {
2863 auto spill_range = 2880 SpillRange* spill_range =
2864 range->TopLevel()->HasSpillRange() 2881 range->TopLevel()->HasSpillRange()
2865 ? range->TopLevel()->GetSpillRange() 2882 ? range->TopLevel()->GetSpillRange()
2866 : data()->AssignSpillRangeToLiveRange(range->TopLevel()); 2883 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
2867 bool merged = first_op_spill->TryMerge(spill_range); 2884 bool merged = first_op_spill->TryMerge(spill_range);
2868 CHECK(merged); 2885 CHECK(merged);
2869 Spill(range); 2886 Spill(range);
2870 return true; 2887 return true;
2871 } else if (pos->pos() > range->Start().NextStart()) { 2888 } else if (pos->pos() > range->Start().NextStart()) {
2872 auto spill_range = 2889 SpillRange* spill_range =
2873 range->TopLevel()->HasSpillRange() 2890 range->TopLevel()->HasSpillRange()
2874 ? range->TopLevel()->GetSpillRange() 2891 ? range->TopLevel()->GetSpillRange()
2875 : data()->AssignSpillRangeToLiveRange(range->TopLevel()); 2892 : data()->AssignSpillRangeToLiveRange(range->TopLevel());
2876 bool merged = first_op_spill->TryMerge(spill_range); 2893 bool merged = first_op_spill->TryMerge(spill_range);
2877 CHECK(merged); 2894 CHECK(merged);
2878 SpillBetween(range, range->Start(), pos->pos()); 2895 SpillBetween(range, range->Start(), pos->pos());
2879 DCHECK(UnhandledIsSorted()); 2896 DCHECK(UnhandledIsSorted());
2880 return true; 2897 return true;
2881 } 2898 }
2882 return false; 2899 return false;
2883 } 2900 }
2884 2901
2885 2902
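The two size checks in TryReuseSpillForPhi above (spilled_count * 2 <= operands and num_merged * 2 <= operands) implement a strict more-than-half threshold. A tiny standalone illustration of that arithmetic, with a hypothetical helper name:

#include <cassert>
#include <cstddef>

bool MoreThanHalf(size_t count, size_t total) { return count * 2 > total; }

int main() {
  // With five phi inputs, at least three must already be spilled (and at
  // least three must merge into one slot) before the phi reuses that slot.
  assert(!MoreThanHalf(2, 5));
  assert(MoreThanHalf(3, 5));
  return 0;
}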
2886 void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { 2903 void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2887 auto second_part = SplitRangeAt(range, pos); 2904 LiveRange* second_part = SplitRangeAt(range, pos);
2888 Spill(second_part); 2905 Spill(second_part);
2889 } 2906 }
2890 2907
2891 2908
2892 void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start, 2909 void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
2893 LifetimePosition end) { 2910 LifetimePosition end) {
2894 SpillBetweenUntil(range, start, start, end); 2911 SpillBetweenUntil(range, start, start, end);
2895 } 2912 }
2896 2913
2897 2914
2898 void LinearScanAllocator::SpillBetweenUntil(LiveRange* range, 2915 void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
2899 LifetimePosition start, 2916 LifetimePosition start,
2900 LifetimePosition until, 2917 LifetimePosition until,
2901 LifetimePosition end) { 2918 LifetimePosition end) {
2902 CHECK(start < end); 2919 CHECK(start < end);
2903 auto second_part = SplitRangeAt(range, start); 2920 LiveRange* second_part = SplitRangeAt(range, start);
2904 2921
2905 if (second_part->Start() < end) { 2922 if (second_part->Start() < end) {
2906 // The split result intersects with [start, end[. 2923 // The split result intersects with [start, end[.
2907 // Split it at position between ]start+1, end[, spill the middle part 2924 // Split it at position between ]start+1, end[, spill the middle part
2908 // and put the rest to unhandled. 2925 // and put the rest to unhandled.
2909 auto third_part_end = end.PrevStart().End(); 2926 LifetimePosition third_part_end = end.PrevStart().End();
2910 if (data()->IsBlockBoundary(end.Start())) { 2927 if (data()->IsBlockBoundary(end.Start())) {
2911 third_part_end = end.Start(); 2928 third_part_end = end.Start();
2912 } 2929 }
2913 auto third_part = SplitBetween( 2930 LiveRange* third_part = SplitBetween(
2914 second_part, Max(second_part->Start().End(), until), third_part_end); 2931 second_part, Max(second_part->Start().End(), until), third_part_end);
2915 2932
2916 DCHECK(third_part != second_part); 2933 DCHECK(third_part != second_part);
2917 2934
2918 Spill(second_part); 2935 Spill(second_part);
2919 AddToUnhandledSorted(third_part); 2936 AddToUnhandledSorted(third_part);
2920 } else { 2937 } else {
2921 // The split result does not intersect with [start, end[. 2938 // The split result does not intersect with [start, end[.
2922 // Nothing to spill. Just put it to unhandled as whole. 2939 // Nothing to spill. Just put it to unhandled as whole.
2923 AddToUnhandledSorted(second_part); 2940 AddToUnhandledSorted(second_part);
2924 } 2941 }
2925 } 2942 }
2926 2943
2927 2944
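SpillBetweenUntil above performs an up-to-three-way split. A rough standalone sketch of the positions involved, using plain ints and a hypothetical Piece type instead of LiveRange; the real code additionally lets SplitBetween pick a good split position between the two bounds shown here, for example at a block boundary.

#include <algorithm>

struct Piece { int start; int end; };  // hypothetical stand-in for LiveRange

// Plans the split points: the part of `range` that overlaps [start, end[ is
// spilled, while the remainder, starting no earlier than `until`, goes back
// to the unhandled list.
void SpillBetweenUntilSketch(Piece range, int start, int until, int end,
                             Piece* spilled, Piece* rehandled) {
  Piece second = {start, range.end};                   // SplitRangeAt(range, start)
  if (second.start < end) {
    int split_at = std::max(second.start + 1, until);  // real code: SplitBetween
    *spilled = {second.start, split_at};               // middle piece -> stack slot
    *rehandled = {split_at, second.end};               // tail -> AddToUnhandledSorted
  } else {
    *spilled = {start, start};                         // nothing intersects [start, end[
    *rehandled = second;
  }
}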
2928 SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data) 2945 SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
2929 : data_(data) {} 2946 : data_(data) {}
2930 2947
2931 2948
2932 void SpillSlotLocator::LocateSpillSlots() { 2949 void SpillSlotLocator::LocateSpillSlots() {
2933 auto code = data()->code(); 2950 const InstructionSequence* code = data()->code();
2934 for (TopLevelLiveRange* range : data()->live_ranges()) { 2951 for (TopLevelLiveRange* range : data()->live_ranges()) {
2935 if (range == nullptr || range->IsEmpty()) continue; 2952 if (range == nullptr || range->IsEmpty()) continue;
2936 // We care only about ranges which spill in the frame. 2953 // We care only about ranges which spill in the frame.
2937 if (!range->HasSpillRange()) continue; 2954 if (!range->HasSpillRange()) continue;
2938 if (range->IsSpilledOnlyInDeferredBlocks()) { 2955 if (range->IsSpilledOnlyInDeferredBlocks()) {
2939 for (LiveRange* child = range; child != nullptr; child = child->next()) { 2956 for (LiveRange* child = range; child != nullptr; child = child->next()) {
2940 if (child->spilled()) { 2957 if (child->spilled()) {
2941 code->GetInstructionBlock(child->Start().ToInstructionIndex()) 2958 code->GetInstructionBlock(child->Start().ToInstructionIndex())
2942 ->mark_needs_frame(); 2959 ->mark_needs_frame();
2943 } 2960 }
2944 } 2961 }
2945 } else { 2962 } else {
2946 auto spills = range->spill_move_insertion_locations(); 2963 TopLevelLiveRange::SpillMoveInsertionList* spills =
2964 range->spill_move_insertion_locations();
2947 DCHECK_NOT_NULL(spills); 2965 DCHECK_NOT_NULL(spills);
2948 for (; spills != nullptr; spills = spills->next) { 2966 for (; spills != nullptr; spills = spills->next) {
2949 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame(); 2967 code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
2950 } 2968 }
2951 } 2969 }
2952 } 2970 }
2953 } 2971 }
2954 2972
2955 2973
2956 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {} 2974 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
(...skipping 34 matching lines...)
2991 spill_operand = *top_range->TopLevel()->GetSpillOperand(); 3009 spill_operand = *top_range->TopLevel()->GetSpillOperand();
2992 } else if (top_range->TopLevel()->HasSpillRange()) { 3010 } else if (top_range->TopLevel()->HasSpillRange()) {
2993 spill_operand = top_range->TopLevel()->GetSpillRangeOperand(); 3011 spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
2994 } 3012 }
2995 if (top_range->is_phi()) { 3013 if (top_range->is_phi()) {
2996 data()->GetPhiMapValueFor(top_range)->CommitAssignment( 3014 data()->GetPhiMapValueFor(top_range)->CommitAssignment(
2997 top_range->GetAssignedOperand()); 3015 top_range->GetAssignedOperand());
2998 } 3016 }
2999 for (LiveRange* range = top_range; range != nullptr; 3017 for (LiveRange* range = top_range; range != nullptr;
3000 range = range->next()) { 3018 range = range->next()) {
3001 auto assigned = range->GetAssignedOperand(); 3019 InstructionOperand assigned = range->GetAssignedOperand();
3002 range->ConvertUsesToOperand(assigned, spill_operand); 3020 range->ConvertUsesToOperand(assigned, spill_operand);
3003 } 3021 }
3004 3022
3005 if (!spill_operand.IsInvalid()) { 3023 if (!spill_operand.IsInvalid()) {
3006 // If this top level range has a child spilled in a deferred block, we use 3024 // If this top level range has a child spilled in a deferred block, we use
3007 // the range and control flow connection mechanism instead of spilling at 3025 // the range and control flow connection mechanism instead of spilling at
3008 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow 3026 // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
3009 // phases. Normally, when we spill at definition, we do not insert a 3027 // phases. Normally, when we spill at definition, we do not insert a
3010 // connecting move when a successor child range is spilled - because the 3028 // connecting move when a successor child range is spilled - because the
3011 // spilled range picks up its value from the slot which was assigned at 3029 // spilled range picks up its value from the slot which was assigned at
(...skipping 14 matching lines...)
3026 } 3044 }
3027 } 3045 }
3028 3046
3029 3047
3030 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data) 3048 ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
3031 : data_(data) {} 3049 : data_(data) {}
3032 3050
3033 3051
3034 bool ReferenceMapPopulator::SafePointsAreInOrder() const { 3052 bool ReferenceMapPopulator::SafePointsAreInOrder() const {
3035 int safe_point = 0; 3053 int safe_point = 0;
3036 for (auto map : *data()->code()->reference_maps()) { 3054 for (ReferenceMap* map : *data()->code()->reference_maps()) {
3037 if (safe_point > map->instruction_position()) return false; 3055 if (safe_point > map->instruction_position()) return false;
3038 safe_point = map->instruction_position(); 3056 safe_point = map->instruction_position();
3039 } 3057 }
3040 return true; 3058 return true;
3041 } 3059 }
3042 3060
3043 3061
3044 void ReferenceMapPopulator::PopulateReferenceMaps() { 3062 void ReferenceMapPopulator::PopulateReferenceMaps() {
3045 DCHECK(SafePointsAreInOrder()); 3063 DCHECK(SafePointsAreInOrder());
3046 // Map all delayed references. 3064 // Map all delayed references.
3047 for (auto& delayed_reference : data()->delayed_references()) { 3065 for (RegisterAllocationData::DelayedReference& delayed_reference :
3066 data()->delayed_references()) {
3048 delayed_reference.map->RecordReference( 3067 delayed_reference.map->RecordReference(
3049 AllocatedOperand::cast(*delayed_reference.operand)); 3068 AllocatedOperand::cast(*delayed_reference.operand));
3050 } 3069 }
3051 // Iterate over all safe point positions and record a pointer 3070 // Iterate over all safe point positions and record a pointer
3052 // for all spilled live ranges at this point. 3071 // for all spilled live ranges at this point.
3053 int last_range_start = 0; 3072 int last_range_start = 0;
3054 auto reference_maps = data()->code()->reference_maps(); 3073 const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
3055 ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); 3074 ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
3056 for (TopLevelLiveRange* range : data()->live_ranges()) { 3075 for (TopLevelLiveRange* range : data()->live_ranges()) {
3057 if (range == nullptr) continue; 3076 if (range == nullptr) continue;
3058 // Skip non-reference values. 3077 // Skip non-reference values.
3059 if (!data()->IsReference(range)) continue; 3078 if (!data()->IsReference(range)) continue;
3060 // Skip empty live ranges. 3079 // Skip empty live ranges.
3061 if (range->IsEmpty()) continue; 3080 if (range->IsEmpty()) continue;
3062 if (range->has_preassigned_slot()) continue; 3081 if (range->has_preassigned_slot()) continue;
3063 3082
3064 // Find the extent of the range and its children. 3083 // Find the extent of the range and its children.
3065 int start = range->Start().ToInstructionIndex(); 3084 int start = range->Start().ToInstructionIndex();
3066 int end = 0; 3085 int end = 0;
3067 for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) { 3086 for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
3068 auto this_end = cur->End(); 3087 LifetimePosition this_end = cur->End();
3069 if (this_end.ToInstructionIndex() > end) 3088 if (this_end.ToInstructionIndex() > end)
3070 end = this_end.ToInstructionIndex(); 3089 end = this_end.ToInstructionIndex();
3071 DCHECK(cur->Start().ToInstructionIndex() >= start); 3090 DCHECK(cur->Start().ToInstructionIndex() >= start);
3072 } 3091 }
3073 3092
3074 // Most of the ranges are in order, but not all. Keep an eye on when they 3093 // Most of the ranges are in order, but not all. Keep an eye on when they
3075 // step backwards and reset the first_it so we don't miss any safe points. 3094 // step backwards and reset the first_it so we don't miss any safe points.
3076 if (start < last_range_start) first_it = reference_maps->begin(); 3095 if (start < last_range_start) first_it = reference_maps->begin();
3077 last_range_start = start; 3096 last_range_start = start;
3078 3097
3079 // Step across all the safe points that are before the start of this range, 3098 // Step across all the safe points that are before the start of this range,
3080 // recording how far we step in order to save doing this for the next range. 3099 // recording how far we step in order to save doing this for the next range.
3081 for (; first_it != reference_maps->end(); ++first_it) { 3100 for (; first_it != reference_maps->end(); ++first_it) {
3082 auto map = *first_it; 3101 ReferenceMap* map = *first_it;
3083 if (map->instruction_position() >= start) break; 3102 if (map->instruction_position() >= start) break;
3084 } 3103 }
3085 3104
3086 InstructionOperand spill_operand; 3105 InstructionOperand spill_operand;
3087 if (((range->HasSpillOperand() && 3106 if (((range->HasSpillOperand() &&
3088 !range->GetSpillOperand()->IsConstant()) || 3107 !range->GetSpillOperand()->IsConstant()) ||
3089 range->HasSpillRange())) { 3108 range->HasSpillRange())) {
3090 if (range->HasSpillOperand()) { 3109 if (range->HasSpillOperand()) {
3091 spill_operand = *range->GetSpillOperand(); 3110 spill_operand = *range->GetSpillOperand();
3092 } else { 3111 } else {
3093 spill_operand = range->GetSpillRangeOperand(); 3112 spill_operand = range->GetSpillRangeOperand();
3094 } 3113 }
3095 DCHECK(spill_operand.IsStackSlot()); 3114 DCHECK(spill_operand.IsStackSlot());
3096 DCHECK_EQ(MachineRepresentation::kTagged, 3115 DCHECK_EQ(MachineRepresentation::kTagged,
3097 AllocatedOperand::cast(spill_operand).representation()); 3116 AllocatedOperand::cast(spill_operand).representation());
3098 } 3117 }
3099 3118
3100 LiveRange* cur = range; 3119 LiveRange* cur = range;
3101 // Step through the safe points to see whether they are in the range. 3120 // Step through the safe points to see whether they are in the range.
3102 for (auto it = first_it; it != reference_maps->end(); ++it) { 3121 for (auto it = first_it; it != reference_maps->end(); ++it) {
3103 auto map = *it; 3122 ReferenceMap* map = *it;
3104 int safe_point = map->instruction_position(); 3123 int safe_point = map->instruction_position();
3105 3124
3106 // The safe points are sorted so we can stop searching here. 3125 // The safe points are sorted so we can stop searching here.
3107 if (safe_point - 1 > end) break; 3126 if (safe_point - 1 > end) break;
3108 3127
3109 // Advance to the next active range that covers the current 3128 // Advance to the next active range that covers the current
3110 // safe point position. 3129 // safe point position.
3111 auto safe_point_pos = 3130 LifetimePosition safe_point_pos =
3112 LifetimePosition::InstructionFromInstructionIndex(safe_point); 3131 LifetimePosition::InstructionFromInstructionIndex(safe_point);
3113 3132
3114 // Search for the child range (cur) that covers safe_point_pos. If we 3133 // Search for the child range (cur) that covers safe_point_pos. If we
3115 // don't find it before the children pass safe_point_pos, keep cur at 3134 // don't find it before the children pass safe_point_pos, keep cur at
3116 // the last child, because the next safe_point_pos may be covered by cur. 3135 // the last child, because the next safe_point_pos may be covered by cur.
3117 // This may happen if cur has more than one interval, and the current 3136 // This may happen if cur has more than one interval, and the current
3118 // safe_point_pos is in between intervals. 3137 // safe_point_pos is in between intervals.
3119 // For that reason, cur may be at most the last child. 3138 // For that reason, cur may be at most the last child.
3120 DCHECK_NOT_NULL(cur); 3139 DCHECK_NOT_NULL(cur);
3121 DCHECK(safe_point_pos >= cur->Start() || range == cur); 3140 DCHECK(safe_point_pos >= cur->Start() || range == cur);
(...skipping 25 matching lines...)
3147 range->vreg(), spill_index, safe_point); 3166 range->vreg(), spill_index, safe_point);
3148 map->RecordReference(AllocatedOperand::cast(spill_operand)); 3167 map->RecordReference(AllocatedOperand::cast(spill_operand));
3149 } 3168 }
3150 3169
3151 if (!cur->spilled()) { 3170 if (!cur->spilled()) {
3152 TRACE( 3171 TRACE(
3153 "Pointer in register for range %d:%d (start at %d) " 3172 "Pointer in register for range %d:%d (start at %d) "
3154 "at safe point %d\n", 3173 "at safe point %d\n",
3155 range->vreg(), cur->relative_id(), cur->Start().value(), 3174 range->vreg(), cur->relative_id(), cur->Start().value(),
3156 safe_point); 3175 safe_point);
3157 auto operand = cur->GetAssignedOperand(); 3176 InstructionOperand operand = cur->GetAssignedOperand();
3158 DCHECK(!operand.IsStackSlot()); 3177 DCHECK(!operand.IsStackSlot());
3159 DCHECK_EQ(MachineRepresentation::kTagged, 3178 DCHECK_EQ(MachineRepresentation::kTagged,
3160 AllocatedOperand::cast(operand).representation()); 3179 AllocatedOperand::cast(operand).representation());
3161 map->RecordReference(AllocatedOperand::cast(operand)); 3180 map->RecordReference(AllocatedOperand::cast(operand));
3162 } 3181 }
3163 } 3182 }
3164 } 3183 }
3165 } 3184 }
3166 3185
3167 3186
(...skipping 45 matching lines...)
3213 new (curr) LiveRangeBound(i, !spilled_in_blocks && i->spilled()); 3232 new (curr) LiveRangeBound(i, !spilled_in_blocks && i->spilled());
3214 } 3233 }
3215 } 3234 }
3216 3235
3217 LiveRangeBound* Find(const LifetimePosition position) const { 3236 LiveRangeBound* Find(const LifetimePosition position) const {
3218 size_t left_index = 0; 3237 size_t left_index = 0;
3219 size_t right_index = length_; 3238 size_t right_index = length_;
3220 while (true) { 3239 while (true) {
3221 size_t current_index = left_index + (right_index - left_index) / 2; 3240 size_t current_index = left_index + (right_index - left_index) / 2;
3222 DCHECK(right_index > current_index); 3241 DCHECK(right_index > current_index);
3223 auto bound = &start_[current_index]; 3242 LiveRangeBound* bound = &start_[current_index];
3224 if (bound->start_ <= position) { 3243 if (bound->start_ <= position) {
3225 if (position < bound->end_) return bound; 3244 if (position < bound->end_) return bound;
3226 DCHECK(left_index < current_index); 3245 DCHECK(left_index < current_index);
3227 left_index = current_index; 3246 left_index = current_index;
3228 } else { 3247 } else {
3229 right_index = current_index; 3248 right_index = current_index;
3230 } 3249 }
3231 } 3250 }
3232 } 3251 }
3233 3252
3234 LiveRangeBound* FindPred(const InstructionBlock* pred) { 3253 LiveRangeBound* FindPred(const InstructionBlock* pred) {
3235 auto pred_end = LifetimePosition::InstructionFromInstructionIndex( 3254 LifetimePosition pred_end =
3236 pred->last_instruction_index()); 3255 LifetimePosition::InstructionFromInstructionIndex(
3256 pred->last_instruction_index());
3237 return Find(pred_end); 3257 return Find(pred_end);
3238 } 3258 }
3239 3259
3240 LiveRangeBound* FindSucc(const InstructionBlock* succ) { 3260 LiveRangeBound* FindSucc(const InstructionBlock* succ) {
3241 auto succ_start = LifetimePosition::GapFromInstructionIndex( 3261 LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
3242 succ->first_instruction_index()); 3262 succ->first_instruction_index());
3243 return Find(succ_start); 3263 return Find(succ_start);
3244 } 3264 }
3245 3265
3246 bool FindConnectableSubranges(const InstructionBlock* block, 3266 bool FindConnectableSubranges(const InstructionBlock* block,
3247 const InstructionBlock* pred, 3267 const InstructionBlock* pred,
3248 FindResult* result) const { 3268 FindResult* result) const {
3249 LifetimePosition pred_end = 3269 LifetimePosition pred_end =
3250 LifetimePosition::InstructionFromInstructionIndex( 3270 LifetimePosition::InstructionFromInstructionIndex(
3251 pred->last_instruction_index()); 3271 pred->last_instruction_index());
(...skipping 31 matching lines...)
3283 bounds_length_(static_cast<int>(data_->live_ranges().size())), 3303 bounds_length_(static_cast<int>(data_->live_ranges().size())),
3284 bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)), 3304 bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
3285 zone_(zone) { 3305 zone_(zone) {
3286 for (int i = 0; i < bounds_length_; ++i) { 3306 for (int i = 0; i < bounds_length_; ++i) {
3287 new (&bounds_[i]) LiveRangeBoundArray(); 3307 new (&bounds_[i]) LiveRangeBoundArray();
3288 } 3308 }
3289 } 3309 }
3290 3310
3291 LiveRangeBoundArray* ArrayFor(int operand_index) { 3311 LiveRangeBoundArray* ArrayFor(int operand_index) {
3292 DCHECK(operand_index < bounds_length_); 3312 DCHECK(operand_index < bounds_length_);
3293 auto range = data_->live_ranges()[operand_index]; 3313 TopLevelLiveRange* range = data_->live_ranges()[operand_index];
3294 DCHECK(range != nullptr && !range->IsEmpty()); 3314 DCHECK(range != nullptr && !range->IsEmpty());
3295 auto array = &bounds_[operand_index]; 3315 LiveRangeBoundArray* array = &bounds_[operand_index];
3296 if (array->ShouldInitialize()) { 3316 if (array->ShouldInitialize()) {
3297 array->Initialize(zone_, range); 3317 array->Initialize(zone_, range);
3298 } 3318 }
3299 return array; 3319 return array;
3300 } 3320 }
3301 3321
3302 private: 3322 private:
3303 const RegisterAllocationData* const data_; 3323 const RegisterAllocationData* const data_;
3304 const int bounds_length_; 3324 const int bounds_length_;
3305 LiveRangeBoundArray* const bounds_; 3325 LiveRangeBoundArray* const bounds_;
(...skipping 30 matching lines...)
3336 bool LiveRangeConnector::CanEagerlyResolveControlFlow( 3356 bool LiveRangeConnector::CanEagerlyResolveControlFlow(
3337 const InstructionBlock* block) const { 3357 const InstructionBlock* block) const {
3338 if (block->PredecessorCount() != 1) return false; 3358 if (block->PredecessorCount() != 1) return false;
3339 return block->predecessors()[0].IsNext(block->rpo_number()); 3359 return block->predecessors()[0].IsNext(block->rpo_number());
3340 } 3360 }
3341 3361
3342 3362
3343 void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) { 3363 void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
3344 // Lazily linearize live ranges in memory for fast lookup. 3364 // Lazily linearize live ranges in memory for fast lookup.
3345 LiveRangeFinder finder(data(), local_zone); 3365 LiveRangeFinder finder(data(), local_zone);
3346 auto& live_in_sets = data()->live_in_sets(); 3366 ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
3347 for (auto block : code()->instruction_blocks()) { 3367 for (const InstructionBlock* block : code()->instruction_blocks()) {
3348 if (CanEagerlyResolveControlFlow(block)) continue; 3368 if (CanEagerlyResolveControlFlow(block)) continue;
3349 auto live = live_in_sets[block->rpo_number().ToInt()]; 3369 BitVector* live = live_in_sets[block->rpo_number().ToInt()];
3350 BitVector::Iterator iterator(live); 3370 BitVector::Iterator iterator(live);
3351 while (!iterator.Done()) { 3371 while (!iterator.Done()) {
3352 auto* array = finder.ArrayFor(iterator.Current()); 3372 LiveRangeBoundArray* array = finder.ArrayFor(iterator.Current());
3353 for (auto pred : block->predecessors()) { 3373 for (const RpoNumber& pred : block->predecessors()) {
3354 FindResult result; 3374 FindResult result;
3355 const auto* pred_block = code()->InstructionBlockAt(pred); 3375 const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
3356 if (!array->FindConnectableSubranges(block, pred_block, &result)) { 3376 if (!array->FindConnectableSubranges(block, pred_block, &result)) {
3357 continue; 3377 continue;
3358 } 3378 }
3359 auto pred_op = result.pred_cover_->GetAssignedOperand(); 3379 InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
3360 auto cur_op = result.cur_cover_->GetAssignedOperand(); 3380 InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
3361 if (pred_op.Equals(cur_op)) continue; 3381 if (pred_op.Equals(cur_op)) continue;
3362 ResolveControlFlow(block, cur_op, pred_block, pred_op); 3382 ResolveControlFlow(block, cur_op, pred_block, pred_op);
3363 } 3383 }
3364 iterator.Advance(); 3384 iterator.Advance();
3365 } 3385 }
3366 } 3386 }
3367 } 3387 }
3368 3388
3369 3389
3370 void LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block, 3390 void LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
(...skipping 19 matching lines...)
3390 3410
3391 3411
3392 void LiveRangeConnector::ConnectRanges(Zone* local_zone) { 3412 void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
3393 DelayedInsertionMap delayed_insertion_map(local_zone); 3413 DelayedInsertionMap delayed_insertion_map(local_zone);
3394 for (TopLevelLiveRange* top_range : data()->live_ranges()) { 3414 for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3395 if (top_range == nullptr) continue; 3415 if (top_range == nullptr) continue;
3396 bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks(); 3416 bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
3397 LiveRange* first_range = top_range; 3417 LiveRange* first_range = top_range;
3398 for (LiveRange *second_range = first_range->next(); second_range != nullptr; 3418 for (LiveRange *second_range = first_range->next(); second_range != nullptr;
3399 first_range = second_range, second_range = second_range->next()) { 3419 first_range = second_range, second_range = second_range->next()) {
3400 auto pos = second_range->Start(); 3420 LifetimePosition pos = second_range->Start();
3401 // Add gap move if the two live ranges touch and there is no block 3421 // Add gap move if the two live ranges touch and there is no block
3402 // boundary. 3422 // boundary.
3403 if (!connect_spilled && second_range->spilled()) continue; 3423 if (!connect_spilled && second_range->spilled()) continue;
3404 if (first_range->End() != pos) continue; 3424 if (first_range->End() != pos) continue;
3405 if (data()->IsBlockBoundary(pos) && 3425 if (data()->IsBlockBoundary(pos) &&
3406 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) { 3426 !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
3407 continue; 3427 continue;
3408 } 3428 }
3409 auto prev_operand = first_range->GetAssignedOperand(); 3429 InstructionOperand prev_operand = first_range->GetAssignedOperand();
3410 auto cur_operand = second_range->GetAssignedOperand(); 3430 InstructionOperand cur_operand = second_range->GetAssignedOperand();
3411 if (prev_operand.Equals(cur_operand)) continue; 3431 if (prev_operand.Equals(cur_operand)) continue;
3412 bool delay_insertion = false; 3432 bool delay_insertion = false;
3413 Instruction::GapPosition gap_pos; 3433 Instruction::GapPosition gap_pos;
3414 int gap_index = pos.ToInstructionIndex(); 3434 int gap_index = pos.ToInstructionIndex();
3415 if (pos.IsGapPosition()) { 3435 if (pos.IsGapPosition()) {
3416 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END; 3436 gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
3417 } else { 3437 } else {
3418 if (pos.IsStart()) { 3438 if (pos.IsStart()) {
3419 delay_insertion = true; 3439 delay_insertion = true;
3420 } else { 3440 } else {
3421 gap_index++; 3441 gap_index++;
3422 } 3442 }
3423 gap_pos = delay_insertion ? Instruction::END : Instruction::START; 3443 gap_pos = delay_insertion ? Instruction::END : Instruction::START;
3424 } 3444 }
3425 auto move = code()->InstructionAt(gap_index)->GetOrCreateParallelMove( 3445 ParallelMove* move =
3426 gap_pos, code_zone()); 3446 code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
3447 gap_pos, code_zone());
3427 if (!delay_insertion) { 3448 if (!delay_insertion) {
3428 move->AddMove(prev_operand, cur_operand); 3449 move->AddMove(prev_operand, cur_operand);
3429 } else { 3450 } else {
3430 delayed_insertion_map.insert( 3451 delayed_insertion_map.insert(
3431 std::make_pair(std::make_pair(move, prev_operand), cur_operand)); 3452 std::make_pair(std::make_pair(move, prev_operand), cur_operand));
3432 } 3453 }
3433 } 3454 }
3434 } 3455 }
3435 if (delayed_insertion_map.empty()) return; 3456 if (delayed_insertion_map.empty()) return;
3436 // Insert all the moves which should occur after the stored move. 3457 // Insert all the moves which should occur after the stored move.
3437 ZoneVector<MoveOperands*> to_insert(local_zone); 3458 ZoneVector<MoveOperands*> to_insert(local_zone);
3438 ZoneVector<MoveOperands*> to_eliminate(local_zone); 3459 ZoneVector<MoveOperands*> to_eliminate(local_zone);
3439 to_insert.reserve(4); 3460 to_insert.reserve(4);
3440 to_eliminate.reserve(4); 3461 to_eliminate.reserve(4);
3441 auto moves = delayed_insertion_map.begin()->first.first; 3462 ParallelMove* moves = delayed_insertion_map.begin()->first.first;
3442 for (auto it = delayed_insertion_map.begin();; ++it) { 3463 for (auto it = delayed_insertion_map.begin();; ++it) {
3443 bool done = it == delayed_insertion_map.end(); 3464 bool done = it == delayed_insertion_map.end();
3444 if (done || it->first.first != moves) { 3465 if (done || it->first.first != moves) {
3445 // Commit the MoveOperands for current ParallelMove. 3466 // Commit the MoveOperands for current ParallelMove.
3446 for (auto move : to_eliminate) { 3467 for (MoveOperands* move : to_eliminate) {
3447 move->Eliminate(); 3468 move->Eliminate();
3448 } 3469 }
3449 for (auto move : to_insert) { 3470 for (MoveOperands* move : to_insert) {
3450 moves->push_back(move); 3471 moves->push_back(move);
3451 } 3472 }
3452 if (done) break; 3473 if (done) break;
3453 // Reset state. 3474 // Reset state.
3454 to_eliminate.clear(); 3475 to_eliminate.clear();
3455 to_insert.clear(); 3476 to_insert.clear();
3456 moves = it->first.first; 3477 moves = it->first.first;
3457 } 3478 }
3458 // Gather all MoveOperands for a single ParallelMove. 3479 // Gather all MoveOperands for a single ParallelMove.
3459 auto move = new (code_zone()) MoveOperands(it->first.second, it->second); 3480 MoveOperands* move =
3460 auto eliminate = moves->PrepareInsertAfter(move); 3481 new (code_zone()) MoveOperands(it->first.second, it->second);
3482 MoveOperands* eliminate = moves->PrepareInsertAfter(move);
3461 to_insert.push_back(move); 3483 to_insert.push_back(move);
3462 if (eliminate != nullptr) to_eliminate.push_back(eliminate); 3484 if (eliminate != nullptr) to_eliminate.push_back(eliminate);
3463 } 3485 }
3464 } 3486 }
3465 3487
3466 3488
3467 } // namespace compiler 3489 } // namespace compiler
3468 } // namespace internal 3490 } // namespace internal
3469 } // namespace v8 3491 } // namespace v8