Unified diff: src/compiler/register-allocator.cc

Issue 426233002: Land the Fan (disabled) (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Review feedback, rebase and "git cl format" Created 6 years, 4 months ago
-// Copyright 2012 the V8 project authors. All rights reserved.
+// Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

-#include "src/v8.h"
+#include "src/compiler/register-allocator.h"

+#include "src/compiler/linkage.h"
 #include "src/hydrogen.h"
-#include "src/lithium-allocator-inl.h"
 #include "src/string-stream.h"

-#if V8_TARGET_ARCH_IA32
-#include "src/ia32/lithium-ia32.h"  // NOLINT
-#elif V8_TARGET_ARCH_X64
-#include "src/x64/lithium-x64.h"  // NOLINT
-#elif V8_TARGET_ARCH_ARM64
-#include "src/arm64/lithium-arm64.h"  // NOLINT
-#elif V8_TARGET_ARCH_ARM
-#include "src/arm/lithium-arm.h"  // NOLINT
-#elif V8_TARGET_ARCH_MIPS
-#include "src/mips/lithium-mips.h"  // NOLINT
-#elif V8_TARGET_ARCH_MIPS64
-#include "src/mips64/lithium-mips64.h"  // NOLINT
-#elif V8_TARGET_ARCH_X87
-#include "src/x87/lithium-x87.h"  // NOLINT
-#else
-#error "Unknown architecture."
-#endif
-
 namespace v8 {
 namespace internal {
+namespace compiler {

 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
   return a.Value() < b.Value() ? a : b;
 }


 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
   return a.Value() > b.Value() ? a : b;
 }
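
Note on LifetimePosition: it is an integer ordinal, so Min/Max and every
comparison in this file reduce to Value(). A minimal standalone sketch of
such an encoding (illustrative constants and names, not necessarily the
exact ones V8 uses):

#include <cassert>

// Illustrative encoding: each instruction index owns two adjacent position
// values, a start half and an end half, so "end of instruction i" sorts
// strictly before "start of instruction i + 1".
class Pos {
 public:
  static Pos FromInstructionIndex(int index) { return Pos(index * 2); }
  Pos InstructionEnd() const { return Pos(value_ | 1); }
  Pos NextInstruction() const { return Pos((value_ & ~1) + 2); }
  int Value() const { return value_; }

 private:
  explicit Pos(int value) : value_(value) {}
  int value_;
};

static Pos Min(Pos a, Pos b) { return a.Value() < b.Value() ? a : b; }

int main() {
  Pos a = Pos::FromInstructionIndex(3);      // value 6
  assert(a.InstructionEnd().Value() == 7);   // still inside instruction 3
  assert(a.NextInstruction().Value() == 8);  // start of instruction 4
  assert(Min(a, a.NextInstruction()).Value() == a.Value());
}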


-UsePosition::UsePosition(LifetimePosition pos,
-                         LOperand* operand,
-                         LOperand* hint)
+UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
+                         InstructionOperand* hint)
     : operand_(operand),
       hint_(hint),
       pos_(pos),
       next_(NULL),
       requires_reg_(false),
       register_beneficial_(true) {
   if (operand_ != NULL && operand_->IsUnallocated()) {
-    LUnallocated* unalloc = LUnallocated::cast(operand_);
-    requires_reg_ = unalloc->HasRegisterPolicy() ||
-        unalloc->HasDoubleRegisterPolicy();
+    const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
+    requires_reg_ = unalloc->HasRegisterPolicy();
     register_beneficial_ = !unalloc->HasAnyPolicy();
   }
   ASSERT(pos_.IsValid());
 }


 bool UsePosition::HasHint() const {
   return hint_ != NULL && !hint_->IsUnallocated();
 }


-bool UsePosition::RequiresRegister() const {
-  return requires_reg_;
-}
+bool UsePosition::RequiresRegister() const { return requires_reg_; }


-bool UsePosition::RegisterIsBeneficial() const {
-  return register_beneficial_;
-}
+bool UsePosition::RegisterIsBeneficial() const { return register_beneficial_; }


 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
   ASSERT(Contains(pos) && pos.Value() != start().Value());
-  UseInterval* after = new(zone) UseInterval(pos, end_);
+  UseInterval* after = new (zone) UseInterval(pos, end_);
   after->next_ = next_;
   next_ = after;
   end_ = pos;
 }
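
A standalone sketch of the list surgery SplitAt performs: cutting
[start, end[ at pos shrinks the node to [start, pos[ and links a fresh
[pos, end[ node right after it. (The real code zone-allocates the new node;
plain `new` stands in here, and the names are illustrative.)

#include <cassert>

struct Interval {
  Interval(int s, int e, Interval* n = nullptr) : start(s), end(e), next(n) {}
  int start, end;
  Interval* next;

  // Split [start, end[ at pos, which must fall strictly inside the interval.
  void SplitAt(int pos) {
    assert(start < pos && pos < end);
    next = new Interval(pos, end, next);
    end = pos;
  }
};

int main() {
  Interval i(10, 30);
  i.SplitAt(20);
  assert(i.start == 10 && i.end == 20);
  assert(i.next->start == 20 && i.next->end == 30);
  delete i.next;
}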


 #ifdef DEBUG


 void LiveRange::Verify() const {
(...skipping 19 matching lines...)
   return false;
 }


 #endif


 LiveRange::LiveRange(int id, Zone* zone)
     : id_(id),
       spilled_(false),
+      is_phi_(false),
+      is_non_loop_phi_(false),
       kind_(UNALLOCATED_REGISTERS),
       assigned_register_(kInvalidAssignment),
       last_interval_(NULL),
       first_interval_(NULL),
       first_pos_(NULL),
       parent_(NULL),
       next_(NULL),
       current_interval_(NULL),
       last_processed_use_(NULL),
       current_hint_operand_(NULL),
-      spill_operand_(new(zone) LOperand()),
-      spill_start_index_(kMaxInt) { }
+      spill_operand_(new (zone) InstructionOperand()),
+      spill_start_index_(kMaxInt) {}


 void LiveRange::set_assigned_register(int reg, Zone* zone) {
   ASSERT(!HasRegisterAssigned() && !IsSpilled());
   assigned_register_ = reg;
   ConvertOperands(zone);
 }


 void LiveRange::MakeSpilled(Zone* zone) {
   ASSERT(!IsSpilled());
   ASSERT(TopLevel()->HasAllocatedSpillOperand());
   spilled_ = true;
   assigned_register_ = kInvalidAssignment;
   ConvertOperands(zone);
 }


 bool LiveRange::HasAllocatedSpillOperand() const {
   ASSERT(spill_operand_ != NULL);
   return !spill_operand_->IsIgnored();
 }


-void LiveRange::SetSpillOperand(LOperand* operand) {
+void LiveRange::SetSpillOperand(InstructionOperand* operand) {
   ASSERT(!operand->IsUnallocated());
   ASSERT(spill_operand_ != NULL);
   ASSERT(spill_operand_->IsIgnored());
   spill_operand_->ConvertTo(operand->kind(), operand->index());
 }


 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
   UsePosition* use_pos = last_processed_use_;
   if (use_pos == NULL) use_pos = first_pos();
(...skipping 34 matching lines...)
   }
   return pos;
 }

 bool LiveRange::CanBeSpilled(LifetimePosition pos) {
   // We cannot spill a live range that has a use requiring a register
   // at the current or the immediate next position.
   UsePosition* use_pos = NextRegisterPosition(pos);
   if (use_pos == NULL) return true;
-  return
-      use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
+  return use_pos->pos().Value() >
+         pos.NextInstruction().InstructionEnd().Value();
 }
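
A standalone illustration of the comparison CanBeSpilled makes, with plain
ints standing in for LifetimePosition and the same two-positions-per-
instruction assumption as the sketch after Min/Max above:

#include <cassert>

// A range is spillable at `pos` only if its next register-requiring use
// lies strictly beyond the end of the following instruction.
bool CanBeSpilledAt(int pos, int next_register_use) {
  int next_instruction_end = (pos / 2 + 1) * 2 + 1;
  return next_register_use > next_instruction_end;
}

int main() {
  // A register use at the very next instruction blocks spilling...
  assert(!CanBeSpilledAt(/*pos=*/4, /*next_register_use=*/7));
  // ...while a use two instructions later does not.
  assert(CanBeSpilledAt(/*pos=*/4, /*next_register_use=*/8));
}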


-LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
-  LOperand* op = NULL;
+InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
+  InstructionOperand* op = NULL;
   if (HasRegisterAssigned()) {
     ASSERT(!IsSpilled());
     switch (Kind()) {
       case GENERAL_REGISTERS:
-        op = LRegister::Create(assigned_register(), zone);
+        op = RegisterOperand::Create(assigned_register(), zone);
         break;
       case DOUBLE_REGISTERS:
-        op = LDoubleRegister::Create(assigned_register(), zone);
+        op = DoubleRegisterOperand::Create(assigned_register(), zone);
         break;
       default:
         UNREACHABLE();
     }
   } else if (IsSpilled()) {
     ASSERT(!HasRegisterAssigned());
     op = TopLevel()->GetSpillOperand();
     ASSERT(!op->IsUnallocated());
   } else {
-    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
+    UnallocatedOperand* unalloc =
+        new (zone) UnallocatedOperand(UnallocatedOperand::NONE);
     unalloc->set_virtual_register(id_);
     op = unalloc;
   }
   return op;
 }


 UseInterval* LiveRange::FirstSearchIntervalForPosition(
     LifetimePosition position) const {
   if (current_interval_ == NULL) return first_interval_;
   if (current_interval_->start().Value() > position.Value()) {
     current_interval_ = NULL;
     return first_interval_;
   }
   return current_interval_;
 }


 void LiveRange::AdvanceLastProcessedMarker(
     UseInterval* to_start_of, LifetimePosition but_not_past) const {
   if (to_start_of == NULL) return;
   if (to_start_of->start().Value() > but_not_past.Value()) return;
-  LifetimePosition start =
-      current_interval_ == NULL ? LifetimePosition::Invalid()
-                                : current_interval_->start();
+  LifetimePosition start = current_interval_ == NULL
+                               ? LifetimePosition::Invalid()
+                               : current_interval_->start();
   if (to_start_of->start().Value() > start.Value()) {
     current_interval_ = to_start_of;
   }
 }

-void LiveRange::SplitAt(LifetimePosition position,
-                        LiveRange* result,
+void LiveRange::SplitAt(LifetimePosition position, LiveRange* result,
                         Zone* zone) {
   ASSERT(Start().Value() < position.Value());
   ASSERT(result->IsEmpty());
   // Find the last interval that ends before the position. If the
   // position is contained in one of the intervals in the chain, we
   // split that interval and use the first part.
   UseInterval* current = FirstSearchIntervalForPosition(position);

   // If the split position coincides with the beginning of a use interval
   // we need to split use positions in a special way.
(...skipping 13 matching lines...)
     if (next->start().Value() >= position.Value()) {
       split_at_start = (next->start().Value() == position.Value());
       break;
     }
     current = next;
   }

   // Partition original use intervals to the two live ranges.
   UseInterval* before = current;
   UseInterval* after = before->next();
-  result->last_interval_ = (last_interval_ == before)
-      ? after            // Only interval in the range after split.
-      : last_interval_;  // Last interval of the original range.
+  result->last_interval_ =
+      (last_interval_ == before)
+          ? after            // Only interval in the range after split.
+          : last_interval_;  // Last interval of the original range.
   result->first_interval_ = after;
   last_interval_ = before;

   // Find the last use position before the split and the first use
   // position after it.
   UsePosition* use_after = first_pos_;
   UsePosition* use_before = NULL;
   if (split_at_start) {
     // The split position coincides with the beginning of a use interval (the
     // end of a lifetime hole). Use at this position should be attributed to
(...skipping 49 matching lines...)
     if (pos == NULL) return false;
     UsePosition* other_pos = other->first_pos();
     if (other_pos == NULL) return true;
     return pos->pos().Value() < other_pos->pos().Value();
   }
   return start.Value() < other_start.Value();
 }


 void LiveRange::ShortenTo(LifetimePosition start) {
-  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
+  RegisterAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_,
+                                start.Value());
   ASSERT(first_interval_ != NULL);
   ASSERT(first_interval_->start().Value() <= start.Value());
   ASSERT(start.Value() < first_interval_->end().Value());
   first_interval_->set_start(start);
 }


-void LiveRange::EnsureInterval(LifetimePosition start,
-                               LifetimePosition end,
+void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end,
                                Zone* zone) {
-  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
-                         id_,
-                         start.Value(),
-                         end.Value());
+  RegisterAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
+                                id_, start.Value(), end.Value());
   LifetimePosition new_end = end;
   while (first_interval_ != NULL &&
          first_interval_->start().Value() <= end.Value()) {
     if (first_interval_->end().Value() > end.Value()) {
       new_end = first_interval_->end();
     }
     first_interval_ = first_interval_->next();
   }

-  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
+  UseInterval* new_interval = new (zone) UseInterval(start, new_end);
   new_interval->next_ = first_interval_;
   first_interval_ = new_interval;
   if (new_interval->next() == NULL) {
     last_interval_ = new_interval;
   }
 }


-void LiveRange::AddUseInterval(LifetimePosition start,
-                               LifetimePosition end,
+void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end,
                                Zone* zone) {
-  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
-                         id_,
-                         start.Value(),
-                         end.Value());
+  RegisterAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n", id_,
+                                start.Value(), end.Value());
   if (first_interval_ == NULL) {
-    UseInterval* interval = new(zone) UseInterval(start, end);
+    UseInterval* interval = new (zone) UseInterval(start, end);
     first_interval_ = interval;
     last_interval_ = interval;
   } else {
     if (end.Value() == first_interval_->start().Value()) {
       first_interval_->set_start(start);
     } else if (end.Value() < first_interval_->start().Value()) {
-      UseInterval* interval = new(zone) UseInterval(start, end);
+      UseInterval* interval = new (zone) UseInterval(start, end);
       interval->set_next(first_interval_);
       first_interval_ = interval;
     } else {
       // Order of instruction's processing (see ProcessInstructions) guarantees
       // that each new use interval either precedes or intersects with
       // last added interval.
       ASSERT(start.Value() < first_interval_->end().Value());
       first_interval_->start_ = Min(start, first_interval_->start_);
       first_interval_->end_ = Max(end, first_interval_->end_);
     }
   }
 }
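
A standalone sketch of the prepend-or-merge cases above. Because
ProcessInstructions walks blocks backwards, intervals arrive in reverse
order and a new interval can never land after the current head; the sketch
tracks only the head interval for brevity (names invented):

#include <algorithm>
#include <cassert>

struct Range {
  int first_start = -1, first_end = -1;  // head interval only; -1 == empty

  void AddUseInterval(int start, int end) {
    if (first_start < 0) {            // empty range: first interval
      first_start = start;
      first_end = end;
    } else if (end == first_start) {  // abuts the head: extend it downwards
      first_start = start;
    } else if (end < first_start) {   // strictly precedes: becomes new head
      first_start = start;            // (the real code links a new node)
      first_end = end;
    } else {                          // overlaps the head: merge
      assert(start < first_end);
      first_start = std::min(start, first_start);
      first_end = std::max(end, first_end);
    }
  }
};

int main() {
  Range r;
  r.AddUseInterval(20, 30);
  r.AddUseInterval(10, 20);  // abuts: head becomes [10, 30[
  assert(r.first_start == 10 && r.first_end == 30);
  r.AddUseInterval(5, 15);   // overlaps: head becomes [5, 30[
  assert(r.first_start == 5 && r.first_end == 30);
}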


 void LiveRange::AddUsePosition(LifetimePosition pos,
-                               LOperand* operand,
-                               LOperand* hint,
-                               Zone* zone) {
-  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
-                         id_,
-                         pos.Value());
-  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
+                               InstructionOperand* operand,
+                               InstructionOperand* hint, Zone* zone) {
+  RegisterAllocator::TraceAlloc("Add to live range %d use position %d\n", id_,
+                                pos.Value());
+  UsePosition* use_pos = new (zone) UsePosition(pos, operand, hint);
   UsePosition* prev_hint = NULL;
   UsePosition* prev = NULL;
   UsePosition* current = first_pos_;
   while (current != NULL && current->pos().Value() < pos.Value()) {
     prev_hint = current->HasHint() ? current : prev_hint;
     prev = current;
     current = current->next();
   }

   if (prev == NULL) {
     use_pos->set_next(first_pos_);
     first_pos_ = use_pos;
   } else {
     use_pos->next_ = prev->next_;
     prev->next_ = use_pos;
   }

   if (prev_hint == NULL && use_pos->HasHint()) {
     current_hint_operand_ = hint;
   }
 }


 void LiveRange::ConvertOperands(Zone* zone) {
-  LOperand* op = CreateAssignedOperand(zone);
+  InstructionOperand* op = CreateAssignedOperand(zone);
   UsePosition* use_pos = first_pos();
   while (use_pos != NULL) {
     ASSERT(Start().Value() <= use_pos->pos().Value() &&
            use_pos->pos().Value() <= End().Value());

     if (use_pos->HasOperand()) {
       ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
              !use_pos->RequiresRegister());
       use_pos->operand()->ConvertTo(op->kind(), op->index());
     }
     use_pos = use_pos->next();
   }
 }


 bool LiveRange::CanCover(LifetimePosition position) const {
   if (IsEmpty()) return false;
   return Start().Value() <= position.Value() &&
          position.Value() < End().Value();
 }


 bool LiveRange::Covers(LifetimePosition position) {
   if (!CanCover(position)) return false;
   UseInterval* start_search = FirstSearchIntervalForPosition(position);
-  for (UseInterval* interval = start_search;
-       interval != NULL;
+  for (UseInterval* interval = start_search; interval != NULL;
        interval = interval->next()) {
     ASSERT(interval->next() == NULL ||
            interval->next()->start().Value() >= interval->start().Value());
     AdvanceLastProcessedMarker(interval, position);
     if (interval->Contains(position)) return true;
     if (interval->start().Value() > position.Value()) return false;
   }
   return false;
 }

(...skipping 15 matching lines...)
       if (a == NULL || a->start().Value() > other->End().Value()) break;
       AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
     } else {
       b = b->next();
     }
   }
   return LifetimePosition::Invalid();
 }
528 500
529 501
530 LAllocator::LAllocator(int num_values, HGraph* graph) 502 RegisterAllocator::RegisterAllocator(InstructionSequence* code)
531 : zone_(graph->isolate()), 503 : zone_(code->isolate()),
532 chunk_(NULL), 504 code_(code),
533 live_in_sets_(graph->blocks()->length(), zone()), 505 live_in_sets_(code->BasicBlockCount(), zone()),
534 live_ranges_(num_values * 2, zone()), 506 live_ranges_(code->VirtualRegisterCount() * 2, zone()),
535 fixed_live_ranges_(NULL), 507 fixed_live_ranges_(NULL),
536 fixed_double_live_ranges_(NULL), 508 fixed_double_live_ranges_(NULL),
537 unhandled_live_ranges_(num_values * 2, zone()), 509 unhandled_live_ranges_(code->VirtualRegisterCount() * 2, zone()),
538 active_live_ranges_(8, zone()), 510 active_live_ranges_(8, zone()),
539 inactive_live_ranges_(8, zone()), 511 inactive_live_ranges_(8, zone()),
540 reusable_slots_(8, zone()), 512 reusable_slots_(8, zone()),
541 next_virtual_register_(num_values),
542 first_artificial_register_(num_values),
543 mode_(UNALLOCATED_REGISTERS), 513 mode_(UNALLOCATED_REGISTERS),
544 num_registers_(-1), 514 num_registers_(-1),
545 graph_(graph), 515 allocation_ok_(true) {}
546 has_osr_entry_(false),
547 allocation_ok_(true) { }
548 516
549 517
550 void LAllocator::InitializeLivenessAnalysis() { 518 void RegisterAllocator::InitializeLivenessAnalysis() {
551 // Initialize the live_in sets for each block to NULL. 519 // Initialize the live_in sets for each block to NULL.
552 int block_count = graph_->blocks()->length(); 520 int block_count = code()->BasicBlockCount();
553 live_in_sets_.Initialize(block_count, zone()); 521 live_in_sets_.Initialize(block_count, zone());
554 live_in_sets_.AddBlock(NULL, block_count, zone()); 522 live_in_sets_.AddBlock(NULL, block_count, zone());
555 } 523 }


-BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
+BitVector* RegisterAllocator::ComputeLiveOut(BasicBlock* block) {
   // Compute live out for the given block, except not including backward
   // successor edges.
-  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());
+  BitVector* live_out =
+      new (zone()) BitVector(code()->VirtualRegisterCount(), zone());

   // Process all successor blocks.
-  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
+  BasicBlock::Successors successors = block->successors();
+  for (BasicBlock::Successors::iterator i = successors.begin();
+       i != successors.end(); ++i) {
     // Add values live on entry to the successor. Note the successor's
     // live_in will not be computed yet for backwards edges.
-    HBasicBlock* successor = it.Current();
-    BitVector* live_in = live_in_sets_[successor->block_id()];
+    BasicBlock* successor = *i;
+    BitVector* live_in = live_in_sets_[successor->rpo_number_];
     if (live_in != NULL) live_out->Union(*live_in);

     // All phi input operands corresponding to this successor edge are live
     // out from this block.
     int index = successor->PredecessorIndexOf(block);
-    const ZoneList<HPhi*>* phis = successor->phis();
-    for (int i = 0; i < phis->length(); ++i) {
-      HPhi* phi = phis->at(i);
-      if (!phi->OperandAt(index)->IsConstant()) {
-        live_out->Add(phi->OperandAt(index)->id());
-      }
+    ASSERT(index >= 0);
+    ASSERT(index < static_cast<int>(successor->PredecessorCount()));
+    for (BasicBlock::const_iterator j = successor->begin();
+         j != successor->end(); ++j) {
+      Node* phi = *j;
+      if (phi->opcode() != IrOpcode::kPhi) continue;
+      Node* input = phi->InputAt(index);
+      live_out->Add(input->id());
     }
   }

   return live_out;
 }
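
A standalone sketch of the same rule with ordinary containers: live-out(B)
is the union of each successor's live-in plus, for every phi in a
successor, the one phi input that flows along the B -> successor edge
(block layout and ids invented for the example):

#include <cassert>
#include <set>
#include <utility>
#include <vector>

struct Block {
  std::set<int> live_in;
  // One entry per phi: its inputs, indexed by predecessor position.
  std::vector<std::vector<int>> phi_inputs;
};

// `successors` pairs each successor with our index in its predecessor list.
std::set<int> ComputeLiveOut(
    const std::vector<std::pair<const Block*, int>>& successors) {
  std::set<int> live_out;
  for (const auto& edge : successors) {
    const Block* succ = edge.first;
    int pred_index = edge.second;
    live_out.insert(succ->live_in.begin(), succ->live_in.end());
    for (const auto& phi : succ->phi_inputs) {
      live_out.insert(phi[pred_index]);  // only the input on this edge
    }
  }
  return live_out;
}

int main() {
  Block s;
  s.live_in = {7, 9};
  s.phi_inputs = {{4, 5}};  // the phi takes 4 from pred 0, 5 from pred 1
  std::set<int> out = ComputeLiveOut({{&s, 1}});
  assert(out == std::set<int>({5, 7, 9}));  // input 5 chosen, 4 not live
}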


-void LAllocator::AddInitialIntervals(HBasicBlock* block,
-                                     BitVector* live_out) {
+void RegisterAllocator::AddInitialIntervals(BasicBlock* block,
+                                            BitVector* live_out) {
   // Add an interval that includes the entire block to the live range for
   // each live_out value.
-  LifetimePosition start = LifetimePosition::FromInstructionIndex(
-      block->first_instruction_index());
+  LifetimePosition start =
+      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
   LifetimePosition end = LifetimePosition::FromInstructionIndex(
       block->last_instruction_index()).NextInstruction();
   BitVector::Iterator iterator(live_out);
   while (!iterator.Done()) {
     int operand_index = iterator.Current();
     LiveRange* range = LiveRangeFor(operand_index);
     range->AddUseInterval(start, end, zone());
     iterator.Advance();
   }
 }


-int LAllocator::FixedDoubleLiveRangeID(int index) {
+int RegisterAllocator::FixedDoubleLiveRangeID(int index) {
   return -index - 1 - Register::kMaxNumAllocatableRegisters;
 }
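
A sketch of the live-range id packing this implies: virtual registers use
ids >= 0, fixed general registers are folded to -1 .. -R, and fixed double
registers are shifted past them so every id stays unique. (FixedLiveRangeID
for general registers is assumed here to be -index - 1, matching the offset
used above; the register count is invented.)

#include <cassert>

const int kMaxGeneralRegisters = 16;  // stand-in for the real constant

int FixedLiveRangeID(int index) { return -index - 1; }  // assumed form
int FixedDoubleLiveRangeID(int index) {
  return -index - 1 - kMaxGeneralRegisters;
}

int main() {
  assert(FixedLiveRangeID(0) == -1);
  assert(FixedLiveRangeID(kMaxGeneralRegisters - 1) == -kMaxGeneralRegisters);
  // Double ids start right after the general ids; the ranges never overlap.
  assert(FixedDoubleLiveRangeID(0) == -kMaxGeneralRegisters - 1);
}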


-LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
-                                    int pos,
-                                    bool is_tagged) {
+InstructionOperand* RegisterAllocator::AllocateFixed(
+    UnallocatedOperand* operand, int pos, bool is_tagged) {
   TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
   ASSERT(operand->HasFixedPolicy());
   if (operand->HasFixedSlotPolicy()) {
-    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
+    operand->ConvertTo(InstructionOperand::STACK_SLOT,
+                       operand->fixed_slot_index());
   } else if (operand->HasFixedRegisterPolicy()) {
     int reg_index = operand->fixed_register_index();
-    operand->ConvertTo(LOperand::REGISTER, reg_index);
+    operand->ConvertTo(InstructionOperand::REGISTER, reg_index);
   } else if (operand->HasFixedDoubleRegisterPolicy()) {
     int reg_index = operand->fixed_register_index();
-    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
+    operand->ConvertTo(InstructionOperand::DOUBLE_REGISTER, reg_index);
   } else {
     UNREACHABLE();
   }
   if (is_tagged) {
     TraceAlloc("Fixed reg is tagged at %d\n", pos);
-    LInstruction* instr = InstructionAt(pos);
+    Instruction* instr = InstructionAt(pos);
     if (instr->HasPointerMap()) {
-      instr->pointer_map()->RecordPointer(operand, chunk()->zone());
+      instr->pointer_map()->RecordPointer(operand, code_zone());
     }
   }
   return operand;
 }


-LiveRange* LAllocator::FixedLiveRangeFor(int index) {
+LiveRange* RegisterAllocator::FixedLiveRangeFor(int index) {
   ASSERT(index < Register::kMaxNumAllocatableRegisters);
   LiveRange* result = fixed_live_ranges_[index];
   if (result == NULL) {
-    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
+    // TODO(titzer): add a utility method to allocate a new LiveRange:
+    // The LiveRange object itself can go in this zone, but the
+    // InstructionOperand needs to go in the code zone, since it may survive
+    // register allocation.
+    result = new (zone()) LiveRange(FixedLiveRangeID(index), code_zone());
     ASSERT(result->IsFixed());
     result->kind_ = GENERAL_REGISTERS;
     SetLiveRangeAssignedRegister(result, index);
     fixed_live_ranges_[index] = result;
   }
   return result;
 }


-LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
+LiveRange* RegisterAllocator::FixedDoubleLiveRangeFor(int index) {
   ASSERT(index < DoubleRegister::NumAllocatableRegisters());
   LiveRange* result = fixed_double_live_ranges_[index];
   if (result == NULL) {
-    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
-                                   chunk()->zone());
+    result = new (zone()) LiveRange(FixedDoubleLiveRangeID(index), code_zone());
     ASSERT(result->IsFixed());
     result->kind_ = DOUBLE_REGISTERS;
     SetLiveRangeAssignedRegister(result, index);
     fixed_double_live_ranges_[index] = result;
   }
   return result;
 }


-LiveRange* LAllocator::LiveRangeFor(int index) {
+LiveRange* RegisterAllocator::LiveRangeFor(int index) {
   if (index >= live_ranges_.length()) {
     live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
   }
   LiveRange* result = live_ranges_[index];
   if (result == NULL) {
-    result = new(zone()) LiveRange(index, chunk()->zone());
+    result = new (zone()) LiveRange(index, code_zone());
     live_ranges_[index] = result;
   }
   return result;
 }


-LGap* LAllocator::GetLastGap(HBasicBlock* block) {
+GapInstruction* RegisterAllocator::GetLastGap(BasicBlock* block) {
   int last_instruction = block->last_instruction_index();
-  int index = chunk_->NearestGapPos(last_instruction);
-  return GapAt(index);
+  return code()->GapAt(last_instruction - 1);
 }


-HPhi* LAllocator::LookupPhi(LOperand* operand) const {
-  if (!operand->IsUnallocated()) return NULL;
-  int index = LUnallocated::cast(operand)->virtual_register();
-  HValue* instr = graph_->LookupValue(index);
-  if (instr != NULL && instr->IsPhi()) {
-    return HPhi::cast(instr);
-  }
-  return NULL;
-}
-
-
-LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
+LiveRange* RegisterAllocator::LiveRangeFor(InstructionOperand* operand) {
   if (operand->IsUnallocated()) {
-    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
+    return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register());
   } else if (operand->IsRegister()) {
     return FixedLiveRangeFor(operand->index());
   } else if (operand->IsDoubleRegister()) {
     return FixedDoubleLiveRangeFor(operand->index());
   } else {
     return NULL;
   }
 }


-void LAllocator::Define(LifetimePosition position,
-                        LOperand* operand,
-                        LOperand* hint) {
+void RegisterAllocator::Define(LifetimePosition position,
+                               InstructionOperand* operand,
+                               InstructionOperand* hint) {
   LiveRange* range = LiveRangeFor(operand);
   if (range == NULL) return;

   if (range->IsEmpty() || range->Start().Value() > position.Value()) {
     // Can happen if there is a definition without use.
     range->AddUseInterval(position, position.NextInstruction(), zone());
     range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
   } else {
     range->ShortenTo(position);
   }

   if (operand->IsUnallocated()) {
-    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
+    UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
     range->AddUsePosition(position, unalloc_operand, hint, zone());
   }
 }


-void LAllocator::Use(LifetimePosition block_start,
-                     LifetimePosition position,
-                     LOperand* operand,
-                     LOperand* hint) {
+void RegisterAllocator::Use(LifetimePosition block_start,
+                            LifetimePosition position,
+                            InstructionOperand* operand,
+                            InstructionOperand* hint) {
   LiveRange* range = LiveRangeFor(operand);
   if (range == NULL) return;
   if (operand->IsUnallocated()) {
-    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
+    UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
     range->AddUsePosition(position, unalloc_operand, hint, zone());
   }
   range->AddUseInterval(block_start, position, zone());
 }


-void LAllocator::AddConstraintsGapMove(int index,
-                                       LOperand* from,
-                                       LOperand* to) {
-  LGap* gap = GapAt(index);
-  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
-                                                     chunk()->zone());
+void RegisterAllocator::AddConstraintsGapMove(int index,
+                                              InstructionOperand* from,
+                                              InstructionOperand* to) {
+  GapInstruction* gap = code()->GapAt(index);
+  ParallelMove* move =
+      gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
   if (from->IsUnallocated()) {
-    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
+    const ZoneList<MoveOperands>* move_operands = move->move_operands();
     for (int i = 0; i < move_operands->length(); ++i) {
-      LMoveOperands cur = move_operands->at(i);
-      LOperand* cur_to = cur.destination();
+      MoveOperands cur = move_operands->at(i);
+      InstructionOperand* cur_to = cur.destination();
       if (cur_to->IsUnallocated()) {
-        if (LUnallocated::cast(cur_to)->virtual_register() ==
-            LUnallocated::cast(from)->virtual_register()) {
-          move->AddMove(cur.source(), to, chunk()->zone());
+        if (UnallocatedOperand::cast(cur_to)->virtual_register() ==
+            UnallocatedOperand::cast(from)->virtual_register()) {
+          move->AddMove(cur.source(), to, code_zone());
           return;
         }
       }
     }
   }
-  move->AddMove(from, to, chunk()->zone());
+  move->AddMove(from, to, code_zone());
 }


-void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
+void RegisterAllocator::MeetRegisterConstraints(BasicBlock* block) {
   int start = block->first_instruction_index();
   int end = block->last_instruction_index();
-  if (start == -1) return;
+  ASSERT_NE(-1, start);
   for (int i = start; i <= end; ++i) {
-    if (IsGapAt(i)) {
-      LInstruction* instr = NULL;
-      LInstruction* prev_instr = NULL;
+    if (code()->IsGapAt(i)) {
+      Instruction* instr = NULL;
+      Instruction* prev_instr = NULL;
       if (i < end) instr = InstructionAt(i + 1);
       if (i > start) prev_instr = InstructionAt(i - 1);
       MeetConstraintsBetween(prev_instr, instr, i);
       if (!AllocationOk()) return;
     }
   }
 }


-void LAllocator::MeetConstraintsBetween(LInstruction* first,
-                                        LInstruction* second,
-                                        int gap_index) {
-  // Handle fixed temporaries.
-  if (first != NULL) {
-    for (TempIterator it(first); !it.Done(); it.Advance()) {
-      LUnallocated* temp = LUnallocated::cast(it.Current());
-      if (temp->HasFixedPolicy()) {
-        AllocateFixed(temp, gap_index - 1, false);
-      }
-    }
-  }
-
-  // Handle fixed output operand.
-  if (first != NULL && first->Output() != NULL) {
-    LUnallocated* first_output = LUnallocated::cast(first->Output());
-    LiveRange* range = LiveRangeFor(first_output->virtual_register());
-    bool assigned = false;
-    if (first_output->HasFixedPolicy()) {
-      LUnallocated* output_copy = first_output->CopyUnconstrained(
-          chunk()->zone());
-      bool is_tagged = HasTaggedValue(first_output->virtual_register());
-      AllocateFixed(first_output, gap_index, is_tagged);
-
-      // This value is produced on the stack, we never need to spill it.
-      if (first_output->IsStackSlot()) {
-        range->SetSpillOperand(first_output);
-        range->SetSpillStartIndex(gap_index - 1);
-        assigned = true;
-      }
-      chunk_->AddGapMove(gap_index, first_output, output_copy);
-    }
-
-    if (!assigned) {
-      range->SetSpillStartIndex(gap_index);
-
-      // This move to spill operand is not a real use. Liveness analysis
-      // and splitting of live ranges do not account for it.
-      // Thus it should be inserted to a lifetime position corresponding to
-      // the instruction end.
-      LGap* gap = GapAt(gap_index);
-      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
-                                                         chunk()->zone());
-      move->AddMove(first_output, range->GetSpillOperand(),
-                    chunk()->zone());
-    }
-  }
-
-  // Handle fixed input operands of second instruction.
-  if (second != NULL) {
-    for (UseIterator it(second); !it.Done(); it.Advance()) {
-      LUnallocated* cur_input = LUnallocated::cast(it.Current());
-      if (cur_input->HasFixedPolicy()) {
-        LUnallocated* input_copy = cur_input->CopyUnconstrained(
-            chunk()->zone());
-        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
-        AllocateFixed(cur_input, gap_index + 1, is_tagged);
-        AddConstraintsGapMove(gap_index, input_copy, cur_input);
-      } else if (cur_input->HasWritableRegisterPolicy()) {
-        // The live range of writable input registers always goes until the end
-        // of the instruction.
-        ASSERT(!cur_input->IsUsedAtStart());
-
-        LUnallocated* input_copy = cur_input->CopyUnconstrained(
-            chunk()->zone());
-        int vreg = GetVirtualRegister();
-        if (!AllocationOk()) return;
-        cur_input->set_virtual_register(vreg);
-
-        if (RequiredRegisterKind(input_copy->virtual_register()) ==
-            DOUBLE_REGISTERS) {
-          double_artificial_registers_.Add(
-              cur_input->virtual_register() - first_artificial_register_,
-              zone());
-        }
-
-        AddConstraintsGapMove(gap_index, input_copy, cur_input);
-      }
-    }
-  }
-
-  // Handle "output same as input" for second instruction.
-  if (second != NULL && second->Output() != NULL) {
-    LUnallocated* second_output = LUnallocated::cast(second->Output());
-    if (second_output->HasSameAsInputPolicy()) {
-      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
-      int output_vreg = second_output->virtual_register();
-      int input_vreg = cur_input->virtual_register();
-
-      LUnallocated* input_copy = cur_input->CopyUnconstrained(
-          chunk()->zone());
-      cur_input->set_virtual_register(second_output->virtual_register());
-      AddConstraintsGapMove(gap_index, input_copy, cur_input);
-
-      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
-        int index = gap_index + 1;
-        LInstruction* instr = InstructionAt(index);
-        if (instr->HasPointerMap()) {
-          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
-        }
-      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
-        // The input is assumed to immediately have a tagged representation,
-        // before the pointer map can be used. I.e. the pointer map at the
-        // instruction will include the output operand (whose value at the
-        // beginning of the instruction is equal to the input operand). If
-        // this is not desired, then the pointer map at this instruction needs
-        // to be adjusted manually.
-      }
-    }
-  }
-}
+void RegisterAllocator::MeetConstraintsBetween(Instruction* first,
+                                               Instruction* second,
+                                               int gap_index) {
+  if (first != NULL) {
+    // Handle fixed temporaries.
+    for (size_t i = 0; i < first->TempCount(); i++) {
+      UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
+      if (temp->HasFixedPolicy()) {
+        AllocateFixed(temp, gap_index - 1, false);
+      }
+    }
+
+    // Handle constant/fixed output operands.
+    for (size_t i = 0; i < first->OutputCount(); i++) {
+      InstructionOperand* output = first->OutputAt(i);
+      if (output->IsConstant()) {
+        int output_vreg = output->index();
+        LiveRange* range = LiveRangeFor(output_vreg);
+        range->SetSpillStartIndex(gap_index - 1);
+        range->SetSpillOperand(output);
+      } else {
+        UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
+        LiveRange* range = LiveRangeFor(first_output->virtual_register());
+        bool assigned = false;
+        if (first_output->HasFixedPolicy()) {
+          UnallocatedOperand* output_copy =
+              first_output->CopyUnconstrained(code_zone());
+          bool is_tagged = HasTaggedValue(first_output->virtual_register());
+          AllocateFixed(first_output, gap_index, is_tagged);
+
+          // This value is produced on the stack, we never need to spill it.
+          if (first_output->IsStackSlot()) {
+            range->SetSpillOperand(first_output);
+            range->SetSpillStartIndex(gap_index - 1);
+            assigned = true;
+          }
+          code()->AddGapMove(gap_index, first_output, output_copy);
+        }
+
+        if (!assigned) {
+          range->SetSpillStartIndex(gap_index);
+
+          // This move to spill operand is not a real use. Liveness analysis
+          // and splitting of live ranges do not account for it.
+          // Thus it should be inserted to a lifetime position corresponding to
+          // the instruction end.
+          GapInstruction* gap = code()->GapAt(gap_index);
+          ParallelMove* move =
+              gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone());
+          move->AddMove(first_output, range->GetSpillOperand(), code_zone());
+        }
+      }
+    }
+  }
+
+  if (second != NULL) {
+    // Handle fixed input operands of second instruction.
+    for (size_t i = 0; i < second->InputCount(); i++) {
+      InstructionOperand* input = second->InputAt(i);
+      if (input->IsImmediate()) continue;  // Ignore immediates.
+      UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
+      if (cur_input->HasFixedPolicy()) {
+        UnallocatedOperand* input_copy =
+            cur_input->CopyUnconstrained(code_zone());
+        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
+        AllocateFixed(cur_input, gap_index + 1, is_tagged);
+        AddConstraintsGapMove(gap_index, input_copy, cur_input);
+      }
+    }
+
+    // Handle "output same as input" for second instruction.
+    for (size_t i = 0; i < second->OutputCount(); i++) {
+      InstructionOperand* output = second->Output();
+      if (!output->IsUnallocated()) continue;
+      UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
+      if (second_output->HasSameAsInputPolicy()) {
+        ASSERT(second->OutputCount() == 1);  // Only valid for one output.
+        UnallocatedOperand* cur_input =
+            UnallocatedOperand::cast(second->InputAt(0));
+        int output_vreg = second_output->virtual_register();
+        int input_vreg = cur_input->virtual_register();
+
+        UnallocatedOperand* input_copy =
+            cur_input->CopyUnconstrained(code_zone());
+        cur_input->set_virtual_register(second_output->virtual_register());
+        AddConstraintsGapMove(gap_index, input_copy, cur_input);
+
+        if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
+          int index = gap_index + 1;
+          Instruction* instr = InstructionAt(index);
+          if (instr->HasPointerMap()) {
+            instr->pointer_map()->RecordPointer(input_copy, code_zone());
+          }
+        } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
+          // The input is assumed to immediately have a tagged representation,
+          // before the pointer map can be used. I.e. the pointer map at the
+          // instruction will include the output operand (whose value at the
+          // beginning of the instruction is equal to the input operand). If
+          // this is not desired, then the pointer map at this instruction needs
+          // to be adjusted manually.
+        }
+      }
+    }
+  }
+}
+
+
+bool RegisterAllocator::IsOutputRegisterOf(Instruction* instr, int index) {
+  for (size_t i = 0; i < instr->OutputCount(); i++) {
+    InstructionOperand* output = instr->OutputAt(i);
+    if (output->IsRegister() && output->index() == index) return true;
+  }
+  return false;
+}
+
+
+bool RegisterAllocator::IsOutputDoubleRegisterOf(Instruction* instr,
+                                                 int index) {
+  for (size_t i = 0; i < instr->OutputCount(); i++) {
+    InstructionOperand* output = instr->OutputAt(i);
+    if (output->IsDoubleRegister() && output->index() == index) return true;
+  }
+  return false;
+}
875
876 void RegisterAllocator::ProcessInstructions(BasicBlock* block,
877 BitVector* live) {
900 int block_start = block->first_instruction_index(); 878 int block_start = block->first_instruction_index();
901 int index = block->last_instruction_index();
902 879
903 LifetimePosition block_start_position = 880 LifetimePosition block_start_position =
904 LifetimePosition::FromInstructionIndex(block_start); 881 LifetimePosition::FromInstructionIndex(block_start);
905 882
906 while (index >= block_start) { 883 for (int index = block->last_instruction_index(); index >= block_start;
884 index--) {
907 LifetimePosition curr_position = 885 LifetimePosition curr_position =
908 LifetimePosition::FromInstructionIndex(index); 886 LifetimePosition::FromInstructionIndex(index);
909 887
910 if (IsGapAt(index)) { 888 Instruction* instr = InstructionAt(index);
911 // We have a gap at this position. 889 ASSERT(instr != NULL);
912 LGap* gap = GapAt(index); 890 if (instr->IsGapMoves()) {
913 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, 891 // Process the moves of the gap instruction, making their sources live.
914 chunk()->zone()); 892 GapInstruction* gap = code()->GapAt(index);
915 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); 893
894 // TODO(titzer): no need to create the parallel move if it doesn't exist.
895 ParallelMove* move =
896 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
897 const ZoneList<MoveOperands>* move_operands = move->move_operands();
916 for (int i = 0; i < move_operands->length(); ++i) { 898 for (int i = 0; i < move_operands->length(); ++i) {
917 LMoveOperands* cur = &move_operands->at(i); 899 MoveOperands* cur = &move_operands->at(i);
918 if (cur->IsIgnored()) continue; 900 if (cur->IsIgnored()) continue;
919 LOperand* from = cur->source(); 901 InstructionOperand* from = cur->source();
920 LOperand* to = cur->destination(); 902 InstructionOperand* to = cur->destination();
921 HPhi* phi = LookupPhi(to); 903 InstructionOperand* hint = to;
922 LOperand* hint = to; 904 if (to->IsUnallocated()) {
923 if (phi != NULL) { 905 int to_vreg = UnallocatedOperand::cast(to)->virtual_register();
924 // This is a phi resolving move. 906 LiveRange* to_range = LiveRangeFor(to_vreg);
925 if (!phi->block()->IsLoopHeader()) { 907 if (to_range->is_phi()) {
926 hint = LiveRangeFor(phi->id())->current_hint_operand(); 908 if (to_range->is_non_loop_phi()) {
927 } 909 hint = to_range->current_hint_operand();
928 } else { 910 }
929 if (to->IsUnallocated()) { 911 } else {
930 if (live->Contains(LUnallocated::cast(to)->virtual_register())) { 912 if (live->Contains(to_vreg)) {
931 Define(curr_position, to, from); 913 Define(curr_position, to, from);
932 live->Remove(LUnallocated::cast(to)->virtual_register()); 914 live->Remove(to_vreg);
933 } else { 915 } else {
934 cur->Eliminate(); 916 cur->Eliminate();
935 continue; 917 continue;
936 } 918 }
937 } else { 919 }
938 Define(curr_position, to, from); 920 } else {
939 } 921 Define(curr_position, to, from);
940 } 922 }
941 Use(block_start_position, curr_position, from, hint); 923 Use(block_start_position, curr_position, from, hint);
942 if (from->IsUnallocated()) { 924 if (from->IsUnallocated()) {
943 live->Add(LUnallocated::cast(from)->virtual_register()); 925 live->Add(UnallocatedOperand::cast(from)->virtual_register());
944 } 926 }
945 } 927 }
946 } else { 928 } else {
947 ASSERT(!IsGapAt(index)); 929 // Process output, inputs, and temps of this non-gap instruction.
948 LInstruction* instr = InstructionAt(index); 930 for (size_t i = 0; i < instr->OutputCount(); i++) {
949 931 InstructionOperand* output = instr->OutputAt(i);
950 if (instr != NULL) { 932 if (output->IsUnallocated()) {
951 LOperand* output = instr->Output(); 933 int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
952 if (output != NULL) { 934 live->Remove(out_vreg);
953 if (output->IsUnallocated()) { 935 } else if (output->IsConstant()) {
954 live->Remove(LUnallocated::cast(output)->virtual_register()); 936 int out_vreg = output->index();
955 } 937 live->Remove(out_vreg);
956 Define(curr_position, output, NULL); 938 }
957 } 939 Define(curr_position, output, NULL);
958 940 }
959 if (instr->ClobbersRegisters()) { 941
960 for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) { 942 if (instr->ClobbersRegisters()) {
961 if (output == NULL || !output->IsRegister() || 943 for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
962 output->index() != i) { 944 if (!IsOutputRegisterOf(instr, i)) {
963 LiveRange* range = FixedLiveRangeFor(i); 945 LiveRange* range = FixedLiveRangeFor(i);
964 range->AddUseInterval(curr_position, 946 range->AddUseInterval(curr_position, curr_position.InstructionEnd(),
965 curr_position.InstructionEnd(), 947 zone());
966 zone()); 948 }
949 }
950 }
951
952 if (instr->ClobbersDoubleRegisters()) {
953 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
954 if (!IsOutputDoubleRegisterOf(instr, i)) {
955 LiveRange* range = FixedDoubleLiveRangeFor(i);
956 range->AddUseInterval(curr_position, curr_position.InstructionEnd(),
957 zone());
958 }
959 }
960 }
961
962 for (size_t i = 0; i < instr->InputCount(); i++) {
963 InstructionOperand* input = instr->InputAt(i);
964 if (input->IsImmediate()) continue; // Ignore immediates.
965 LifetimePosition use_pos;
966 if (input->IsUnallocated() &&
967 UnallocatedOperand::cast(input)->IsUsedAtStart()) {
968 use_pos = curr_position;
969 } else {
970 use_pos = curr_position.InstructionEnd();
971 }
972
973 Use(block_start_position, use_pos, input, NULL);
974 if (input->IsUnallocated()) {
975 live->Add(UnallocatedOperand::cast(input)->virtual_register());
976 }
977 }
978
979 for (size_t i = 0; i < instr->TempCount(); i++) {
980 InstructionOperand* temp = instr->TempAt(i);
981 if (instr->ClobbersTemps()) {
982 if (temp->IsRegister()) continue;
983 if (temp->IsUnallocated()) {
984 UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
985 if (temp_unalloc->HasFixedPolicy()) {
986 continue;
967 } 987 }
968 } 988 }
969 } 989 }
970 990 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
971 if (instr->ClobbersDoubleRegisters(isolate())) { 991 Define(curr_position, temp, NULL);
972 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) { 992 }
973 if (output == NULL || !output->IsDoubleRegister() || 993 }
974 output->index() != i) { 994 }
975 LiveRange* range = FixedDoubleLiveRangeFor(i); 995 }
976 range->AddUseInterval(curr_position, 996
977 curr_position.InstructionEnd(), 997
978 zone()); 998 void RegisterAllocator::ResolvePhis(BasicBlock* block) {
979 } 999 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); ++i) {
980 } 1000 Node* phi = *i;
981 } 1001 if (phi->opcode() != IrOpcode::kPhi) continue;
982 1002
983 for (UseIterator it(instr); !it.Done(); it.Advance()) { 1003 UnallocatedOperand* phi_operand =
984 LOperand* input = it.Current(); 1004 new (code_zone()) UnallocatedOperand(UnallocatedOperand::NONE);
985
986 LifetimePosition use_pos;
987 if (input->IsUnallocated() &&
988 LUnallocated::cast(input)->IsUsedAtStart()) {
989 use_pos = curr_position;
990 } else {
991 use_pos = curr_position.InstructionEnd();
992 }
993
994 Use(block_start_position, use_pos, input, NULL);
995 if (input->IsUnallocated()) {
996 live->Add(LUnallocated::cast(input)->virtual_register());
997 }
998 }
999
1000 for (TempIterator it(instr); !it.Done(); it.Advance()) {
1001 LOperand* temp = it.Current();
1002 if (instr->ClobbersTemps()) {
1003 if (temp->IsRegister()) continue;
1004 if (temp->IsUnallocated()) {
1005 LUnallocated* temp_unalloc = LUnallocated::cast(temp);
1006 if (temp_unalloc->HasFixedPolicy()) {
1007 continue;
1008 }
1009 }
1010 }
1011 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
1012 Define(curr_position, temp, NULL);
1013
1014 if (temp->IsUnallocated()) {
1015 LUnallocated* temp_unalloc = LUnallocated::cast(temp);
1016 if (temp_unalloc->HasDoubleRegisterPolicy()) {
1017 double_artificial_registers_.Add(
1018 temp_unalloc->virtual_register() - first_artificial_register_,
1019 zone());
1020 }
1021 }
1022 }
1023 }
1024 }
1025
1026 index = index - 1;
1027 }
1028 }
1029
1030
1031 void LAllocator::ResolvePhis(HBasicBlock* block) {
1032 const ZoneList<HPhi*>* phis = block->phis();
1033 for (int i = 0; i < phis->length(); ++i) {
1034 HPhi* phi = phis->at(i);
1035 LUnallocated* phi_operand =
1036 new(chunk()->zone()) LUnallocated(LUnallocated::NONE);
1037 phi_operand->set_virtual_register(phi->id()); 1005 phi_operand->set_virtual_register(phi->id());
1038 for (int j = 0; j < phi->OperandCount(); ++j) { 1006
1039 HValue* op = phi->OperandAt(j); 1007 int j = 0;
1040 LOperand* operand = NULL; 1008 Node::Inputs inputs = phi->inputs();
1041 if (op->IsConstant() && op->EmitAtUses()) { 1009 for (Node::Inputs::iterator iter(inputs.begin()); iter != inputs.end();
1042 HConstant* constant = HConstant::cast(op); 1010 ++iter, ++j) {
1043 operand = chunk_->DefineConstantOperand(constant); 1011 Node* op = *iter;
1044 } else { 1012 // TODO(mstarzinger): Use a ValueInputIterator instead.
1045 ASSERT(!op->EmitAtUses()); 1013 if (j >= block->PredecessorCount()) continue;
1046 LUnallocated* unalloc = 1014 UnallocatedOperand* operand =
1047 new(chunk()->zone()) LUnallocated(LUnallocated::ANY); 1015 new (code_zone()) UnallocatedOperand(UnallocatedOperand::ANY);
1048 unalloc->set_virtual_register(op->id()); 1016 operand->set_virtual_register(op->id());
1049 operand = unalloc; 1017 BasicBlock* cur_block = block->PredecessorAt(j);
1050 }
1051 HBasicBlock* cur_block = block->predecessors()->at(j);
1052 // The gap move must be added without any special processing as in 1018 // The gap move must be added without any special processing as in
1053 // the AddConstraintsGapMove. 1019 // the AddConstraintsGapMove.
1054 chunk_->AddGapMove(cur_block->last_instruction_index() - 1, 1020 code()->AddGapMove(cur_block->last_instruction_index() - 1, operand,
1055 operand,
1056 phi_operand); 1021 phi_operand);
1057 1022
1058 // We are going to insert a move before the branch instruction. 1023 Instruction* branch = InstructionAt(cur_block->last_instruction_index());
1059 // Some branch instructions (e.g. loops' back edges) 1024 ASSERT(!branch->HasPointerMap());
1060 // can potentially cause a GC so they have a pointer map. 1025 USE(branch);
1061 // By inserting a move we essentially create a copy of a
1062 // value which is invisible to PopulatePointerMaps(), because we store
1063 // it into a location different from the operand of a live range
1064 // covering a branch instruction.
1065 // Thus we need to manually record a pointer.
1066 LInstruction* branch =
1067 InstructionAt(cur_block->last_instruction_index());
1068 if (branch->HasPointerMap()) {
1069 if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
1070 branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
1071 } else if (!phi->representation().IsDouble()) {
1072 branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
1073 }
1074 }
1075 } 1026 }
1076 1027
1077 LiveRange* live_range = LiveRangeFor(phi->id()); 1028 LiveRange* live_range = LiveRangeFor(phi->id());
1078 LLabel* label = chunk_->GetLabel(phi->block()->block_id()); 1029 BlockStartInstruction* block_start = code()->GetBlockStart(block);
1079 label->GetOrCreateParallelMove(LGap::START, chunk()->zone())-> 1030 block_start->GetOrCreateParallelMove(GapInstruction::START, code_zone())
1080 AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone()); 1031 ->AddMove(phi_operand, live_range->GetSpillOperand(), code_zone());
1081 live_range->SetSpillStartIndex(phi->block()->first_instruction_index()); 1032 live_range->SetSpillStartIndex(block->first_instruction_index());
1082 } 1033
1083 } 1034 // We use the phi-ness of some nodes in some later heuristics.
1084 1035 live_range->set_is_phi(true);
1085 1036 if (!block->IsLoopHeader()) {
1086 bool LAllocator::Allocate(LChunk* chunk) { 1037 live_range->set_is_non_loop_phi(true);
1087 ASSERT(chunk_ == NULL); 1038 }
1088 chunk_ = static_cast<LPlatformChunk*>(chunk); 1039 }
1089 assigned_registers_ = 1040 }
1090 new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(), 1041
1091 chunk->zone()); 1042
1092 assigned_double_registers_ = 1043 bool RegisterAllocator::Allocate() {
1093 new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(), 1044 assigned_registers_ = new (code_zone())
1094 chunk->zone()); 1045 BitVector(Register::NumAllocatableRegisters(), code_zone());
1046 assigned_double_registers_ = new (code_zone())
1047 BitVector(DoubleRegister::NumAllocatableRegisters(), code_zone());
1095 MeetRegisterConstraints(); 1048 MeetRegisterConstraints();
1096 if (!AllocationOk()) return false; 1049 if (!AllocationOk()) return false;
1097 ResolvePhis(); 1050 ResolvePhis();
1098 BuildLiveRanges(); 1051 BuildLiveRanges();
1099 AllocateGeneralRegisters(); 1052 AllocateGeneralRegisters();
1100 if (!AllocationOk()) return false; 1053 if (!AllocationOk()) return false;
1101 AllocateDoubleRegisters(); 1054 AllocateDoubleRegisters();
1102 if (!AllocationOk()) return false; 1055 if (!AllocationOk()) return false;
1103 PopulatePointerMaps(); 1056 PopulatePointerMaps();
1104 ConnectRanges(); 1057 ConnectRanges();
1105 ResolveControlFlow(); 1058 ResolveControlFlow();
1059 code()->frame()->SetAllocatedRegisters(assigned_registers_);
1060 code()->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1106 return true; 1061 return true;
1107 } 1062 }
1108 1063
1109 1064
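An aside on ResolvePhis above: each phi input becomes a gap move appended at the end of the matching predecessor block, and the phi's own live range is additionally spilled at the block start. The shape of that transformation, as a standalone sketch (Move, Block and Phi here are simplified stand-ins, not the V8 classes):

#include <cstdio>
#include <vector>

struct Move { int from_vreg, to_vreg; };
struct Block {
  std::vector<int> predecessors;     // indices of predecessor blocks
  std::vector<Move> end_gap_moves;   // moves in the gap before the branch
};
struct Phi { int vreg; std::vector<int> input_vregs; int block; };

// One move per (phi input, predecessor edge) pair, appended to the gap
// at the end of that predecessor.
void ResolvePhi(const Phi& phi, std::vector<Block>& blocks) {
  Block& b = blocks[phi.block];
  for (size_t j = 0; j < b.predecessors.size(); ++j) {
    blocks[b.predecessors[j]].end_gap_moves.push_back(
        {phi.input_vregs[j], phi.vreg});
  }
}

int main() {
  std::vector<Block> blocks(3);
  blocks[2].predecessors = {0, 1};
  Phi phi = {7, {3, 5}, 2};  // v7 = phi(v3, v5)
  ResolvePhi(phi, blocks);
  std::printf("moves at end of block 0: %zu\n", blocks[0].end_gap_moves.size());
}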
1110 void LAllocator::MeetRegisterConstraints() { 1065 void RegisterAllocator::MeetRegisterConstraints() {
1111 LAllocatorPhase phase("L_Register constraints", this); 1066 RegisterAllocatorPhase phase("L_Register constraints", this);
1112 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); 1067 for (int i = 0; i < code()->BasicBlockCount(); ++i) {
1113 for (int i = 0; i < blocks->length(); ++i) { 1068 MeetRegisterConstraints(code()->BlockAt(i));
1114 HBasicBlock* block = blocks->at(i);
1115 MeetRegisterConstraints(block);
1116 if (!AllocationOk()) return; 1069 if (!AllocationOk()) return;
1117 } 1070 }
1118 } 1071 }
1119 1072
1120 1073
1121 void LAllocator::ResolvePhis() { 1074 void RegisterAllocator::ResolvePhis() {
1122 LAllocatorPhase phase("L_Resolve phis", this); 1075 RegisterAllocatorPhase phase("L_Resolve phis", this);
1123 1076
1124 // Process the blocks in reverse order. 1077 // Process the blocks in reverse order.
1125 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); 1078 for (int i = code()->BasicBlockCount() - 1; i >= 0; --i) {
1126 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { 1079 ResolvePhis(code()->BlockAt(i));
1127 HBasicBlock* block = blocks->at(block_id);
1128 ResolvePhis(block);
1129 } 1080 }
1130 } 1081 }
1131 1082
1132 1083
1133 void LAllocator::ResolveControlFlow(LiveRange* range, 1084 void RegisterAllocator::ResolveControlFlow(LiveRange* range, BasicBlock* block,
1134 HBasicBlock* block, 1085 BasicBlock* pred) {
1135 HBasicBlock* pred) {
1136 LifetimePosition pred_end = 1086 LifetimePosition pred_end =
1137 LifetimePosition::FromInstructionIndex(pred->last_instruction_index()); 1087 LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1138 LifetimePosition cur_start = 1088 LifetimePosition cur_start =
1139 LifetimePosition::FromInstructionIndex(block->first_instruction_index()); 1089 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1140 LiveRange* pred_cover = NULL; 1090 LiveRange* pred_cover = NULL;
1141 LiveRange* cur_cover = NULL; 1091 LiveRange* cur_cover = NULL;
1142 LiveRange* cur_range = range; 1092 LiveRange* cur_range = range;
1143 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) { 1093 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1144 if (cur_range->CanCover(cur_start)) { 1094 if (cur_range->CanCover(cur_start)) {
1145 ASSERT(cur_cover == NULL); 1095 ASSERT(cur_cover == NULL);
1146 cur_cover = cur_range; 1096 cur_cover = cur_range;
1147 } 1097 }
1148 if (cur_range->CanCover(pred_end)) { 1098 if (cur_range->CanCover(pred_end)) {
1149 ASSERT(pred_cover == NULL); 1099 ASSERT(pred_cover == NULL);
1150 pred_cover = cur_range; 1100 pred_cover = cur_range;
1151 } 1101 }
1152 cur_range = cur_range->next(); 1102 cur_range = cur_range->next();
1153 } 1103 }
1154 1104
1155 if (cur_cover->IsSpilled()) return; 1105 if (cur_cover->IsSpilled()) return;
1156 ASSERT(pred_cover != NULL && cur_cover != NULL); 1106 ASSERT(pred_cover != NULL && cur_cover != NULL);
1157 if (pred_cover != cur_cover) { 1107 if (pred_cover != cur_cover) {
1158 LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone()); 1108 InstructionOperand* pred_op =
1159 LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone()); 1109 pred_cover->CreateAssignedOperand(code_zone());
1110 InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone());
1160 if (!pred_op->Equals(cur_op)) { 1111 if (!pred_op->Equals(cur_op)) {
1161 LGap* gap = NULL; 1112 GapInstruction* gap = NULL;
1162 if (block->predecessors()->length() == 1) { 1113 if (block->PredecessorCount() == 1) {
1163 gap = GapAt(block->first_instruction_index()); 1114 gap = code()->GapAt(block->first_instruction_index());
1164 } else { 1115 } else {
1165 ASSERT(pred->end()->SecondSuccessor() == NULL); 1116 ASSERT(pred->SuccessorCount() == 1);
1166 gap = GetLastGap(pred); 1117 gap = GetLastGap(pred);
1167 1118
1168 // We are going to insert a move before the branch instruction. 1119 Instruction* branch = InstructionAt(pred->last_instruction_index());
1169 // Some branch instructions (e.g. loops' back edges) 1120 ASSERT(!branch->HasPointerMap());
1170 // can potentially cause a GC so they have a pointer map. 1121 USE(branch);
1171 // By inserting a move we essentially create a copy of a
1172 // value which is invisible to PopulatePointerMaps(), because we store
1173 // it into a location different from the operand of a live range
1174 // covering a branch instruction.
1175 // Thus we need to manually record a pointer.
1176 LInstruction* branch = InstructionAt(pred->last_instruction_index());
1177 if (branch->HasPointerMap()) {
1178 if (HasTaggedValue(range->id())) {
1179 branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
1180 } else if (!cur_op->IsDoubleStackSlot() &&
1181 !cur_op->IsDoubleRegister()) {
1182 branch->pointer_map()->RemovePointer(cur_op);
1183 }
1184 }
1185 } 1122 }
1186 gap->GetOrCreateParallelMove( 1123 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone())
1187 LGap::START, chunk()->zone())->AddMove(pred_op, cur_op, 1124 ->AddMove(pred_op, cur_op, code_zone());
1188 chunk()->zone());
1189 } 1125 }
1190 } 1126 }
1191 } 1127 }
1192 1128
1193 1129
1194 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) { 1130 ParallelMove* RegisterAllocator::GetConnectingParallelMove(
1131 LifetimePosition pos) {
1195 int index = pos.InstructionIndex(); 1132 int index = pos.InstructionIndex();
1196 if (IsGapAt(index)) { 1133 if (code()->IsGapAt(index)) {
1197 LGap* gap = GapAt(index); 1134 GapInstruction* gap = code()->GapAt(index);
1198 return gap->GetOrCreateParallelMove( 1135 return gap->GetOrCreateParallelMove(
1199 pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone()); 1136 pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END,
1137 code_zone());
1200 } 1138 }
1201 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); 1139 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1202 return GapAt(gap_pos)->GetOrCreateParallelMove( 1140 return code()->GapAt(gap_pos)->GetOrCreateParallelMove(
1203 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone()); 1141 (gap_pos < index) ? GapInstruction::AFTER : GapInstruction::BEFORE,
1142 code_zone());
1204 } 1143 }
1205 1144
1206 1145
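The slot selection in GetConnectingParallelMove is the subtle part: a position that falls on a gap uses that gap's own START or END move, while a position on a real instruction borrows a neighboring gap, taking AFTER of the preceding gap for an instruction start and BEFORE of the following gap for an instruction end. A minimal sketch of just that mapping (types are stand-ins; the gap/instruction layout question is abstracted behind the index_is_gap flag):

#include <cstdio>

enum GapSlot { BEFORE, START, END, AFTER };
struct Target { int gap_index; GapSlot slot; };

Target ConnectingSlot(int index, bool is_instruction_start, bool index_is_gap) {
  if (index_is_gap) {
    return {index, is_instruction_start ? START : END};
  }
  // Not a gap: pick the neighboring gap and the slot facing the instruction.
  int gap = is_instruction_start ? index - 1 : index + 1;
  return {gap, gap < index ? AFTER : BEFORE};
}

int main() {
  Target t = ConnectingSlot(7, true, false);
  std::printf("gap %d, slot %d\n", t.gap_index, t.slot);  // gap 6, AFTER
}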
1207 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) { 1146 BasicBlock* RegisterAllocator::GetBlock(LifetimePosition pos) {
1208 LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex())); 1147 return code()->GetBasicBlock(pos.InstructionIndex());
1209 return gap->block();
1210 } 1148 }
1211 1149
1212 1150
1213 void LAllocator::ConnectRanges() { 1151 void RegisterAllocator::ConnectRanges() {
1214 LAllocatorPhase phase("L_Connect ranges", this); 1152 RegisterAllocatorPhase phase("L_Connect ranges", this);
1215 for (int i = 0; i < live_ranges()->length(); ++i) { 1153 for (int i = 0; i < live_ranges()->length(); ++i) {
1216 LiveRange* first_range = live_ranges()->at(i); 1154 LiveRange* first_range = live_ranges()->at(i);
1217 if (first_range == NULL || first_range->parent() != NULL) continue; 1155 if (first_range == NULL || first_range->parent() != NULL) continue;
1218 1156
1219 LiveRange* second_range = first_range->next(); 1157 LiveRange* second_range = first_range->next();
1220 while (second_range != NULL) { 1158 while (second_range != NULL) {
1221 LifetimePosition pos = second_range->Start(); 1159 LifetimePosition pos = second_range->Start();
1222 1160
1223 if (!second_range->IsSpilled()) { 1161 if (!second_range->IsSpilled()) {
1224 // Add gap move if the two live ranges touch and there is no block 1162 // Add gap move if the two live ranges touch and there is no block
1225 // boundary. 1163 // boundary.
1226 if (first_range->End().Value() == pos.Value()) { 1164 if (first_range->End().Value() == pos.Value()) {
1227 bool should_insert = true; 1165 bool should_insert = true;
1228 if (IsBlockBoundary(pos)) { 1166 if (IsBlockBoundary(pos)) {
1229 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos)); 1167 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1230 } 1168 }
1231 if (should_insert) { 1169 if (should_insert) {
1232 LParallelMove* move = GetConnectingParallelMove(pos); 1170 ParallelMove* move = GetConnectingParallelMove(pos);
1233 LOperand* prev_operand = first_range->CreateAssignedOperand( 1171 InstructionOperand* prev_operand =
1234 chunk()->zone()); 1172 first_range->CreateAssignedOperand(code_zone());
1235 LOperand* cur_operand = second_range->CreateAssignedOperand( 1173 InstructionOperand* cur_operand =
1236 chunk()->zone()); 1174 second_range->CreateAssignedOperand(code_zone());
1237 move->AddMove(prev_operand, cur_operand, 1175 move->AddMove(prev_operand, cur_operand, code_zone());
1238 chunk()->zone());
1239 } 1176 }
1240 } 1177 }
1241 } 1178 }
1242 1179
1243 first_range = second_range; 1180 first_range = second_range;
1244 second_range = second_range->next(); 1181 second_range = second_range->next();
1245 } 1182 }
1246 } 1183 }
1247 } 1184 }
1248 1185
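ConnectRanges walks the pieces of each split live range in order and inserts a move wherever two adjacent pieces touch without a block boundary. Roughly, with simplified types, the boundary check elided, and a location-inequality test standing in for the no-op elision the parallel-move machinery does later:

#include <cstdio>
#include <vector>

struct Piece { int start, end, location; bool spilled; };

// Pieces are the children of one live range, sorted by start position.
void Connect(const std::vector<Piece>& pieces) {
  for (size_t i = 1; i < pieces.size(); ++i) {
    const Piece& prev = pieces[i - 1];
    const Piece& cur = pieces[i];
    if (!cur.spilled && prev.end == cur.start && prev.location != cur.location) {
      std::printf("move loc%d -> loc%d at %d\n", prev.location, cur.location,
                  cur.start);
    }
  }
}

int main() {
  Connect({{0, 10, 1, false}, {10, 20, 3, false}, {20, 30, 3, true}});
}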
1249 1186
1250 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const { 1187 bool RegisterAllocator::CanEagerlyResolveControlFlow(BasicBlock* block) const {
1251 if (block->predecessors()->length() != 1) return false; 1188 if (block->PredecessorCount() != 1) return false;
1252 return block->predecessors()->first()->block_id() == block->block_id() - 1; 1189 return block->PredecessorAt(0)->rpo_number_ == block->rpo_number_ - 1;
1253 } 1190 }
1254 1191
1255 1192
1256 void LAllocator::ResolveControlFlow() { 1193 void RegisterAllocator::ResolveControlFlow() {
1257 LAllocatorPhase phase("L_Resolve control flow", this); 1194 RegisterAllocatorPhase phase("L_Resolve control flow", this);
1258 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); 1195 for (int block_id = 1; block_id < code()->BasicBlockCount(); ++block_id) {
1259 for (int block_id = 1; block_id < blocks->length(); ++block_id) { 1196 BasicBlock* block = code()->BlockAt(block_id);
1260 HBasicBlock* block = blocks->at(block_id);
1261 if (CanEagerlyResolveControlFlow(block)) continue; 1197 if (CanEagerlyResolveControlFlow(block)) continue;
1262 BitVector* live = live_in_sets_[block->block_id()]; 1198 BitVector* live = live_in_sets_[block->rpo_number_];
1263 BitVector::Iterator iterator(live); 1199 BitVector::Iterator iterator(live);
1264 while (!iterator.Done()) { 1200 while (!iterator.Done()) {
1265 int operand_index = iterator.Current(); 1201 int operand_index = iterator.Current();
1266 for (int i = 0; i < block->predecessors()->length(); ++i) { 1202 BasicBlock::Predecessors predecessors = block->predecessors();
1267 HBasicBlock* cur = block->predecessors()->at(i); 1203 for (BasicBlock::Predecessors::iterator i = predecessors.begin();
1204 i != predecessors.end(); ++i) {
1205 BasicBlock* cur = *i;
1268 LiveRange* cur_range = LiveRangeFor(operand_index); 1206 LiveRange* cur_range = LiveRangeFor(operand_index);
1269 ResolveControlFlow(cur_range, block, cur); 1207 ResolveControlFlow(cur_range, block, cur);
1270 } 1208 }
1271 iterator.Advance(); 1209 iterator.Advance();
1272 } 1210 }
1273 } 1211 }
1274 } 1212 }
1275 1213
1276 1214
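Control-flow resolution in one sentence: for every value live into a block, and for every predecessor edge that could not be resolved eagerly (the block has several predecessors, or its single predecessor is not the immediately preceding block in RPO), compare the value's location at the predecessor's end with its location at the block's start and emit a move when they differ. A sketch with maps standing in for the live-range lookups:

#include <cstdio>
#include <map>

using Location = int;

void ResolveEdge(int vreg, const std::map<int, Location>& at_pred_end,
                 const std::map<int, Location>& at_block_start) {
  Location from = at_pred_end.at(vreg);
  Location to = at_block_start.at(vreg);
  if (from != to)
    std::printf("edge move for v%d: loc%d -> loc%d\n", vreg, from, to);
}

int main() {
  std::map<int, Location> pred_end = {{1, 4}};
  std::map<int, Location> block_start = {{1, 2}};
  ResolveEdge(1, pred_end, block_start);  // v1 moves from loc4 to loc2
}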
1277 void LAllocator::BuildLiveRanges() { 1215 void RegisterAllocator::BuildLiveRanges() {
1278 LAllocatorPhase phase("L_Build live ranges", this); 1216 RegisterAllocatorPhase phase("L_Build live ranges", this);
1279 InitializeLivenessAnalysis(); 1217 InitializeLivenessAnalysis();
1280 // Process the blocks in reverse order. 1218 // Process the blocks in reverse order.
1281 const ZoneList<HBasicBlock*>* blocks = graph_->blocks(); 1219 for (int block_id = code()->BasicBlockCount() - 1; block_id >= 0;
1282 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) { 1220 --block_id) {
1283 HBasicBlock* block = blocks->at(block_id); 1221 BasicBlock* block = code()->BlockAt(block_id);
1284 BitVector* live = ComputeLiveOut(block); 1222 BitVector* live = ComputeLiveOut(block);
1285 // Initially consider all live_out values live for the entire block. We 1223 // Initially consider all live_out values live for the entire block. We
1286 // will shorten these intervals if necessary. 1224 // will shorten these intervals if necessary.
1287 AddInitialIntervals(block, live); 1225 AddInitialIntervals(block, live);
1288 1226
1289 // Process the instructions in reverse order, generating and killing 1227 // Process the instructions in reverse order, generating and killing
1290 // live values. 1228 // live values.
1291 ProcessInstructions(block, live); 1229 ProcessInstructions(block, live);
1292 // All phi output operands are killed by this block. 1230 // All phi output operands are killed by this block.
1293 const ZoneList<HPhi*>* phis = block->phis(); 1231 for (BasicBlock::const_iterator i = block->begin(); i != block->end();
1294 for (int i = 0; i < phis->length(); ++i) { 1232 ++i) {
1233 Node* phi = *i;
1234 if (phi->opcode() != IrOpcode::kPhi) continue;
1235
1295 // The live range interval already ends at the first instruction of the 1236 // The live range interval already ends at the first instruction of the
1296 // block. 1237 // block.
1297 HPhi* phi = phis->at(i);
1298 live->Remove(phi->id()); 1238 live->Remove(phi->id());
1299 1239
1300 LOperand* hint = NULL; 1240 InstructionOperand* hint = NULL;
1301 LOperand* phi_operand = NULL; 1241 InstructionOperand* phi_operand = NULL;
1302 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0)); 1242 GapInstruction* gap = GetLastGap(block->PredecessorAt(0));
1303 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, 1243
 1304 chunk()->zone()); 1244 // TODO(titzer): no need to create the parallel move if it doesn't exist.
1245 ParallelMove* move =
1246 gap->GetOrCreateParallelMove(GapInstruction::START, code_zone());
1305 for (int j = 0; j < move->move_operands()->length(); ++j) { 1247 for (int j = 0; j < move->move_operands()->length(); ++j) {
1306 LOperand* to = move->move_operands()->at(j).destination(); 1248 InstructionOperand* to = move->move_operands()->at(j).destination();
1307 if (to->IsUnallocated() && 1249 if (to->IsUnallocated() &&
1308 LUnallocated::cast(to)->virtual_register() == phi->id()) { 1250 UnallocatedOperand::cast(to)->virtual_register() == phi->id()) {
1309 hint = move->move_operands()->at(j).source(); 1251 hint = move->move_operands()->at(j).source();
1310 phi_operand = to; 1252 phi_operand = to;
1311 break; 1253 break;
1312 } 1254 }
1313 } 1255 }
1314 ASSERT(hint != NULL); 1256 ASSERT(hint != NULL);
1315 1257
1316 LifetimePosition block_start = LifetimePosition::FromInstructionIndex( 1258 LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1317 block->first_instruction_index()); 1259 block->first_instruction_index());
1318 Define(block_start, phi_operand, hint); 1260 Define(block_start, phi_operand, hint);
1319 } 1261 }
1320 1262
1321 // Now live is live_in for this block except not including values live 1263 // Now live is live_in for this block except not including values live
1322 // out on backward successor edges. 1264 // out on backward successor edges.
1323 live_in_sets_[block_id] = live; 1265 live_in_sets_[block_id] = live;
1324 1266
1325 // If this block is a loop header go back and patch up the necessary
1326 // predecessor blocks.
1327 if (block->IsLoopHeader()) { 1267 if (block->IsLoopHeader()) {
1328 // TODO(kmillikin): Need to be able to get the last block of the loop 1268 // Add a live range stretching from the first loop instruction to the last
1329 // in the loop information. Add a live range stretching from the first 1269 // for each value live on entry to the header.
1330 // loop instruction to the last for each value live on entry to the
1331 // header.
1332 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1333 BitVector::Iterator iterator(live); 1270 BitVector::Iterator iterator(live);
1334 LifetimePosition start = LifetimePosition::FromInstructionIndex( 1271 LifetimePosition start = LifetimePosition::FromInstructionIndex(
1335 block->first_instruction_index()); 1272 block->first_instruction_index());
1336 LifetimePosition end = LifetimePosition::FromInstructionIndex( 1273 int end_index =
1337 back_edge->last_instruction_index()).NextInstruction(); 1274 code()->BlockAt(block->loop_end_)->last_instruction_index();
1275 LifetimePosition end =
1276 LifetimePosition::FromInstructionIndex(end_index).NextInstruction();
1338 while (!iterator.Done()) { 1277 while (!iterator.Done()) {
1339 int operand_index = iterator.Current(); 1278 int operand_index = iterator.Current();
1340 LiveRange* range = LiveRangeFor(operand_index); 1279 LiveRange* range = LiveRangeFor(operand_index);
1341 range->EnsureInterval(start, end, zone()); 1280 range->EnsureInterval(start, end, zone());
1342 iterator.Advance(); 1281 iterator.Advance();
1343 } 1282 }
1344 1283
1345 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) { 1284 // Insert all values into the live in sets of all blocks in the loop.
1285 for (int i = block->rpo_number_ + 1; i < block->loop_end_; ++i) {
1346 live_in_sets_[i]->Union(*live); 1286 live_in_sets_[i]->Union(*live);
1347 } 1287 }
1348 } 1288 }
1349 1289
1350 #ifdef DEBUG 1290 #ifdef DEBUG
1351 if (block_id == 0) { 1291 if (block_id == 0) {
1352 BitVector::Iterator iterator(live); 1292 BitVector::Iterator iterator(live);
1353 bool found = false; 1293 bool found = false;
1354 while (!iterator.Done()) { 1294 while (!iterator.Done()) {
1355 found = true; 1295 found = true;
1356 int operand_index = iterator.Current(); 1296 int operand_index = iterator.Current();
1357 if (chunk_->info()->IsStub()) { 1297 PrintF("Register allocator error: live v%d reached first block.\n",
1358 CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey(); 1298 operand_index);
1359 PrintF("Function: %s\n", CodeStub::MajorName(major_key, false)); 1299 LiveRange* range = LiveRangeFor(operand_index);
1300 PrintF(" (first use is at %d)\n", range->first_pos()->pos().Value());
1301 CompilationInfo* info = code()->linkage()->info();
1302 if (info->IsStub()) {
1303 if (info->code_stub() == NULL) {
1304 PrintF("\n");
1305 } else {
1306 CodeStub::Major major_key = info->code_stub()->MajorKey();
1307 PrintF(" (function: %s)\n", CodeStub::MajorName(major_key, false));
1308 }
1360 } else { 1309 } else {
1361 ASSERT(chunk_->info()->IsOptimizing()); 1310 ASSERT(info->IsOptimizing());
1362 AllowHandleDereference allow_deref; 1311 AllowHandleDereference allow_deref;
1363 PrintF("Function: %s\n", 1312 PrintF(" (function: %s)\n",
1364 chunk_->info()->function()->debug_name()->ToCString().get()); 1313 info->function()->debug_name()->ToCString().get());
1365 } 1314 }
1366 PrintF("Value %d used before first definition!\n", operand_index);
1367 LiveRange* range = LiveRangeFor(operand_index);
1368 PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1369 iterator.Advance(); 1315 iterator.Advance();
1370 } 1316 }
1371 ASSERT(!found); 1317 ASSERT(!found);
1372 } 1318 }
1373 #endif 1319 #endif
1374 } 1320 }
1375 1321
1376 for (int i = 0; i < live_ranges_.length(); ++i) { 1322 for (int i = 0; i < live_ranges_.length(); ++i) {
1377 if (live_ranges_[i] != NULL) { 1323 if (live_ranges_[i] != NULL) {
1378 live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id()); 1324 live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
1325
1326 // TODO(bmeurer): This is a horrible hack to make sure that for constant
1327 // live ranges, every use requires the constant to be in a register.
1328 // Without this hack, all uses with "any" policy would get the constant
1329 // operand assigned.
1330 LiveRange* range = live_ranges_[i];
1331 if (range->HasAllocatedSpillOperand() &&
1332 range->GetSpillOperand()->IsConstant()) {
1333 for (UsePosition* pos = range->first_pos(); pos != NULL;
1334 pos = pos->next_) {
1335 pos->register_beneficial_ = true;
1336 pos->requires_reg_ = true;
1337 }
1338 }
1379 } 1339 }
1380 } 1340 }
1381 } 1341 }
1382 1342
1383 1343
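The dataflow behind BuildLiveRanges is classic backward liveness: live-out of a block is the union of its successors' live-in, each live-out value is first assumed live across the whole block, and the backward walk then shortens intervals at definitions and extends them at uses; loop headers additionally force their live-in set across the entire loop body. The per-block transfer function, sketched without the within-block ordering:

#include <cstdio>
#include <set>
#include <vector>

struct Blk {
  std::vector<int> successors;
  std::vector<int> defs;  // vregs defined in this block
  std::vector<int> uses;  // vregs used in this block
};

std::set<int> LiveOut(const Blk& b, const std::vector<std::set<int>>& live_in) {
  std::set<int> live;
  for (int s : b.successors) live.insert(live_in[s].begin(), live_in[s].end());
  return live;
}

std::set<int> LiveIn(const Blk& b, std::set<int> live_out) {
  for (int d : b.defs) live_out.erase(d);   // definitions kill
  for (int u : b.uses) live_out.insert(u);  // uses generate
  return live_out;
}

int main() {
  Blk b = {{}, {1}, {2}};
  std::set<int> in = LiveIn(b, {1, 3});  // kills v1, adds v2 -> {2, 3}
  std::printf("live-in size: %zu\n", in.size());
}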
1384 bool LAllocator::SafePointsAreInOrder() const { 1344 bool RegisterAllocator::SafePointsAreInOrder() const {
1385 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1386 int safe_point = 0; 1345 int safe_point = 0;
1387 for (int i = 0; i < pointer_maps->length(); ++i) { 1346 const PointerMapDeque* pointer_maps = code()->pointer_maps();
1388 LPointerMap* map = pointer_maps->at(i); 1347 for (PointerMapDeque::const_iterator it = pointer_maps->begin();
1389 if (safe_point > map->lithium_position()) return false; 1348 it != pointer_maps->end(); ++it) {
1390 safe_point = map->lithium_position(); 1349 PointerMap* map = *it;
1350 if (safe_point > map->instruction_position()) return false;
1351 safe_point = map->instruction_position();
1391 } 1352 }
1392 return true; 1353 return true;
1393 } 1354 }
1394 1355
1395 1356
1396 void LAllocator::PopulatePointerMaps() { 1357 void RegisterAllocator::PopulatePointerMaps() {
1397 LAllocatorPhase phase("L_Populate pointer maps", this); 1358 RegisterAllocatorPhase phase("L_Populate pointer maps", this);
1398 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1399 1359
1400 ASSERT(SafePointsAreInOrder()); 1360 ASSERT(SafePointsAreInOrder());
1401 1361
1402 // Iterate over all safe point positions and record a pointer 1362 // Iterate over all safe point positions and record a pointer
1403 // for all spilled live ranges at this point. 1363 // for all spilled live ranges at this point.
1404 int first_safe_point_index = 0;
1405 int last_range_start = 0; 1364 int last_range_start = 0;
1365 const PointerMapDeque* pointer_maps = code()->pointer_maps();
1366 PointerMapDeque::const_iterator first_it = pointer_maps->begin();
1406 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) { 1367 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1407 LiveRange* range = live_ranges()->at(range_idx); 1368 LiveRange* range = live_ranges()->at(range_idx);
1408 if (range == NULL) continue; 1369 if (range == NULL) continue;
1409 // Iterate over the first parts of multi-part live ranges. 1370 // Iterate over the first parts of multi-part live ranges.
1410 if (range->parent() != NULL) continue; 1371 if (range->parent() != NULL) continue;
1411 // Skip non-pointer values. 1372 // Skip non-reference values.
1412 if (!HasTaggedValue(range->id())) continue; 1373 if (!HasTaggedValue(range->id())) continue;
1413 // Skip empty live ranges. 1374 // Skip empty live ranges.
1414 if (range->IsEmpty()) continue; 1375 if (range->IsEmpty()) continue;
1415 1376
1416 // Find the extent of the range and its children. 1377 // Find the extent of the range and its children.
1417 int start = range->Start().InstructionIndex(); 1378 int start = range->Start().InstructionIndex();
1418 int end = 0; 1379 int end = 0;
1419 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) { 1380 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1420 LifetimePosition this_end = cur->End(); 1381 LifetimePosition this_end = cur->End();
1421 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex(); 1382 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1422 ASSERT(cur->Start().InstructionIndex() >= start); 1383 ASSERT(cur->Start().InstructionIndex() >= start);
1423 } 1384 }
1424 1385
1425 // Most of the ranges are in order, but not all. Keep an eye on when 1386 // Most of the ranges are in order, but not all. Keep an eye on when they
1426 // they step backwards and reset the first_safe_point_index so we don't 1387 // step backwards and reset the first_it so we don't miss any safe points.
1427 // miss any safe points. 1388 if (start < last_range_start) first_it = pointer_maps->begin();
1428 if (start < last_range_start) {
1429 first_safe_point_index = 0;
1430 }
1431 last_range_start = start; 1389 last_range_start = start;
1432 1390
1433 // Step across all the safe points that are before the start of this range, 1391 // Step across all the safe points that are before the start of this range,
1434 // recording how far we step in order to save doing this for the next range. 1392 // recording how far we step in order to save doing this for the next range.
1435 while (first_safe_point_index < pointer_maps->length()) { 1393 for (; first_it != pointer_maps->end(); ++first_it) {
1436 LPointerMap* map = pointer_maps->at(first_safe_point_index); 1394 PointerMap* map = *first_it;
1437 int safe_point = map->lithium_position(); 1395 if (map->instruction_position() >= start) break;
1438 if (safe_point >= start) break;
1439 first_safe_point_index++;
1440 } 1396 }
1441 1397
1442 // Step through the safe points to see whether they are in the range. 1398 // Step through the safe points to see whether they are in the range.
1443 for (int safe_point_index = first_safe_point_index; 1399 for (PointerMapDeque::const_iterator it = first_it;
1444 safe_point_index < pointer_maps->length(); 1400 it != pointer_maps->end(); ++it) {
1445 ++safe_point_index) { 1401 PointerMap* map = *it;
1446 LPointerMap* map = pointer_maps->at(safe_point_index); 1402 int safe_point = map->instruction_position();
1447 int safe_point = map->lithium_position();
1448 1403
1449 // The safe points are sorted so we can stop searching here. 1404 // The safe points are sorted so we can stop searching here.
1450 if (safe_point - 1 > end) break; 1405 if (safe_point - 1 > end) break;
1451 1406
1452 // Advance to the next active range that covers the current 1407 // Advance to the next active range that covers the current
1453 // safe point position. 1408 // safe point position.
1454 LifetimePosition safe_point_pos = 1409 LifetimePosition safe_point_pos =
1455 LifetimePosition::FromInstructionIndex(safe_point); 1410 LifetimePosition::FromInstructionIndex(safe_point);
1456 LiveRange* cur = range; 1411 LiveRange* cur = range;
1457 while (cur != NULL && !cur->Covers(safe_point_pos)) { 1412 while (cur != NULL && !cur->Covers(safe_point_pos)) {
1458 cur = cur->next(); 1413 cur = cur->next();
1459 } 1414 }
1460 if (cur == NULL) continue; 1415 if (cur == NULL) continue;
1461 1416
1462 // Check if the live range is spilled and the safe point is after 1417 // Check if the live range is spilled and the safe point is after
1463 // the spill position. 1418 // the spill position.
1464 if (range->HasAllocatedSpillOperand() && 1419 if (range->HasAllocatedSpillOperand() &&
1465 safe_point >= range->spill_start_index()) { 1420 safe_point >= range->spill_start_index() &&
1421 !range->GetSpillOperand()->IsConstant()) {
1466 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n", 1422 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1467 range->id(), range->spill_start_index(), safe_point); 1423 range->id(), range->spill_start_index(), safe_point);
1468 map->RecordPointer(range->GetSpillOperand(), chunk()->zone()); 1424 map->RecordPointer(range->GetSpillOperand(), code_zone());
1469 } 1425 }
1470 1426
1471 if (!cur->IsSpilled()) { 1427 if (!cur->IsSpilled()) {
1472 TraceAlloc("Pointer in register for range %d (start at %d) " 1428 TraceAlloc(
1473 "at safe point %d\n", 1429 "Pointer in register for range %d (start at %d) "
1474 cur->id(), cur->Start().Value(), safe_point); 1430 "at safe point %d\n",
1475 LOperand* operand = cur->CreateAssignedOperand(chunk()->zone()); 1431 cur->id(), cur->Start().Value(), safe_point);
1432 InstructionOperand* operand = cur->CreateAssignedOperand(code_zone());
1476 ASSERT(!operand->IsStackSlot()); 1433 ASSERT(!operand->IsStackSlot());
1477 map->RecordPointer(operand, chunk()->zone()); 1434 map->RecordPointer(operand, code_zone());
1478 } 1435 }
1479 } 1436 }
1480 } 1437 }
1481 } 1438 }
1482 1439
1483 1440
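The scan in PopulatePointerMaps leans on the safe points being strictly sorted while the ranges are only mostly sorted by start, so a single forward cursor into the safe points suffices and is rewound only when a range starts earlier than its predecessor did. Just that cursor discipline, as a sketch (the per-safe-point coverage and spill checks are elided):

#include <cstdio>
#include <vector>

struct Rng { int start, end; };

void Scan(const std::vector<Rng>& ranges, const std::vector<int>& safe_points) {
  size_t first_it = 0;
  int last_range_start = 0;
  for (const Rng& r : ranges) {
    if (r.start < last_range_start) first_it = 0;  // ranges stepped backwards
    last_range_start = r.start;
    size_t it = first_it;
    while (it < safe_points.size() && safe_points[it] < r.start) ++it;
    first_it = it;  // remember how far we skipped for the next range
    for (; it < safe_points.size() && safe_points[it] - 1 <= r.end; ++it)
      std::printf("range [%d,%d] sees safe point %d\n", r.start, r.end,
                  safe_points[it]);
  }
}

int main() {
  Scan({{0, 8}, {4, 12}, {2, 6}}, {3, 5, 9, 11});
}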
1484 void LAllocator::AllocateGeneralRegisters() { 1441 void RegisterAllocator::AllocateGeneralRegisters() {
1485 LAllocatorPhase phase("L_Allocate general registers", this); 1442 RegisterAllocatorPhase phase("L_Allocate general registers", this);
1486 num_registers_ = Register::NumAllocatableRegisters(); 1443 num_registers_ = Register::NumAllocatableRegisters();
1487 mode_ = GENERAL_REGISTERS; 1444 mode_ = GENERAL_REGISTERS;
1488 AllocateRegisters(); 1445 AllocateRegisters();
1489 } 1446 }
1490 1447
1491 1448
1492 void LAllocator::AllocateDoubleRegisters() { 1449 void RegisterAllocator::AllocateDoubleRegisters() {
1493 LAllocatorPhase phase("L_Allocate double registers", this); 1450 RegisterAllocatorPhase phase("L_Allocate double registers", this);
1494 num_registers_ = DoubleRegister::NumAllocatableRegisters(); 1451 num_registers_ = DoubleRegister::NumAllocatableRegisters();
1495 mode_ = DOUBLE_REGISTERS; 1452 mode_ = DOUBLE_REGISTERS;
1496 AllocateRegisters(); 1453 AllocateRegisters();
1497 } 1454 }
1498 1455
1499 1456
1500 void LAllocator::AllocateRegisters() { 1457 void RegisterAllocator::AllocateRegisters() {
1501 ASSERT(unhandled_live_ranges_.is_empty()); 1458 ASSERT(unhandled_live_ranges_.is_empty());
1502 1459
1503 for (int i = 0; i < live_ranges_.length(); ++i) { 1460 for (int i = 0; i < live_ranges_.length(); ++i) {
1504 if (live_ranges_[i] != NULL) { 1461 if (live_ranges_[i] != NULL) {
1505 if (live_ranges_[i]->Kind() == mode_) { 1462 if (live_ranges_[i]->Kind() == mode_) {
1506 AddToUnhandledUnsorted(live_ranges_[i]); 1463 AddToUnhandledUnsorted(live_ranges_[i]);
1507 } 1464 }
1508 } 1465 }
1509 } 1466 }
1510 SortUnhandled(); 1467 SortUnhandled();
(...skipping 21 matching lines...)
1532 } 1489 }
1533 1490
1534 while (!unhandled_live_ranges_.is_empty()) { 1491 while (!unhandled_live_ranges_.is_empty()) {
1535 ASSERT(UnhandledIsSorted()); 1492 ASSERT(UnhandledIsSorted());
1536 LiveRange* current = unhandled_live_ranges_.RemoveLast(); 1493 LiveRange* current = unhandled_live_ranges_.RemoveLast();
1537 ASSERT(UnhandledIsSorted()); 1494 ASSERT(UnhandledIsSorted());
1538 LifetimePosition position = current->Start(); 1495 LifetimePosition position = current->Start();
1539 #ifdef DEBUG 1496 #ifdef DEBUG
1540 allocation_finger_ = position; 1497 allocation_finger_ = position;
1541 #endif 1498 #endif
1542 TraceAlloc("Processing interval %d start=%d\n", 1499 TraceAlloc("Processing interval %d start=%d\n", current->id(),
1543 current->id(),
1544 position.Value()); 1500 position.Value());
1545 1501
1546 if (current->HasAllocatedSpillOperand()) { 1502 if (current->HasAllocatedSpillOperand()) {
1547 TraceAlloc("Live range %d already has a spill operand\n", current->id()); 1503 TraceAlloc("Live range %d already has a spill operand\n", current->id());
1548 LifetimePosition next_pos = position; 1504 LifetimePosition next_pos = position;
1549 if (IsGapAt(next_pos.InstructionIndex())) { 1505 if (code()->IsGapAt(next_pos.InstructionIndex())) {
1550 next_pos = next_pos.NextInstruction(); 1506 next_pos = next_pos.NextInstruction();
1551 } 1507 }
1552 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos); 1508 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1553 // If the range already has a spill operand and it doesn't need a 1509 // If the range already has a spill operand and it doesn't need a
1554 // register immediately, split it and spill the first part of the range. 1510 // register immediately, split it and spill the first part of the range.
1555 if (pos == NULL) { 1511 if (pos == NULL) {
1556 Spill(current); 1512 Spill(current);
1557 continue; 1513 continue;
1558 } else if (pos->pos().Value() > 1514 } else if (pos->pos().Value() >
1559 current->Start().NextInstruction().Value()) { 1515 current->Start().NextInstruction().Value()) {
(...skipping 40 matching lines...)
1600 AddToActive(current); 1556 AddToActive(current);
1601 } 1557 }
1602 } 1558 }
1603 1559
1604 reusable_slots_.Rewind(0); 1560 reusable_slots_.Rewind(0);
1605 active_live_ranges_.Rewind(0); 1561 active_live_ranges_.Rewind(0);
1606 inactive_live_ranges_.Rewind(0); 1562 inactive_live_ranges_.Rewind(0);
1607 } 1563 }
1608 1564
1609 1565
1610 const char* LAllocator::RegisterName(int allocation_index) { 1566 const char* RegisterAllocator::RegisterName(int allocation_index) {
1611 if (mode_ == GENERAL_REGISTERS) { 1567 if (mode_ == GENERAL_REGISTERS) {
1612 return Register::AllocationIndexToString(allocation_index); 1568 return Register::AllocationIndexToString(allocation_index);
1613 } else { 1569 } else {
1614 return DoubleRegister::AllocationIndexToString(allocation_index); 1570 return DoubleRegister::AllocationIndexToString(allocation_index);
1615 } 1571 }
1616 } 1572 }
1617 1573
1618 1574
1619 void LAllocator::TraceAlloc(const char* msg, ...) { 1575 void RegisterAllocator::TraceAlloc(const char* msg, ...) {
1620 if (FLAG_trace_alloc) { 1576 if (FLAG_trace_alloc) {
1621 va_list arguments; 1577 va_list arguments;
1622 va_start(arguments, msg); 1578 va_start(arguments, msg);
1623 base::OS::VPrint(msg, arguments); 1579 base::OS::VPrint(msg, arguments);
1624 va_end(arguments); 1580 va_end(arguments);
1625 } 1581 }
1626 } 1582 }
1627 1583
1628 1584
1629 bool LAllocator::HasTaggedValue(int virtual_register) const { 1585 bool RegisterAllocator::HasTaggedValue(int virtual_register) const {
1630 HValue* value = graph_->LookupValue(virtual_register); 1586 return code()->IsReference(virtual_register);
1631 if (value == NULL) return false;
1632 return value->representation().IsTagged() && !value->type().IsSmi();
1633 } 1587 }
1634 1588
1635 1589
1636 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const { 1590 RegisterKind RegisterAllocator::RequiredRegisterKind(
1637 if (virtual_register < first_artificial_register_) { 1591 int virtual_register) const {
1638 HValue* value = graph_->LookupValue(virtual_register); 1592 return (code()->IsDouble(virtual_register)) ? DOUBLE_REGISTERS
1639 if (value != NULL && value->representation().IsDouble()) { 1593 : GENERAL_REGISTERS;
1640 return DOUBLE_REGISTERS;
1641 }
1642 } else if (double_artificial_registers_.Contains(
1643 virtual_register - first_artificial_register_)) {
1644 return DOUBLE_REGISTERS;
1645 }
1646
1647 return GENERAL_REGISTERS;
1648 } 1594 }
1649 1595
1650 1596
1651 void LAllocator::AddToActive(LiveRange* range) { 1597 void RegisterAllocator::AddToActive(LiveRange* range) {
1652 TraceAlloc("Add live range %d to active\n", range->id()); 1598 TraceAlloc("Add live range %d to active\n", range->id());
1653 active_live_ranges_.Add(range, zone()); 1599 active_live_ranges_.Add(range, zone());
1654 } 1600 }
1655 1601
1656 1602
1657 void LAllocator::AddToInactive(LiveRange* range) { 1603 void RegisterAllocator::AddToInactive(LiveRange* range) {
1658 TraceAlloc("Add live range %d to inactive\n", range->id()); 1604 TraceAlloc("Add live range %d to inactive\n", range->id());
1659 inactive_live_ranges_.Add(range, zone()); 1605 inactive_live_ranges_.Add(range, zone());
1660 } 1606 }
1661 1607
1662 1608
1663 void LAllocator::AddToUnhandledSorted(LiveRange* range) { 1609 void RegisterAllocator::AddToUnhandledSorted(LiveRange* range) {
1664 if (range == NULL || range->IsEmpty()) return; 1610 if (range == NULL || range->IsEmpty()) return;
1665 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled()); 1611 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1666 ASSERT(allocation_finger_.Value() <= range->Start().Value()); 1612 ASSERT(allocation_finger_.Value() <= range->Start().Value());
1667 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) { 1613 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1668 LiveRange* cur_range = unhandled_live_ranges_.at(i); 1614 LiveRange* cur_range = unhandled_live_ranges_.at(i);
1669 if (range->ShouldBeAllocatedBefore(cur_range)) { 1615 if (range->ShouldBeAllocatedBefore(cur_range)) {
1670 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1); 1616 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1671 unhandled_live_ranges_.InsertAt(i + 1, range, zone()); 1617 unhandled_live_ranges_.InsertAt(i + 1, range, zone());
1672 ASSERT(UnhandledIsSorted()); 1618 ASSERT(UnhandledIsSorted());
1673 return; 1619 return;
1674 } 1620 }
1675 } 1621 }
1676 TraceAlloc("Add live range %d to unhandled at start\n", range->id()); 1622 TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1677 unhandled_live_ranges_.InsertAt(0, range, zone()); 1623 unhandled_live_ranges_.InsertAt(0, range, zone());
1678 ASSERT(UnhandledIsSorted()); 1624 ASSERT(UnhandledIsSorted());
1679 } 1625 }
1680 1626
1681 1627
1682 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) { 1628 void RegisterAllocator::AddToUnhandledUnsorted(LiveRange* range) {
1683 if (range == NULL || range->IsEmpty()) return; 1629 if (range == NULL || range->IsEmpty()) return;
1684 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled()); 1630 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1685 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id()); 1631 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
1686 unhandled_live_ranges_.Add(range, zone()); 1632 unhandled_live_ranges_.Add(range, zone());
1687 } 1633 }
1688 1634
1689 1635
1690 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) { 1636 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
1691 ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) || 1637 ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
1692 !(*b)->ShouldBeAllocatedBefore(*a)); 1638 !(*b)->ShouldBeAllocatedBefore(*a));
1693 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1; 1639 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
1694 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1; 1640 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
1695 return (*a)->id() - (*b)->id(); 1641 return (*a)->id() - (*b)->id();
1696 } 1642 }
1697 1643
1698 1644
1699 // Sort the unhandled live ranges so that the ranges to be processed first are 1645 // Sort the unhandled live ranges so that the ranges to be processed first are
1700 // at the end of the array list. This is convenient for the register allocation 1646 // at the end of the array list. This is convenient for the register allocation
1701 // algorithm because it is efficient to remove elements from the end. 1647 // algorithm because it is efficient to remove elements from the end.
1702 void LAllocator::SortUnhandled() { 1648 void RegisterAllocator::SortUnhandled() {
1703 TraceAlloc("Sort unhandled\n"); 1649 TraceAlloc("Sort unhandled\n");
1704 unhandled_live_ranges_.Sort(&UnhandledSortHelper); 1650 unhandled_live_ranges_.Sort(&UnhandledSortHelper);
1705 } 1651 }
1706 1652
1707 1653
1708 bool LAllocator::UnhandledIsSorted() { 1654 bool RegisterAllocator::UnhandledIsSorted() {
1709 int len = unhandled_live_ranges_.length(); 1655 int len = unhandled_live_ranges_.length();
1710 for (int i = 1; i < len; i++) { 1656 for (int i = 1; i < len; i++) {
1711 LiveRange* a = unhandled_live_ranges_.at(i - 1); 1657 LiveRange* a = unhandled_live_ranges_.at(i - 1);
1712 LiveRange* b = unhandled_live_ranges_.at(i); 1658 LiveRange* b = unhandled_live_ranges_.at(i);
1713 if (a->Start().Value() < b->Start().Value()) return false; 1659 if (a->Start().Value() < b->Start().Value()) return false;
1714 } 1660 }
1715 return true; 1661 return true;
1716 } 1662 }
1717 1663
1718 1664
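The worklist discipline that SortUnhandled and UnhandledIsSorted maintain, in miniature: sort so the range to be processed first sits at the back of the array, then pop from the end in O(1):

#include <algorithm>
#include <cstdio>
#include <vector>

struct R { int id, start; };

// Mirrors UnhandledSortHelper: larger starts sort toward the front, so the
// smallest start ends up at the back and is popped first; ties go by id.
bool ProcessedLater(const R& a, const R& b) {
  if (a.start != b.start) return a.start > b.start;
  return a.id < b.id;
}

int main() {
  std::vector<R> unhandled = {{0, 5}, {1, 2}, {2, 9}};
  std::sort(unhandled.begin(), unhandled.end(), ProcessedLater);
  while (!unhandled.empty()) {
    R cur = unhandled.back();
    unhandled.pop_back();
    std::printf("processing range %d (start %d)\n", cur.id, cur.start);
  }
}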
1719 void LAllocator::FreeSpillSlot(LiveRange* range) { 1665 void RegisterAllocator::FreeSpillSlot(LiveRange* range) {
1720 // Check that we are the last range. 1666 // Check that we are the last range.
1721 if (range->next() != NULL) return; 1667 if (range->next() != NULL) return;
1722 1668
1723 if (!range->TopLevel()->HasAllocatedSpillOperand()) return; 1669 if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
1724 1670
1725 int index = range->TopLevel()->GetSpillOperand()->index(); 1671 InstructionOperand* spill_operand = range->TopLevel()->GetSpillOperand();
1726 if (index >= 0) { 1672 if (spill_operand->IsConstant()) return;
1673 if (spill_operand->index() >= 0) {
1727 reusable_slots_.Add(range, zone()); 1674 reusable_slots_.Add(range, zone());
1728 } 1675 }
1729 } 1676 }
1730 1677
1731 1678
1732 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) { 1679 InstructionOperand* RegisterAllocator::TryReuseSpillSlot(LiveRange* range) {
1733 if (reusable_slots_.is_empty()) return NULL; 1680 if (reusable_slots_.is_empty()) return NULL;
1734 if (reusable_slots_.first()->End().Value() > 1681 if (reusable_slots_.first()->End().Value() >
1735 range->TopLevel()->Start().Value()) { 1682 range->TopLevel()->Start().Value()) {
1736 return NULL; 1683 return NULL;
1737 } 1684 }
1738 LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand(); 1685 InstructionOperand* result =
1686 reusable_slots_.first()->TopLevel()->GetSpillOperand();
1739 reusable_slots_.Remove(0); 1687 reusable_slots_.Remove(0);
1740 return result; 1688 return result;
1741 } 1689 }
1742 1690
1743 1691
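The spill-slot reuse above, in isolation: when the last piece of a spilled range dies, its slot is parked on a reuse list; a later range may take the head slot only if that donor ended before the new range starts. A sketch (constant spill operands, which the patch now skips, are ignored here):

#include <cstdio>
#include <deque>

struct DeadRange { int end; int spill_slot; };

std::deque<DeadRange> reusable_slots;

// Called when the final piece of a spilled range is retired.
void FreeSlot(int range_end, int slot) {
  reusable_slots.push_back({range_end, slot});
}

// Returns a reusable slot index, or -1 if the head donor is still live.
int TryReuseSlot(int new_range_start) {
  if (reusable_slots.empty()) return -1;
  if (reusable_slots.front().end > new_range_start) return -1;
  int slot = reusable_slots.front().spill_slot;
  reusable_slots.pop_front();
  return slot;
}

int main() {
  FreeSlot(10, 0);
  std::printf("%d\n", TryReuseSlot(8));   // -1: donor still live at 8
  std::printf("%d\n", TryReuseSlot(12));  // 0: safe to reuse
}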
1744 void LAllocator::ActiveToHandled(LiveRange* range) { 1692 void RegisterAllocator::ActiveToHandled(LiveRange* range) {
1745 ASSERT(active_live_ranges_.Contains(range)); 1693 ASSERT(active_live_ranges_.Contains(range));
1746 active_live_ranges_.RemoveElement(range); 1694 active_live_ranges_.RemoveElement(range);
1747 TraceAlloc("Moving live range %d from active to handled\n", range->id()); 1695 TraceAlloc("Moving live range %d from active to handled\n", range->id());
1748 FreeSpillSlot(range); 1696 FreeSpillSlot(range);
1749 } 1697 }
1750 1698
1751 1699
1752 void LAllocator::ActiveToInactive(LiveRange* range) { 1700 void RegisterAllocator::ActiveToInactive(LiveRange* range) {
1753 ASSERT(active_live_ranges_.Contains(range)); 1701 ASSERT(active_live_ranges_.Contains(range));
1754 active_live_ranges_.RemoveElement(range); 1702 active_live_ranges_.RemoveElement(range);
1755 inactive_live_ranges_.Add(range, zone()); 1703 inactive_live_ranges_.Add(range, zone());
1756 TraceAlloc("Moving live range %d from active to inactive\n", range->id()); 1704 TraceAlloc("Moving live range %d from active to inactive\n", range->id());
1757 } 1705 }
1758 1706
1759 1707
1760 void LAllocator::InactiveToHandled(LiveRange* range) { 1708 void RegisterAllocator::InactiveToHandled(LiveRange* range) {
1761 ASSERT(inactive_live_ranges_.Contains(range)); 1709 ASSERT(inactive_live_ranges_.Contains(range));
1762 inactive_live_ranges_.RemoveElement(range); 1710 inactive_live_ranges_.RemoveElement(range);
1763 TraceAlloc("Moving live range %d from inactive to handled\n", range->id()); 1711 TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
1764 FreeSpillSlot(range); 1712 FreeSpillSlot(range);
1765 } 1713 }
1766 1714
1767 1715
1768 void LAllocator::InactiveToActive(LiveRange* range) { 1716 void RegisterAllocator::InactiveToActive(LiveRange* range) {
1769 ASSERT(inactive_live_ranges_.Contains(range)); 1717 ASSERT(inactive_live_ranges_.Contains(range));
1770 inactive_live_ranges_.RemoveElement(range); 1718 inactive_live_ranges_.RemoveElement(range);
1771 active_live_ranges_.Add(range, zone()); 1719 active_live_ranges_.Add(range, zone());
1772 TraceAlloc("Moving live range %d from inactive to active\n", range->id()); 1720 TraceAlloc("Moving live range %d from inactive to active\n", range->id());
1773 } 1721 }
1774 1722
1775 1723
1776 // TryAllocateFreeReg and AllocateBlockedReg assume this 1724 // TryAllocateFreeReg and AllocateBlockedReg assume this
1777 // when allocating local arrays. 1725 // when allocating local arrays.
1778 STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >= 1726 STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >=
1779 Register::kMaxNumAllocatableRegisters); 1727 Register::kMaxNumAllocatableRegisters);
1780 1728
1781 1729
1782 bool LAllocator::TryAllocateFreeReg(LiveRange* current) { 1730 bool RegisterAllocator::TryAllocateFreeReg(LiveRange* current) {
1783 LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters]; 1731 LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1784 1732
1785 for (int i = 0; i < num_registers_; i++) { 1733 for (int i = 0; i < num_registers_; i++) {
1786 free_until_pos[i] = LifetimePosition::MaxPosition(); 1734 free_until_pos[i] = LifetimePosition::MaxPosition();
1787 } 1735 }
1788 1736
1789 for (int i = 0; i < active_live_ranges_.length(); ++i) { 1737 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1790 LiveRange* cur_active = active_live_ranges_.at(i); 1738 LiveRange* cur_active = active_live_ranges_.at(i);
1791 free_until_pos[cur_active->assigned_register()] = 1739 free_until_pos[cur_active->assigned_register()] =
1792 LifetimePosition::FromInstructionIndex(0); 1740 LifetimePosition::FromInstructionIndex(0);
1793 } 1741 }
1794 1742
1795 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { 1743 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1796 LiveRange* cur_inactive = inactive_live_ranges_.at(i); 1744 LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1797 ASSERT(cur_inactive->End().Value() > current->Start().Value()); 1745 ASSERT(cur_inactive->End().Value() > current->Start().Value());
1798 LifetimePosition next_intersection = 1746 LifetimePosition next_intersection =
1799 cur_inactive->FirstIntersection(current); 1747 cur_inactive->FirstIntersection(current);
1800 if (!next_intersection.IsValid()) continue; 1748 if (!next_intersection.IsValid()) continue;
1801 int cur_reg = cur_inactive->assigned_register(); 1749 int cur_reg = cur_inactive->assigned_register();
1802 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection); 1750 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1803 } 1751 }
1804 1752
1805 LOperand* hint = current->FirstHint(); 1753 InstructionOperand* hint = current->FirstHint();
1806 if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) { 1754 if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
1807 int register_index = hint->index(); 1755 int register_index = hint->index();
1808 TraceAlloc( 1756 TraceAlloc(
1809 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n", 1757 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1810 RegisterName(register_index), 1758 RegisterName(register_index), free_until_pos[register_index].Value(),
1811 free_until_pos[register_index].Value(), 1759 current->id(), current->End().Value());
1812 current->id(),
1813 current->End().Value());
1814 1760
1815 // The desired register is free until the end of the current live range. 1761 // The desired register is free until the end of the current live range.
1816 if (free_until_pos[register_index].Value() >= current->End().Value()) { 1762 if (free_until_pos[register_index].Value() >= current->End().Value()) {
1817 TraceAlloc("Assigning preferred reg %s to live range %d\n", 1763 TraceAlloc("Assigning preferred reg %s to live range %d\n",
1818 RegisterName(register_index), 1764 RegisterName(register_index), current->id());
1819 current->id());
1820 SetLiveRangeAssignedRegister(current, register_index); 1765 SetLiveRangeAssignedRegister(current, register_index);
1821 return true; 1766 return true;
1822 } 1767 }
1823 } 1768 }
1824 1769
1825 // Find the register which stays free for the longest time. 1770 // Find the register which stays free for the longest time.
1826 int reg = 0; 1771 int reg = 0;
1827 for (int i = 1; i < RegisterCount(); ++i) { 1772 for (int i = 1; i < RegisterCount(); ++i) {
1828 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) { 1773 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1829 reg = i; 1774 reg = i;
(...skipping 12 matching lines...)
1842 // the range end. Split current at position where it becomes blocked. 1787 // the range end. Split current at position where it becomes blocked.
1843 LiveRange* tail = SplitRangeAt(current, pos); 1788 LiveRange* tail = SplitRangeAt(current, pos);
1844 if (!AllocationOk()) return false; 1789 if (!AllocationOk()) return false;
1845 AddToUnhandledSorted(tail); 1790 AddToUnhandledSorted(tail);
1846 } 1791 }
1847 1792
1848 1793
1849 // Register reg is available at the range start and is free until 1794 // Register reg is available at the range start and is free until
1850 // the range end. 1795 // the range end.
1851 ASSERT(pos.Value() >= current->End().Value()); 1796 ASSERT(pos.Value() >= current->End().Value());
1852 TraceAlloc("Assigning free reg %s to live range %d\n", 1797 TraceAlloc("Assigning free reg %s to live range %d\n", RegisterName(reg),
1853 RegisterName(reg),
1854 current->id()); 1798 current->id());
1855 SetLiveRangeAssignedRegister(current, reg); 1799 SetLiveRangeAssignedRegister(current, reg);
1856 1800
1857 return true; 1801 return true;
1858 } 1802 }
1859 1803
1860 1804
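Reviewer note: the loop above ("find the register which stays free for the longest time") is the core of linear scan's fast path: every register starts free until MaxPosition, active ranges block their register outright, and inactive ranges only up to their first intersection with current. A minimal, self-contained sketch of just that selection step, with kNumRegs and plain ints standing in for num_registers_ and LifetimePosition (illustrative names, not from this patch):

#include <cstdio>

static const int kNumRegs = 4;

// free_until[r] = first position at which register r stops being free.
// Returns the register that stays free the longest; ties keep the
// lower index because the comparison below is strict.
int PickLongestFreeRegister(const int (&free_until)[kNumRegs]) {
  int reg = 0;
  for (int i = 1; i < kNumRegs; ++i) {
    if (free_until[i] > free_until[reg]) reg = i;
  }
  return reg;
}

int main() {
  int free_until[kNumRegs] = {3, 17, 9, 17};
  std::printf("pick r%d\n", PickLongestFreeRegister(free_until));  // pick r1
  return 0;
}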
1861 void LAllocator::AllocateBlockedReg(LiveRange* current) { 1805 void RegisterAllocator::AllocateBlockedReg(LiveRange* current) {
1862 UsePosition* register_use = current->NextRegisterPosition(current->Start()); 1806 UsePosition* register_use = current->NextRegisterPosition(current->Start());
1863 if (register_use == NULL) { 1807 if (register_use == NULL) {
1864 // There is no use in the current live range that requires a register. 1808 // There is no use in the current live range that requires a register.
1865 // We can just spill it. 1809 // We can just spill it.
1866 Spill(current); 1810 Spill(current);
1867 return; 1811 return;
1868 } 1812 }
1869 1813
1870 1814
1871 LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters]; 1815 LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1872 LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters]; 1816 LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1873 1817
1874 for (int i = 0; i < num_registers_; i++) { 1818 for (int i = 0; i < num_registers_; i++) {
1875 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition(); 1819 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1876 } 1820 }
1877 1821
1878 for (int i = 0; i < active_live_ranges_.length(); ++i) { 1822 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1879 LiveRange* range = active_live_ranges_[i]; 1823 LiveRange* range = active_live_ranges_[i];
1880 int cur_reg = range->assigned_register(); 1824 int cur_reg = range->assigned_register();
1881 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { 1825 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1882 block_pos[cur_reg] = use_pos[cur_reg] = 1826 block_pos[cur_reg] = use_pos[cur_reg] =
1883 LifetimePosition::FromInstructionIndex(0); 1827 LifetimePosition::FromInstructionIndex(0);
1884 } else { 1828 } else {
1885 UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial( 1829 UsePosition* next_use =
1886 current->Start()); 1830 range->NextUsePositionRegisterIsBeneficial(current->Start());
1887 if (next_use == NULL) { 1831 if (next_use == NULL) {
1888 use_pos[cur_reg] = range->End(); 1832 use_pos[cur_reg] = range->End();
1889 } else { 1833 } else {
1890 use_pos[cur_reg] = next_use->pos(); 1834 use_pos[cur_reg] = next_use->pos();
1891 } 1835 }
1892 } 1836 }
1893 } 1837 }
1894 1838
1895 for (int i = 0; i < inactive_live_ranges_.length(); ++i) { 1839 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1896 LiveRange* range = inactive_live_ranges_.at(i); 1840 LiveRange* range = inactive_live_ranges_.at(i);
(...skipping 21 matching lines...)
1918 if (pos.Value() < register_use->pos().Value()) { 1862 if (pos.Value() < register_use->pos().Value()) {
1919 // All registers are blocked before the first use that requires a register. 1863 // All registers are blocked before the first use that requires a register.
1920 // Spill the starting part of the live range up to that use. 1864 // Spill the starting part of the live range up to that use.
1921 SpillBetween(current, current->Start(), register_use->pos()); 1865 SpillBetween(current, current->Start(), register_use->pos());
1922 return; 1866 return;
1923 } 1867 }
1924 1868
1925 if (block_pos[reg].Value() < current->End().Value()) { 1869 if (block_pos[reg].Value() < current->End().Value()) {
1926 // Register becomes blocked before the current range end. Split before that 1870 // Register becomes blocked before the current range end. Split before that
1927 // position. 1871 // position.
1928 LiveRange* tail = SplitBetween(current, 1872 LiveRange* tail = SplitBetween(current, current->Start(),
1929 current->Start(),
1930 block_pos[reg].InstructionStart()); 1873 block_pos[reg].InstructionStart());
1931 if (!AllocationOk()) return; 1874 if (!AllocationOk()) return;
1932 AddToUnhandledSorted(tail); 1875 AddToUnhandledSorted(tail);
1933 } 1876 }
1934 1877
1935 // Register reg is not blocked for the whole range. 1878 // Register reg is not blocked for the whole range.
1936 ASSERT(block_pos[reg].Value() >= current->End().Value()); 1879 ASSERT(block_pos[reg].Value() >= current->End().Value());
1937 TraceAlloc("Assigning blocked reg %s to live range %d\n", 1880 TraceAlloc("Assigning blocked reg %s to live range %d\n", RegisterName(reg),
1938 RegisterName(reg),
1939 current->id()); 1881 current->id());
1940 SetLiveRangeAssignedRegister(current, reg); 1882 SetLiveRangeAssignedRegister(current, reg);
1941 1883
1942 // This register was not free. Thus we need to find and spill 1884 // This register was not free. Thus we need to find and spill
1943 // parts of active and inactive live ranges that use the same register 1885 // parts of active and inactive live ranges that use the same register
1944 // at the same lifetime positions as current. 1886 // at the same lifetime positions as current.
1945 SplitAndSpillIntersecting(current); 1887 SplitAndSpillIntersecting(current);
1946 } 1888 }
1947 1889
1948 1890
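Reviewer note: once use_pos and block_pos are filled in (the register pick itself sits in the folded lines above), AllocateBlockedReg reduces to a three-way decision. A hedged sketch of that decision in the standard linear-scan formulation; the names and plain-int positions are illustrative, not from this patch:

#include <cstdio>

struct Decision { const char* action; int reg; };

// use_pos[r]:   next position where the current holder of r needs it.
// block_pos[r]: position where r becomes hard-blocked by a fixed range.
// first_use:    first position in `current` that requires a register.
// current_end:  end position of `current`.
Decision Decide(const int* use_pos, const int* block_pos, int num_regs,
                int first_use, int current_end) {
  int reg = 0;
  for (int i = 1; i < num_regs; ++i) {
    if (use_pos[i] > use_pos[reg]) reg = i;  // evict the cheapest holder
  }
  if (use_pos[reg] < first_use) {
    // All registers are wanted again before `current` needs one:
    // spill the start of `current` up to its first register use.
    return {"spill-until-first-use", -1};
  }
  if (block_pos[reg] < current_end) {
    // reg only works for a prefix: split `current` before the block.
    return {"split-then-assign", reg};
  }
  return {"assign", reg};  // reg is usable for the whole range
}

int main() {
  int use_pos[2] = {5, 12}, block_pos[2] = {5, 20};
  Decision d = Decide(use_pos, block_pos, 2, /*first_use=*/8,
                      /*current_end=*/15);
  std::printf("%s r%d\n", d.action, d.reg);  // prints: assign r1
  return 0;
}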
1949 LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range, 1891 LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
1950 LifetimePosition pos) { 1892 LiveRange* range, LifetimePosition pos) {
1951 HBasicBlock* block = GetBlock(pos.InstructionStart()); 1893 BasicBlock* block = GetBlock(pos.InstructionStart());
1952 HBasicBlock* loop_header = 1894 BasicBlock* loop_header =
1953 block->IsLoopHeader() ? block : block->parent_loop_header(); 1895 block->IsLoopHeader() ? block : code()->GetContainingLoop(block);
1954 1896
1955 if (loop_header == NULL) return pos; 1897 if (loop_header == NULL) return pos;
1956 1898
1957 UsePosition* prev_use = 1899 UsePosition* prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos);
1958 range->PreviousUsePositionRegisterIsBeneficial(pos);
1959 1900
1960 while (loop_header != NULL) { 1901 while (loop_header != NULL) {
1961 // We are going to spill the live range inside the loop. 1902 // We are going to spill the live range inside the loop.
1962 // If possible, try to move the spilling position backwards to the loop header. 1903 // If possible, try to move the spilling position backwards to the loop header.
1963 // This will reduce the number of memory moves on the back edge. 1904 // This will reduce the number of memory moves on the back edge.
1964 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex( 1905 LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
1965 loop_header->first_instruction_index()); 1906 loop_header->first_instruction_index());
1966 1907
1967 if (range->Covers(loop_start)) { 1908 if (range->Covers(loop_start)) {
1968 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) { 1909 if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
1969 // No register beneficial use inside the loop before the pos. 1910 // No register beneficial use inside the loop before the pos.
1970 pos = loop_start; 1911 pos = loop_start;
1971 } 1912 }
1972 } 1913 }
1973 1914
1974 // Try hoisting out to an outer loop. 1915 // Try hoisting out to an outer loop.
1975 loop_header = loop_header->parent_loop_header(); 1916 loop_header = code()->GetContainingLoop(loop_header);
1976 } 1917 }
1977 1918
1978 return pos; 1919 return pos;
1979 } 1920 }
1980 1921
1981 1922
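Reviewer note: FindOptimalSpillingPos walks the chain of containing loops, hoisting the spill up to each covered loop start that has no earlier register-beneficial use; only the lookup changed in this patch (code()->GetContainingLoop() instead of parent_loop_header()). A self-contained sketch of the hoisting logic under those assumptions, with plain ints for positions:

#include <cstdio>

struct Loop {
  int start;          // first instruction index of the loop header
  const Loop* outer;  // enclosing loop, or nullptr
};

// pos:      where we were about to spill
// covers:   stand-in for range->Covers(p)
// prev_use: last register-beneficial use before pos, or -1 if none
int HoistSpillPos(int pos, const Loop* loop, bool (*covers)(int),
                  int prev_use) {
  for (; loop != nullptr; loop = loop->outer) {
    // Moving the spill to the loop start removes the store from the
    // back edge -- but only if the range is live there and no use
    // inside the loop still wants a register.
    if (covers(loop->start) && prev_use < loop->start) pos = loop->start;
  }
  return pos;
}

static bool CoversAll(int) { return true; }

int main() {
  Loop outer = {2, nullptr}, inner = {10, &outer};
  // prev_use at 4 blocks hoisting past it: we stop at the inner header.
  std::printf("%d\n", HoistSpillPos(30, &inner, CoversAll, 4));  // 10
  return 0;
}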
1982 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) { 1923 void RegisterAllocator::SplitAndSpillIntersecting(LiveRange* current) {
1983 ASSERT(current->HasRegisterAssigned()); 1924 ASSERT(current->HasRegisterAssigned());
1984 int reg = current->assigned_register(); 1925 int reg = current->assigned_register();
1985 LifetimePosition split_pos = current->Start(); 1926 LifetimePosition split_pos = current->Start();
1986 for (int i = 0; i < active_live_ranges_.length(); ++i) { 1927 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1987 LiveRange* range = active_live_ranges_[i]; 1928 LiveRange* range = active_live_ranges_[i];
1988 if (range->assigned_register() == reg) { 1929 if (range->assigned_register() == reg) {
1989 UsePosition* next_pos = range->NextRegisterPosition(current->Start()); 1930 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1990 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos); 1931 LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
1991 if (next_pos == NULL) { 1932 if (next_pos == NULL) {
1992 SpillAfter(range, spill_pos); 1933 SpillAfter(range, spill_pos);
(...skipping 29 matching lines...)
2022 } 1963 }
2023 if (!AllocationOk()) return; 1964 if (!AllocationOk()) return;
2024 InactiveToHandled(range); 1965 InactiveToHandled(range);
2025 --i; 1966 --i;
2026 } 1967 }
2027 } 1968 }
2028 } 1969 }
2029 } 1970 }
2030 1971
2031 1972
2032 bool LAllocator::IsBlockBoundary(LifetimePosition pos) { 1973 bool RegisterAllocator::IsBlockBoundary(LifetimePosition pos) {
2033 return pos.IsInstructionStart() && 1974 return pos.IsInstructionStart() &&
2034 InstructionAt(pos.InstructionIndex())->IsLabel(); 1975 InstructionAt(pos.InstructionIndex())->IsBlockStart();
2035 } 1976 }
2036 1977
2037 1978
2038 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) { 1979 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
1980 LifetimePosition pos) {
2039 ASSERT(!range->IsFixed()); 1981 ASSERT(!range->IsFixed());
2040 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value()); 1982 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
2041 1983
2042 if (pos.Value() <= range->Start().Value()) return range; 1984 if (pos.Value() <= range->Start().Value()) return range;
2043 1985
2044 // We can't properly connect live ranges if the split occurred at the end 1986 // We can't properly connect live ranges if the split occurred at the end
2045 // of a control instruction. 1987 // of a control instruction.
2046 ASSERT(pos.IsInstructionStart() || 1988 ASSERT(pos.IsInstructionStart() ||
2047 !chunk_->instructions()->at(pos.InstructionIndex())->IsControl()); 1989 !InstructionAt(pos.InstructionIndex())->IsControl());
2048 1990
2049 int vreg = GetVirtualRegister(); 1991 int vreg = GetVirtualRegister();
2050 if (!AllocationOk()) return NULL; 1992 if (!AllocationOk()) return NULL;
2051 LiveRange* result = LiveRangeFor(vreg); 1993 LiveRange* result = LiveRangeFor(vreg);
2052 range->SplitAt(pos, result, zone()); 1994 range->SplitAt(pos, result, zone());
2053 return result; 1995 return result;
2054 } 1996 }
2055 1997
2056 1998
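Reviewer note: SplitRangeAt's contract is easiest to see on a single interval. A toy stand-in, assuming a one-interval range (the real LiveRange is a list of intervals plus use positions, and allocates the tail under a fresh virtual register):

#include <cstdio>

// Hypothetical stand-in for LiveRange: one half-open interval.
struct Range {
  int from, to;
  // Split [from,to[ at pos: this range keeps [from,pos[, the returned
  // tail owns [pos,to[. Mirrors SplitRangeAt for from < pos < to;
  // positions at or before `from` return the range unchanged in the
  // real allocator.
  Range SplitAt(int pos) {
    Range tail = {pos, to};
    to = pos;
    return tail;
  }
};

int main() {
  Range r = {4, 20};
  Range tail = r.SplitAt(10);
  std::printf("[%d,%d[ and [%d,%d[\n", r.from, r.to, tail.from, tail.to);
  return 0;
}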
2057 LiveRange* LAllocator::SplitBetween(LiveRange* range, 1999 LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
2058 LifetimePosition start, 2000 LifetimePosition start,
2059 LifetimePosition end) { 2001 LifetimePosition end) {
2060 ASSERT(!range->IsFixed()); 2002 ASSERT(!range->IsFixed());
2061 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", 2003 TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
2062 range->id(), 2004 range->id(), start.Value(), end.Value());
2063 start.Value(),
2064 end.Value());
2065 2005
2066 LifetimePosition split_pos = FindOptimalSplitPos(start, end); 2006 LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2067 ASSERT(split_pos.Value() >= start.Value()); 2007 ASSERT(split_pos.Value() >= start.Value());
2068 return SplitRangeAt(range, split_pos); 2008 return SplitRangeAt(range, split_pos);
2069 } 2009 }
2070 2010
2071 2011
2072 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start, 2012 LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2073 LifetimePosition end) { 2013 LifetimePosition end) {
2074 int start_instr = start.InstructionIndex(); 2014 int start_instr = start.InstructionIndex();
2075 int end_instr = end.InstructionIndex(); 2015 int end_instr = end.InstructionIndex();
2076 ASSERT(start_instr <= end_instr); 2016 ASSERT(start_instr <= end_instr);
2077 2017
2078 // We have no choice 2018 // We have no choice
2079 if (start_instr == end_instr) return end; 2019 if (start_instr == end_instr) return end;
2080 2020
2081 HBasicBlock* start_block = GetBlock(start); 2021 BasicBlock* start_block = GetBlock(start);
2082 HBasicBlock* end_block = GetBlock(end); 2022 BasicBlock* end_block = GetBlock(end);
2083 2023
2084 if (end_block == start_block) { 2024 if (end_block == start_block) {
2085 // The interval is split in the same basic block. Split at the latest 2025 // The interval is split in the same basic block. Split at the latest
2086 // possible position. 2026 // possible position.
2087 return end; 2027 return end;
2088 } 2028 }
2089 2029
2090 HBasicBlock* block = end_block; 2030 BasicBlock* block = end_block;
2091 // Find the header of the outermost loop. 2031 // Find the header of the outermost loop.
2092 while (block->parent_loop_header() != NULL && 2032 // TODO(titzer): fix redundancy below.
2093 block->parent_loop_header()->block_id() > start_block->block_id()) { 2033 while (code()->GetContainingLoop(block) != NULL &&
2094 block = block->parent_loop_header(); 2034 code()->GetContainingLoop(block)->rpo_number_ >
2035 start_block->rpo_number_) {
2036 block = code()->GetContainingLoop(block);
2095 } 2037 }
2096 2038
2097 // We did not find any suitable outer loop. Split at the latest possible 2039 // We did not find any suitable outer loop. Split at the latest possible
2098 // position unless end_block is a loop header itself. 2040 // position unless end_block is a loop header itself.
2099 if (block == end_block && !end_block->IsLoopHeader()) return end; 2041 if (block == end_block && !end_block->IsLoopHeader()) return end;
2100 2042
2101 return LifetimePosition::FromInstructionIndex( 2043 return LifetimePosition::FromInstructionIndex(
2102 block->first_instruction_index()); 2044 block->first_instruction_index());
2103 } 2045 }
2104 2046
2105 2047
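Reviewer note: FindOptimalSplitPos hoists the split point to the first instruction of the outermost loop header that still lies after the start block, now keyed on rpo_number_ rather than block_id(). A minimal sketch of that walk; Block and its fields are illustrative stand-ins for BasicBlock:

#include <cstdio>

struct Block {
  int rpo;            // reverse-postorder number
  int first_instr;    // first instruction index
  bool is_loop_header;
  const Block* loop;  // containing loop header, or nullptr
};

// Splitting at the outermost covered loop header keeps the split (and
// the spill code it implies) out of as many loop bodies as possible.
int OptimalSplit(const Block* start_block, const Block* end_block,
                 int end_pos) {
  const Block* b = end_block;
  while (b->loop != nullptr && b->loop->rpo > start_block->rpo) {
    b = b->loop;
  }
  if (b == end_block && !end_block->is_loop_header) return end_pos;
  return b->first_instr;
}

int main() {
  Block entry = {0, 0, false, nullptr};
  Block header = {1, 4, true, nullptr};  // loop containing `body`
  Block body = {2, 8, false, &header};
  std::printf("%d\n", OptimalSplit(&entry, &body, 11));  // 4
  return 0;
}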
2106 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) { 2048 void RegisterAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2107 LiveRange* second_part = SplitRangeAt(range, pos); 2049 LiveRange* second_part = SplitRangeAt(range, pos);
2108 if (!AllocationOk()) return; 2050 if (!AllocationOk()) return;
2109 Spill(second_part); 2051 Spill(second_part);
2110 } 2052 }
2111 2053
2112 2054
2113 void LAllocator::SpillBetween(LiveRange* range, 2055 void RegisterAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
2114 LifetimePosition start, 2056 LifetimePosition end) {
2115 LifetimePosition end) {
2116 SpillBetweenUntil(range, start, start, end); 2057 SpillBetweenUntil(range, start, start, end);
2117 } 2058 }
2118 2059
2119 2060
2120 void LAllocator::SpillBetweenUntil(LiveRange* range, 2061 void RegisterAllocator::SpillBetweenUntil(LiveRange* range,
2121 LifetimePosition start, 2062 LifetimePosition start,
2122 LifetimePosition until, 2063 LifetimePosition until,
2123 LifetimePosition end) { 2064 LifetimePosition end) {
2124 CHECK(start.Value() < end.Value()); 2065 CHECK(start.Value() < end.Value());
2125 LiveRange* second_part = SplitRangeAt(range, start); 2066 LiveRange* second_part = SplitRangeAt(range, start);
2126 if (!AllocationOk()) return; 2067 if (!AllocationOk()) return;
2127 2068
2128 if (second_part->Start().Value() < end.Value()) { 2069 if (second_part->Start().Value() < end.Value()) {
2129 // The split result intersects with [start, end[. 2070 // The split result intersects with [start, end[.
2130 // Split it at a position in ]start+1, end[, spill the middle part, 2071 // Split it at a position in ]start+1, end[, spill the middle part,
2131 // and put the rest back on the unhandled list. 2072 // and put the rest back on the unhandled list.
2132 LiveRange* third_part = SplitBetween( 2073 LiveRange* third_part = SplitBetween(
2133 second_part, 2074 second_part, Max(second_part->Start().InstructionEnd(), until),
2134 Max(second_part->Start().InstructionEnd(), until),
2135 end.PrevInstruction().InstructionEnd()); 2075 end.PrevInstruction().InstructionEnd());
2136 if (!AllocationOk()) return; 2076 if (!AllocationOk()) return;
2137 2077
2138 ASSERT(third_part != second_part); 2078 ASSERT(third_part != second_part);
2139 2079
2140 Spill(second_part); 2080 Spill(second_part);
2141 AddToUnhandledSorted(third_part); 2081 AddToUnhandledSorted(third_part);
2142 } else { 2082 } else {
2143 // The split result does not intersect with [start, end[. 2083 // The split result does not intersect with [start, end[.
2144 // Nothing to spill. Just put it on the unhandled list as a whole. 2084 // Nothing to spill. Just put it on the unhandled list as a whole.
2145 AddToUnhandledSorted(second_part); 2085 AddToUnhandledSorted(second_part);
2146 } 2086 }
2147 } 2087 }
2148 2088
2149 2089
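Reviewer note: SpillBetweenUntil carves the range into up to three pieces. A simplified sketch of the interval arithmetic, with std::min standing in for SplitBetween's optimal split choice and plain ints for LifetimePosition (all names illustrative):

#include <algorithm>
#include <cstdio>

struct Part { int from, to; const char* fate; };

void SpillBetween(int range_from, int range_to, int start, int end) {
  if (start >= end || range_to <= start) return;
  int split = std::min(range_to, end);  // stand-in for SplitBetween()
  Part parts[3] = {{range_from, start, "keeps its assignment"},
                   {start, split, "spilled"},
                   {split, range_to, "back to unhandled"}};
  for (const Part& p : parts) {
    if (p.from < p.to) std::printf("[%d,%d[ %s\n", p.from, p.to, p.fate);
  }
}

int main() {
  SpillBetween(2, 30, 10, 20);
  return 0;
}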
2150 void LAllocator::Spill(LiveRange* range) { 2090 void RegisterAllocator::Spill(LiveRange* range) {
2151 ASSERT(!range->IsSpilled()); 2091 ASSERT(!range->IsSpilled());
2152 TraceAlloc("Spilling live range %d\n", range->id()); 2092 TraceAlloc("Spilling live range %d\n", range->id());
2153 LiveRange* first = range->TopLevel(); 2093 LiveRange* first = range->TopLevel();
2154 2094
2155 if (!first->HasAllocatedSpillOperand()) { 2095 if (!first->HasAllocatedSpillOperand()) {
2156 LOperand* op = TryReuseSpillSlot(range); 2096 InstructionOperand* op = TryReuseSpillSlot(range);
2157 if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind()); 2097 if (op == NULL) {
2098 // Allocate a new operand referring to the spill slot.
2099 RegisterKind kind = range->Kind();
2100 int index = code()->frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS);
2101 if (kind == DOUBLE_REGISTERS) {
2102 op = DoubleStackSlotOperand::Create(index, zone());
2103 } else {
2104 ASSERT(kind == GENERAL_REGISTERS);
2105 op = StackSlotOperand::Create(index, zone());
2106 }
2107 }
2158 first->SetSpillOperand(op); 2108 first->SetSpillOperand(op);
2159 } 2109 }
2160 range->MakeSpilled(chunk()->zone()); 2110 range->MakeSpilled(code_zone());
2161 } 2111 }
2162 2112
2163 2113
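Reviewer note: the new Spill path above allocates the slot straight from the frame instead of going through LChunk, and every child range reuses the slot via TopLevel(). A toy model of that bookkeeping; the two-slot width for doubles is an assumption about 32-bit targets, not something this patch spells out:

#include <cstdio>

struct Frame {
  int next_slot = 0;
  int AllocateSpillSlot(bool is_double) {
    int index = next_slot;
    next_slot += is_double ? 2 : 1;  // assumed double width
    return index;
  }
};

int main() {
  Frame frame;
  int tagged = frame.AllocateSpillSlot(false);
  int dbl = frame.AllocateSpillSlot(true);
  std::printf("tagged@%d double@%d next=%d\n", tagged, dbl,
              frame.next_slot);  // tagged@0 double@1 next=3
  return 0;
}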
2164 int LAllocator::RegisterCount() const { 2114 int RegisterAllocator::RegisterCount() const { return num_registers_; }
2165 return num_registers_;
2166 }
2167 2115
2168 2116
2169 #ifdef DEBUG 2117 #ifdef DEBUG
2170 2118
2171 2119
2172 void LAllocator::Verify() const { 2120 void RegisterAllocator::Verify() const {
2173 for (int i = 0; i < live_ranges()->length(); ++i) { 2121 for (int i = 0; i < live_ranges()->length(); ++i) {
2174 LiveRange* current = live_ranges()->at(i); 2122 LiveRange* current = live_ranges()->at(i);
2175 if (current != NULL) current->Verify(); 2123 if (current != NULL) current->Verify();
2176 } 2124 }
2177 } 2125 }
2178 2126
2179 2127
2180 #endif 2128 #endif
2181 2129
2182 2130
2183 LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator) 2131 void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
2184 : CompilationPhase(name, allocator->graph()->info()), 2132 int reg) {
2133 if (range->Kind() == DOUBLE_REGISTERS) {
2134 assigned_double_registers_->Add(reg);
2135 } else {
2136 ASSERT(range->Kind() == GENERAL_REGISTERS);
2137 assigned_registers_->Add(reg);
2138 }
2139 range->set_assigned_register(reg, code_zone());
2140 }
2141
2142
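Reviewer note: SetLiveRangeAssignedRegister now also records the register in the per-kind assigned set before delegating to the range. A sketch with a plain bitmask where the allocator uses a BitVector:

#include <cstdio>

enum RegisterKind { GENERAL_REGISTERS, DOUBLE_REGISTERS };

struct Assigned {
  unsigned general = 0, fp = 0;
  // Mark register `reg` of the given kind as used by some live range.
  void Add(RegisterKind kind, int reg) {
    (kind == DOUBLE_REGISTERS ? fp : general) |= 1u << reg;
  }
};

int main() {
  Assigned a;
  a.Add(GENERAL_REGISTERS, 3);
  a.Add(DOUBLE_REGISTERS, 1);
  std::printf("general=%#x fp=%#x\n", a.general, a.fp);  // 0x8 0x2
  return 0;
}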
2143 RegisterAllocatorPhase::RegisterAllocatorPhase(const char* name,
2144 RegisterAllocator* allocator)
2145 : CompilationPhase(name, allocator->code()->linkage()->info()),
2185 allocator_(allocator) { 2146 allocator_(allocator) {
2186 if (FLAG_hydrogen_stats) { 2147 if (FLAG_turbo_stats) {
2187 allocator_zone_start_allocation_size_ = 2148 allocator_zone_start_allocation_size_ =
2188 allocator->zone()->allocation_size(); 2149 allocator->zone()->allocation_size();
2189 } 2150 }
2190 } 2151 }
2191 2152
2192 2153
2193 LAllocatorPhase::~LAllocatorPhase() { 2154 RegisterAllocatorPhase::~RegisterAllocatorPhase() {
2194 if (FLAG_hydrogen_stats) { 2155 if (FLAG_turbo_stats) {
2195 unsigned size = allocator_->zone()->allocation_size() - 2156 unsigned size = allocator_->zone()->allocation_size() -
2196 allocator_zone_start_allocation_size_; 2157 allocator_zone_start_allocation_size_;
2197 isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size); 2158 isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size);
2198 } 2159 }
2199
2200 if (ShouldProduceTraceOutput()) {
2201 isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
2202 isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
2203 }
2204
2205 #ifdef DEBUG 2160 #ifdef DEBUG
2206 if (allocator_ != NULL) allocator_->Verify(); 2161 if (allocator_ != NULL) allocator_->Verify();
2207 #endif 2162 #endif
2208 } 2163 }
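Reviewer note: RegisterAllocatorPhase is the usual RAII measurement scope, retargeted from FLAG_hydrogen_stats/GetHStatistics to FLAG_turbo_stats/GetTStatistics (with the Lithium tracing dropped). A generic sketch of the pattern, with all names illustrative:

#include <cstdio>

struct ZoneStatsScope {
  const char* name;
  const unsigned* counter;
  unsigned start;
  // Snapshot the counter on entry...
  ZoneStatsScope(const char* n, const unsigned* c)
      : name(n), counter(c), start(*c) {}
  // ...and report the delta on exit, like the destructor above.
  ~ZoneStatsScope() {
    std::printf("%s allocated %u bytes\n", name, *counter - start);
  }
};

int main() {
  unsigned allocated = 0;
  {
    ZoneStatsScope scope("regalloc", &allocated);
    allocated += 1024;  // stand-in for zone allocations during the phase
  }
  return 0;
}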
2209 2164 }
2210 2165 }
2211 } } // namespace v8::internal 2166 } // namespace v8::internal::compiler