Chromium Code Reviews

Side by Side Diff: src/lithium-allocator.h

Issue 6529055: [Isolates] Merge crankshaft (r5922 from bleeding_edge). (Closed)
Patch Set: Win32 port (created 9 years, 10 months ago)
1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #ifndef V8_LITHIUM_ALLOCATOR_H_
29 #define V8_LITHIUM_ALLOCATOR_H_
30
31 #include "v8.h"
32
33 #include "zone.h"
34
35 namespace v8 {
36 namespace internal {
37
38 // Forward declarations.
39 class HBasicBlock;
40 class HGraph;
41 class HInstruction;
42 class HPhi;
43 class HTracer;
44 class HValue;
45 class BitVector;
46 class StringStream;
47
48 class LArgument;
49 class LChunk;
50 class LConstantOperand;
51 class LGap;
52 class LInstruction;
53 class LParallelMove;
54 class LPointerMap;
55 class LStackSlot;
56 class LRegister;
57
58 // This class represents a single point of a LOperand's lifetime.
59 // For each lithium instruction there are exactly two lifetime positions:
60 // the beginning and the end of the instruction. Lifetime positions for
61 // different lithium instructions are disjoint.
62 class LifetimePosition {
63 public:
64 // Return the lifetime position that corresponds to the beginning of
65 // the instruction with the given index.
66 static LifetimePosition FromInstructionIndex(int index) {
67 return LifetimePosition(index * kStep);
68 }
69
70 // Returns a numeric representation of this lifetime position.
71 int Value() const {
72 return value_;
73 }
74
75 // Returns the index of the instruction to which this lifetime position
76 // corresponds.
77 int InstructionIndex() const {
78 ASSERT(IsValid());
79 return value_ / kStep;
80 }
81
82 // Returns true if this lifetime position corresponds to the instruction
83 // start.
84 bool IsInstructionStart() const {
85 return (value_ & (kStep - 1)) == 0;
86 }
87
88 // Returns the lifetime position for the start of the instruction which
89 // corresponds to this lifetime position.
90 LifetimePosition InstructionStart() const {
91 ASSERT(IsValid());
92 return LifetimePosition(value_ & ~(kStep - 1));
93 }
94
95 // Returns the lifetime position for the end of the instruction which
96 // corresponds to this lifetime position.
97 LifetimePosition InstructionEnd() const {
98 ASSERT(IsValid());
99 return LifetimePosition(InstructionStart().Value() + kStep/2);
100 }
101
102 // Returns the lifetime position for the beginning of the next instruction.
103 LifetimePosition NextInstruction() const {
104 ASSERT(IsValid());
105 return LifetimePosition(InstructionStart().Value() + kStep);
106 }
107
108 // Returns the lifetime position for the beginning of the previous
109 // instruction.
110 LifetimePosition PrevInstruction() const {
111 ASSERT(IsValid());
112 ASSERT(value_ > 1);
113 return LifetimePosition(InstructionStart().Value() - kStep);
114 }
115
116 // Constructs the lifetime position which does not correspond to any
117 // instruction.
118 LifetimePosition() : value_(-1) {}
119
120 // Returns true if this lifetime position corresponds to some
121 // instruction.
122 bool IsValid() const { return value_ != -1; }
123
124 static LifetimePosition Invalid() { return LifetimePosition(); }
125
126 private:
127 static const int kStep = 2;
128
129 // Code relies on kStep being a power of two.
130 STATIC_ASSERT(IS_POWER_OF_TWO(kStep));
131
132 explicit LifetimePosition(int value) : value_(value) { }
133
134 int value_;
135 };
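
The comment above leaves the encoding implicit: with kStep = 2, instruction index i owns the two positions 2*i (start) and 2*i + 1 (end), and masking off the low bit recovers the start. The following is a minimal standalone sketch of that arithmetic, illustrative only (plain assert instead of V8's ASSERT, and the class name is a hypothetical stand-in):

// Sketch: instruction index i maps to positions 2*i (start) and 2*i + 1 (end).
#include <cassert>
#include <cstdio>

class Position {
 public:
  static Position FromInstructionIndex(int index) {
    return Position(index * kStep);
  }
  int InstructionIndex() const { assert(IsValid()); return value_ / kStep; }
  bool IsInstructionStart() const { return (value_ & (kStep - 1)) == 0; }
  Position InstructionStart() const { return Position(value_ & ~(kStep - 1)); }
  Position InstructionEnd() const {
    return Position(InstructionStart().value_ + kStep / 2);
  }
  Position NextInstruction() const {
    return Position(InstructionStart().value_ + kStep);
  }
  bool IsValid() const { return value_ != -1; }
  int Value() const { return value_; }

 private:
  static const int kStep = 2;
  explicit Position(int value) : value_(value) {}
  int value_;
};

int main() {
  Position start = Position::FromInstructionIndex(3);  // value 6
  Position end = start.InstructionEnd();               // value 7
  std::printf("%d %d %d\n", start.Value(), end.Value(),
              start.NextInstruction().Value());        // prints "6 7 8"
  return 0;
}

Because kStep is a power of two, InstructionStart() is a single mask operation, which is what the STATIC_ASSERT in the class guards.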
136
137
138 class LOperand: public ZoneObject {
139 public:
140 enum Kind {
141 INVALID,
142 UNALLOCATED,
143 CONSTANT_OPERAND,
144 STACK_SLOT,
145 DOUBLE_STACK_SLOT,
146 REGISTER,
147 DOUBLE_REGISTER,
148 ARGUMENT
149 };
150
151 LOperand() : value_(KindField::encode(INVALID)) { }
152
153 Kind kind() const { return KindField::decode(value_); }
154 int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
155 bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
156 bool IsStackSlot() const { return kind() == STACK_SLOT; }
157 bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
158 bool IsRegister() const { return kind() == REGISTER; }
159 bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
160 bool IsArgument() const { return kind() == ARGUMENT; }
161 bool IsUnallocated() const { return kind() == UNALLOCATED; }
162 bool Equals(LOperand* other) const { return value_ == other->value_; }
163 int VirtualRegister();
164
165 void PrintTo(StringStream* stream);
166 void ConvertTo(Kind kind, int index) {
167 value_ = KindField::encode(kind);
168 value_ |= index << kKindFieldWidth;
169 ASSERT(this->index() == index);
170 }
171
172 protected:
173 static const int kKindFieldWidth = 3;
174 class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
175
176 LOperand(Kind kind, int index) { ConvertTo(kind, index); }
177
178 unsigned value_;
179 };
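
LOperand packs the 3-bit kind into the low bits of value_ and keeps the operand index in the remaining bits; index() casts to int before shifting, so a negative index survives the round trip. Below is a free-standing sketch of that packing, with hypothetical Encode/Decode helpers standing in for the BitField machinery:

// Sketch of the kind/index packing used by LOperand (illustrative only).
#include <cassert>

enum Kind { INVALID, UNALLOCATED, CONSTANT_OPERAND, STACK_SLOT,
            DOUBLE_STACK_SLOT, REGISTER, DOUBLE_REGISTER, ARGUMENT };

const int kKindFieldWidth = 3;

unsigned Encode(Kind kind, int index) {
  return static_cast<unsigned>(kind) |
         (static_cast<unsigned>(index) << kKindFieldWidth);
}

Kind DecodeKind(unsigned value) {
  return static_cast<Kind>(value & ((1u << kKindFieldWidth) - 1));
}

int DecodeIndex(unsigned value) {
  // Signed (arithmetic) shift, mirroring LOperand::index().
  return static_cast<int>(value) >> kKindFieldWidth;
}

int main() {
  unsigned op = Encode(STACK_SLOT, 5);
  assert(DecodeKind(op) == STACK_SLOT && DecodeIndex(op) == 5);
  assert(DecodeIndex(Encode(STACK_SLOT, -4)) == -4);  // sign is preserved
  return 0;
}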
180
181
182 class LUnallocated: public LOperand {
183 public:
184 enum Policy {
185 NONE,
186 ANY,
187 FIXED_REGISTER,
188 FIXED_DOUBLE_REGISTER,
189 FIXED_SLOT,
190 MUST_HAVE_REGISTER,
191 WRITABLE_REGISTER,
192 SAME_AS_FIRST_INPUT,
193 SAME_AS_ANY_INPUT,
194 IGNORE
195 };
196
197 // Lifetime of operand inside the instruction.
198 enum Lifetime {
199 // A USED_AT_START operand is guaranteed to be live only at
200 // instruction start. The register allocator is free to assign the same
201 // register to some other operand used inside the instruction (i.e. a
202 // temporary or the output).
203 USED_AT_START,
204
205 // A USED_AT_END operand is treated as live until the end of the
206 // instruction. This means that the register allocator will not reuse its
207 // register for any other operand inside the instruction.
208 USED_AT_END
209 };
210
211 explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
212 Initialize(policy, 0, USED_AT_END);
213 }
214
215 LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
216 Initialize(policy, fixed_index, USED_AT_END);
217 }
218
219 LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
220 Initialize(policy, 0, lifetime);
221 }
222
223 // The superclass has a KindField. Some policies have a signed fixed
224 // index in the upper bits.
225 static const int kPolicyWidth = 4;
226 static const int kLifetimeWidth = 1;
227 static const int kVirtualRegisterWidth = 17;
228
229 static const int kPolicyShift = kKindFieldWidth;
230 static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
231 static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
232 static const int kFixedIndexShift =
233 kVirtualRegisterShift + kVirtualRegisterWidth;
234
235 class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };
236
237 class LifetimeField
238 : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
239 };
240
241 class VirtualRegisterField
242 : public BitField<unsigned,
243 kVirtualRegisterShift,
244 kVirtualRegisterWidth> {
245 };
246
247 static const int kMaxVirtualRegisters = 1 << (kVirtualRegisterWidth + 1);
248 static const int kMaxFixedIndices = 128;
249
250 bool HasIgnorePolicy() const { return policy() == IGNORE; }
251 bool HasNoPolicy() const { return policy() == NONE; }
252 bool HasAnyPolicy() const {
253 return policy() == ANY;
254 }
255 bool HasFixedPolicy() const {
256 return policy() == FIXED_REGISTER ||
257 policy() == FIXED_DOUBLE_REGISTER ||
258 policy() == FIXED_SLOT;
259 }
260 bool HasRegisterPolicy() const {
261 return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
262 }
263 bool HasSameAsInputPolicy() const {
264 return policy() == SAME_AS_FIRST_INPUT || policy() == SAME_AS_ANY_INPUT;
265 }
266 Policy policy() const { return PolicyField::decode(value_); }
267 void set_policy(Policy policy) {
268 value_ &= ~PolicyField::mask();
269 value_ |= PolicyField::encode(policy);
270 }
271 int fixed_index() const {
272 return static_cast<int>(value_) >> kFixedIndexShift;
273 }
274
275 unsigned virtual_register() const {
276 return VirtualRegisterField::decode(value_);
277 }
278
279 void set_virtual_register(unsigned id) {
280 value_ &= ~VirtualRegisterField::mask();
281 value_ |= VirtualRegisterField::encode(id);
282 }
283
284 LUnallocated* CopyUnconstrained() {
285 LUnallocated* result = new LUnallocated(ANY);
286 result->set_virtual_register(virtual_register());
287 return result;
288 }
289
290 static LUnallocated* cast(LOperand* op) {
291 ASSERT(op->IsUnallocated());
292 return reinterpret_cast<LUnallocated*>(op);
293 }
294
295 bool IsUsedAtStart() {
296 return LifetimeField::decode(value_) == USED_AT_START;
297 }
298
299 private:
300 void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
301 value_ |= PolicyField::encode(policy);
302 value_ |= LifetimeField::encode(lifetime);
303 value_ |= fixed_index << kFixedIndexShift;
304 ASSERT(this->fixed_index() == fixed_index);
305 }
306 };
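
The shift constants above lay out the 32-bit value_ word as: kind in bits [0, 3), policy in [3, 7), lifetime in [7, 8), the virtual register in [8, 25), and a signed fixed index in the top seven bits, which fixed_index() recovers with a sign-extending shift. The standalone sketch below reuses the widths from the header; the encode/decode helpers are hypothetical stand-ins for the BitField classes.

// Bit layout sketch for the LUnallocated word (widths copied from the header).
#include <cassert>

const int kKindFieldWidth = 3;
const int kPolicyWidth = 4;
const int kLifetimeWidth = 1;
const int kVirtualRegisterWidth = 17;

const int kPolicyShift = kKindFieldWidth;                                    // 3
const int kLifetimeShift = kPolicyShift + kPolicyWidth;                      // 7
const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;           // 8
const int kFixedIndexShift = kVirtualRegisterShift + kVirtualRegisterWidth;  // 25

unsigned EncodeFixedIndex(unsigned word, int fixed_index) {
  return word | (static_cast<unsigned>(fixed_index) << kFixedIndexShift);
}

int DecodeFixedIndex(unsigned word) {
  // Arithmetic shift on the signed view sign-extends, so negative fixed
  // indices come back intact, as in LUnallocated::fixed_index().
  return static_cast<int>(word) >> kFixedIndexShift;
}

int main() {
  assert(kFixedIndexShift == 25);
  unsigned word = 0;
  word = EncodeFixedIndex(word, -3);   // a negative fixed index
  assert(DecodeFixedIndex(word) == -3);
  return 0;
}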
307
308
309 class LMoveOperands BASE_EMBEDDED {
310 public:
311 LMoveOperands(LOperand* from, LOperand* to) : from_(from), to_(to) { }
312
313 LOperand* from() const { return from_; }
314 LOperand* to() const { return to_; }
315 bool IsRedundant() const {
316 return IsEliminated() || from_->Equals(to_) || IsIgnored();
317 }
318 bool IsEliminated() const { return from_ == NULL; }
319 bool IsIgnored() const {
320 if (to_ != NULL && to_->IsUnallocated() &&
321 LUnallocated::cast(to_)->HasIgnorePolicy()) {
322 return true;
323 }
324 return false;
325 }
326
327 void Eliminate() { from_ = to_ = NULL; }
328
329 private:
330 LOperand* from_;
331 LOperand* to_;
332 };
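
A move is redundant when it has already been eliminated, when it copies an operand onto itself, or when its destination has the IGNORE policy; a gap's parallel move can therefore simply blank such moves out. The fragment below illustrates only the first two cases on a simplified stand-in type (the real logic lives in LGap and the gap resolver, not here):

// Illustrative filtering of redundant moves (simplified stand-in types).
#include <cstdio>

struct Operand { int id; };

struct Move {
  Operand* from;
  Operand* to;
  bool IsEliminated() const { return from == 0; }
  bool IsRedundant() const {
    return IsEliminated() || (to != 0 && from->id == to->id);  // self-move
  }
  void Eliminate() { from = to = 0; }
};

int main() {
  Operand a = {1};
  Operand b = {2};
  Move moves[2] = { {&a, &b}, {&a, &a} };
  for (int i = 0; i < 2; ++i) {
    if (moves[i].IsRedundant()) moves[i].Eliminate();
  }
  // Prints "0 1": the real move survives, the self-move is gone.
  std::printf("%d %d\n", moves[0].IsEliminated(), moves[1].IsEliminated());
  return 0;
}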
333
334
335 class LConstantOperand: public LOperand {
336 public:
337 static LConstantOperand* Create(int index) {
338 ASSERT(index >= 0);
339 if (index < kNumCachedOperands) return &cache[index];
340 return new LConstantOperand(index);
341 }
342
343 static LConstantOperand* cast(LOperand* op) {
344 ASSERT(op->IsConstantOperand());
345 return reinterpret_cast<LConstantOperand*>(op);
346 }
347
348 static void SetupCache();
349
350 private:
351 static const int kNumCachedOperands = 128;
352 static LConstantOperand cache[];
353
354 LConstantOperand() : LOperand() { }
355 explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
356 };
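
LConstantOperand and the similar classes below (LStackSlot, LDoubleStackSlot, LRegister, LDoubleRegister) share one pattern: Create() returns a pointer into a statically allocated cache for indices below kNumCachedOperands, so common operands are shared singletons, and only rare large indices are heap-allocated after SetupCache() has initialized the cache. The sketch below shows the pattern with hypothetical names; it is not the V8 implementation.

// Generic "cache small indices" sketch (illustrative only).
#include <cassert>

struct Slot {
  int index;
};

const int kNumCachedSlots = 128;
static Slot slot_cache[kNumCachedSlots];

void SetupSlotCache() {
  for (int i = 0; i < kNumCachedSlots; ++i) slot_cache[i].index = i;
}

Slot* CreateSlot(int index) {
  assert(index >= 0);
  if (index < kNumCachedSlots) return &slot_cache[index];  // shared, preallocated
  Slot* result = new Slot;                                 // rare: heap-allocate
  result->index = index;
  return result;
}

int main() {
  SetupSlotCache();
  Slot* a = CreateSlot(3);
  Slot* b = CreateSlot(3);
  assert(a == b && a->index == 3);   // cached slots are shared objects
  Slot* big = CreateSlot(500);       // beyond the cache: a fresh object
  assert(big->index == 500);
  delete big;
  return 0;
}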
357
358
359 class LArgument: public LOperand {
360 public:
361 explicit LArgument(int index) : LOperand(ARGUMENT, index) { }
362
363 static LArgument* cast(LOperand* op) {
364 ASSERT(op->IsArgument());
365 return reinterpret_cast<LArgument*>(op);
366 }
367 };
368
369
370 class LStackSlot: public LOperand {
371 public:
372 static LStackSlot* Create(int index) {
373 ASSERT(index >= 0);
374 if (index < kNumCachedOperands) return &cache[index];
375 return new LStackSlot(index);
376 }
377
378 static LStackSlot* cast(LOperand* op) {
379 ASSERT(op->IsStackSlot());
380 return reinterpret_cast<LStackSlot*>(op);
381 }
382
383 static void SetupCache();
384
385 private:
386 static const int kNumCachedOperands = 128;
387 static LStackSlot cache[];
388
389 LStackSlot() : LOperand() { }
390 explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
391 };
392
393
394 class LDoubleStackSlot: public LOperand {
395 public:
396 static LDoubleStackSlot* Create(int index) {
397 ASSERT(index >= 0);
398 if (index < kNumCachedOperands) return &cache[index];
399 return new LDoubleStackSlot(index);
400 }
401
402 static LDoubleStackSlot* cast(LOperand* op) {
403 ASSERT(op->IsDoubleStackSlot());
404 return reinterpret_cast<LDoubleStackSlot*>(op);
405 }
406
407 static void SetupCache();
408
409 private:
410 static const int kNumCachedOperands = 128;
411 static LDoubleStackSlot cache[];
412
413 LDoubleStackSlot() : LOperand() { }
414 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
415 };
416
417
418 class LRegister: public LOperand {
419 public:
420 static LRegister* Create(int index) {
421 ASSERT(index >= 0);
422 if (index < kNumCachedOperands) return &cache[index];
423 return new LRegister(index);
424 }
425
426 static LRegister* cast(LOperand* op) {
427 ASSERT(op->IsRegister());
428 return reinterpret_cast<LRegister*>(op);
429 }
430
431 static void SetupCache();
432
433 private:
434 static const int kNumCachedOperands = 16;
435 static LRegister cache[];
436
437 LRegister() : LOperand() { }
438 explicit LRegister(int index) : LOperand(REGISTER, index) { }
439 };
440
441
442 class LDoubleRegister: public LOperand {
443 public:
444 static LDoubleRegister* Create(int index) {
445 ASSERT(index >= 0);
446 if (index < kNumCachedOperands) return &cache[index];
447 return new LDoubleRegister(index);
448 }
449
450 static LDoubleRegister* cast(LOperand* op) {
451 ASSERT(op->IsDoubleRegister());
452 return reinterpret_cast<LDoubleRegister*>(op);
453 }
454
455 static void SetupCache();
456
457 private:
458 static const int kNumCachedOperands = 16;
459 static LDoubleRegister cache[];
460
461 LDoubleRegister() : LOperand() { }
462 explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
463 };
464
465
466 // A register-allocator view of a Lithium instruction. It contains the id of
467 // the output operand and a list of input operand uses.
468 class InstructionSummary: public ZoneObject {
469 public:
470 InstructionSummary()
471 : output_operand_(NULL), input_count_(0), operands_(4), is_call_(false) {}
472
473 // Output operands.
474 LOperand* Output() const { return output_operand_; }
475 void SetOutput(LOperand* output) {
476 ASSERT(output_operand_ == NULL);
477 output_operand_ = output;
478 }
479
480 // Input operands.
481 int InputCount() const { return input_count_; }
482 LOperand* InputAt(int i) const {
483 ASSERT(i < input_count_);
484 return operands_[i];
485 }
486 void AddInput(LOperand* input) {
487 operands_.InsertAt(input_count_, input);
488 input_count_++;
489 }
490
491 // Temporary operands.
492 int TempCount() const { return operands_.length() - input_count_; }
493 LOperand* TempAt(int i) const { return operands_[i + input_count_]; }
494 void AddTemp(LOperand* temp) { operands_.Add(temp); }
495
496 void MarkAsCall() { is_call_ = true; }
497 bool IsCall() const { return is_call_; }
498
499 private:
500 LOperand* output_operand_;
501 int input_count_;
502 ZoneList<LOperand*> operands_;
503 bool is_call_;
504 };
505
506 // Representation of the non-empty interval [start,end[.
507 class UseInterval: public ZoneObject {
508 public:
509 UseInterval(LifetimePosition start, LifetimePosition end)
510 : start_(start), end_(end), next_(NULL) {
511 ASSERT(start.Value() < end.Value());
512 }
513
514 LifetimePosition start() const { return start_; }
515 LifetimePosition end() const { return end_; }
516 UseInterval* next() const { return next_; }
517
518 // Split this interval at the given position without affecting the
519 // live range that owns it. The interval must contain the position.
520 void SplitAt(LifetimePosition pos);
521
523 // If this interval intersects with other, return the smallest position
523 // that belongs to both of them.
524 LifetimePosition Intersect(const UseInterval* other) const {
525 if (other->start().Value() < start_.Value()) return other->Intersect(this);
526 if (other->start().Value() < end_.Value()) return other->start();
527 return LifetimePosition::Invalid();
528 }
529
530 bool Contains(LifetimePosition point) const {
531 return start_.Value() <= point.Value() && point.Value() < end_.Value();
532 }
533
534 private:
535 void set_start(LifetimePosition start) { start_ = start; }
536 void set_next(UseInterval* next) { next_ = next; }
537
538 LifetimePosition start_;
539 LifetimePosition end_;
540 UseInterval* next_;
541
542 friend class LiveRange; // Assigns to start_.
543 };
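
Intersect() relies on the half-open representation described above: it first swaps the roles so that the receiver starts no later than the argument, and the overlap, if there is one, then begins at the argument's start. Here is a standalone sketch on plain integer positions, with -1 standing in for LifetimePosition::Invalid():

// Sketch of UseInterval intersection on half-open integer intervals.
#include <cassert>

struct Interval {
  int start;
  int end;  // half-open: [start, end)

  // Smallest position contained in both intervals, or -1 if they are disjoint.
  int Intersect(const Interval& other) const {
    if (other.start < start) return other.Intersect(*this);  // normalize order
    if (other.start < end) return other.start;
    return -1;
  }
  bool Contains(int point) const { return start <= point && point < end; }
};

int main() {
  Interval a = {2, 8};
  Interval b = {6, 12};
  Interval c = {9, 12};
  assert(a.Intersect(b) == 6);     // overlap [6, 8) begins at 6
  assert(a.Intersect(c) == -1);    // [2, 8) and [9, 12) are disjoint
  assert(a.Contains(7) && !a.Contains(8));
  return 0;
}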
544
545 // Representation of a use position.
546 class UsePosition: public ZoneObject {
547 public:
548 UsePosition(LifetimePosition pos, LOperand* operand)
549 : operand_(operand),
550 hint_(NULL),
551 pos_(pos),
552 next_(NULL),
553 requires_reg_(false),
554 register_beneficial_(true) {
555 if (operand_ != NULL && operand_->IsUnallocated()) {
556 LUnallocated* unalloc = LUnallocated::cast(operand_);
557 requires_reg_ = unalloc->HasRegisterPolicy();
558 register_beneficial_ = !unalloc->HasAnyPolicy();
559 }
560 ASSERT(pos_.IsValid());
561 }
562
563 LOperand* operand() const { return operand_; }
564 bool HasOperand() const { return operand_ != NULL; }
565
566 LOperand* hint() const { return hint_; }
567 void set_hint(LOperand* hint) { hint_ = hint; }
568 bool HasHint() const { return hint_ != NULL && !hint_->IsUnallocated(); }
569 bool RequiresRegister() const;
570 bool RegisterIsBeneficial() const;
571
572 LifetimePosition pos() const { return pos_; }
573 UsePosition* next() const { return next_; }
574
575 private:
576 void set_next(UsePosition* next) { next_ = next; }
577
578 LOperand* operand_;
579 LOperand* hint_;
580 LifetimePosition pos_;
581 UsePosition* next_;
582 bool requires_reg_;
583 bool register_beneficial_;
584
585 friend class LiveRange;
586 };
587
588 // Representation of SSA values' live ranges as a collection of (continuous)
589 // intervals over the instruction ordering.
590 class LiveRange: public ZoneObject {
591 public:
592 static const int kInvalidAssignment = 0x7fffffff;
593
594 explicit LiveRange(int id)
595 : id_(id),
596 spilled_(false),
597 assigned_double_(false),
598 assigned_register_(kInvalidAssignment),
599 last_interval_(NULL),
600 first_interval_(NULL),
601 first_pos_(NULL),
602 parent_(NULL),
603 next_(NULL),
604 current_interval_(NULL),
605 last_processed_use_(NULL),
606 spill_start_index_(kMaxInt) {
607 spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
608 }
609
610 UseInterval* first_interval() const { return first_interval_; }
611 UsePosition* first_pos() const { return first_pos_; }
612 LiveRange* parent() const { return parent_; }
613 LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
614 LiveRange* next() const { return next_; }
615 bool IsChild() const { return parent() != NULL; }
616 bool IsParent() const { return parent() == NULL; }
617 int id() const { return id_; }
618 bool IsFixed() const { return id_ < 0; }
619 bool IsEmpty() const { return first_interval() == NULL; }
620 LOperand* CreateAssignedOperand();
621 int assigned_register() const { return assigned_register_; }
622 int spill_start_index() const { return spill_start_index_; }
623 void set_assigned_register(int reg, bool double_reg) {
624 ASSERT(!HasRegisterAssigned() && !IsSpilled());
625 assigned_register_ = reg;
626 assigned_double_ = double_reg;
627 ConvertOperands();
628 }
629 void MakeSpilled() {
630 ASSERT(!IsSpilled());
631 ASSERT(TopLevel()->HasAllocatedSpillOperand());
632 spilled_ = true;
633 assigned_register_ = kInvalidAssignment;
634 ConvertOperands();
635 }
636
637 // Returns the use position in this live range that follows both start
638 // and the last processed use position.
639 // Modifies internal state of live range!
640 UsePosition* NextUsePosition(LifetimePosition start);
641
642 // Returns the use position for which a register is required in this live
643 // range and which follows both start and the last processed use position.
644 // Modifies internal state of live range!
645 UsePosition* NextRegisterPosition(LifetimePosition start);
646
647 // Returns the use position for which a register is beneficial in this live
648 // range and which follows both start and the last processed use position.
649 // Modifies internal state of live range!
650 UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);
651
652 // Can this live range be spilled at this position?
653 bool CanBeSpilled(LifetimePosition pos);
654
655 void SplitAt(LifetimePosition position, LiveRange* result);
656
657 bool IsDouble() const { return assigned_double_; }
658 bool HasRegisterAssigned() const {
659 return assigned_register_ != kInvalidAssignment;
660 }
661 bool IsSpilled() const { return spilled_; }
662 UsePosition* FirstPosWithHint() const;
663
664 LOperand* FirstHint() const {
665 UsePosition* pos = FirstPosWithHint();
666 if (pos != NULL) return pos->hint();
667 return NULL;
668 }
669
670 LifetimePosition Start() const {
671 ASSERT(!IsEmpty());
672 return first_interval()->start();
673 }
674
675 LifetimePosition End() const {
676 ASSERT(!IsEmpty());
677 return last_interval_->end();
678 }
679
680 bool HasAllocatedSpillOperand() const {
681 return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
682 }
683 LOperand* GetSpillOperand() const { return spill_operand_; }
684 void SetSpillOperand(LOperand* operand) {
685 ASSERT(!operand->IsUnallocated());
686 ASSERT(spill_operand_ != NULL);
687 ASSERT(spill_operand_->IsUnallocated());
688 spill_operand_->ConvertTo(operand->kind(), operand->index());
689 }
690
691 void SetSpillStartIndex(int start) {
692 spill_start_index_ = Min(start, spill_start_index_);
693 }
694
695 bool ShouldBeAllocatedBefore(const LiveRange* other) const;
696 bool CanCover(LifetimePosition position) const;
697 bool Covers(LifetimePosition position);
698 LifetimePosition FirstIntersection(LiveRange* other);
699
700
701 // Add a new interval or a new use position to this live range.
702 void EnsureInterval(LifetimePosition start, LifetimePosition end);
703 void AddUseInterval(LifetimePosition start, LifetimePosition end);
704 UsePosition* AddUsePosition(LifetimePosition pos, LOperand* operand);
705 UsePosition* AddUsePosition(LifetimePosition pos);
706
707 // Shorten the most recently added interval by setting a new start.
708 void ShortenTo(LifetimePosition start);
709
710 #ifdef DEBUG
711 // True if target overlaps an existing interval.
712 bool HasOverlap(UseInterval* target) const;
713 void Verify() const;
714 #endif
715
716 private:
717 void ConvertOperands();
718 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
719 void AdvanceLastProcessedMarker(UseInterval* to_start_of,
720 LifetimePosition but_not_past) const;
721
722 int id_;
723 bool spilled_;
724 bool assigned_double_;
725 int assigned_register_;
726 UseInterval* last_interval_;
727 UseInterval* first_interval_;
728 UsePosition* first_pos_;
729 LiveRange* parent_;
730 LiveRange* next_;
731 // This is used as a cache; it doesn't affect correctness.
732 mutable UseInterval* current_interval_;
733 UsePosition* last_processed_use_;
734 LOperand* spill_operand_;
735 int spill_start_index_;
736 };
737
738
739 class LAllocator BASE_EMBEDDED {
740 public:
741 explicit LAllocator(int first_virtual_register, HGraph* graph)
742 : chunk_(NULL),
743 summaries_(0),
744 next_summary_(NULL),
745 summary_stack_(2),
746 live_in_sets_(0),
747 live_ranges_(16),
748 fixed_live_ranges_(8),
749 fixed_double_live_ranges_(8),
750 unhandled_live_ranges_(8),
751 active_live_ranges_(8),
752 inactive_live_ranges_(8),
753 reusable_slots_(8),
754 next_virtual_register_(first_virtual_register),
755 mode_(NONE),
756 num_registers_(-1),
757 graph_(graph),
758 has_osr_entry_(false) {}
759
760 static void Setup();
761 static void TraceAlloc(const char* msg, ...);
762
763 // Lithium translation support.
764 // Record a use of an input operand in the current instruction.
765 void RecordUse(HValue* value, LUnallocated* operand);
766 // Record the definition of the output operand.
767 void RecordDefinition(HInstruction* instr, LUnallocated* operand);
768 // Record a temporary operand.
769 void RecordTemporary(LUnallocated* operand);
770
771 // Marks the current instruction as a call.
772 void MarkAsCall();
773
774 // Checks whether the value of a given virtual register is tagged.
775 bool HasTaggedValue(int virtual_register) const;
776
777 // Checks whether the value of a given virtual register is a double.
778 bool HasDoubleValue(int virtual_register) const;
779
780 // Begin a new instruction.
781 void BeginInstruction();
782
783 // Summarize the current instruction.
784 void SummarizeInstruction(int index);
785
786 // Omit the current instruction (no summary is recorded for it).
787 void OmitInstruction();
788
789 // Control max function size.
790 static int max_initial_value_ids();
791
792 void Allocate(LChunk* chunk);
793
794 const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
795 const ZoneList<LiveRange*>* fixed_live_ranges() const {
796 return &fixed_live_ranges_;
797 }
798 const ZoneList<LiveRange*>* fixed_double_live_ranges() const {
799 return &fixed_double_live_ranges_;
800 }
801
802 LChunk* chunk() const { return chunk_; }
803 HGraph* graph() const { return graph_; }
804
805 void MarkAsOsrEntry() {
806 // There can be only one.
807 ASSERT(!has_osr_entry_);
808 // Simply set a flag to find and process the instruction later.
809 has_osr_entry_ = true;
810 }
811
812 #ifdef DEBUG
813 void Verify() const;
814 #endif
815
816 private:
817 enum OperationMode {
818 NONE,
819 CPU_REGISTERS,
820 XMM_REGISTERS
821 };
822
823 void MeetRegisterConstraints();
824 void ResolvePhis();
825 void BuildLiveRanges();
826 void AllocateGeneralRegisters();
827 void AllocateDoubleRegisters();
828 void ConnectRanges();
829 void ResolveControlFlow();
830 void PopulatePointerMaps();
831 void ProcessOsrEntry();
832 void AllocateRegisters();
833 bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
834 inline bool SafePointsAreInOrder() const;
835
836 // Liveness analysis support.
837 void InitializeLivenessAnalysis();
838 BitVector* ComputeLiveOut(HBasicBlock* block);
839 void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
840 void ProcessInstructions(HBasicBlock* block, BitVector* live);
841 void MeetRegisterConstraints(HBasicBlock* block);
842 void MeetConstraintsBetween(InstructionSummary* first,
843 InstructionSummary* second,
844 int gap_index);
845 void ResolvePhis(HBasicBlock* block);
846
847 // Helper methods for building intervals.
848 LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
849 LiveRange* LiveRangeFor(LOperand* operand);
850 void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
851 void Use(LifetimePosition block_start,
852 LifetimePosition position,
853 LOperand* operand,
854 LOperand* hint);
855 void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);
856
857 // Helper methods for updating the life range lists.
858 void AddToActive(LiveRange* range);
859 void AddToInactive(LiveRange* range);
860 void AddToUnhandledSorted(LiveRange* range);
861 void AddToUnhandledUnsorted(LiveRange* range);
862 void SortUnhandled();
863 bool UnhandledIsSorted();
864 void ActiveToHandled(LiveRange* range);
865 void ActiveToInactive(LiveRange* range);
866 void InactiveToHandled(LiveRange* range);
867 void InactiveToActive(LiveRange* range);
868 void FreeSpillSlot(LiveRange* range);
869 LOperand* TryReuseSpillSlot(LiveRange* range);
870
871 // Helper methods for allocating registers.
872 bool TryAllocateFreeReg(LiveRange* range);
873 void AllocateBlockedReg(LiveRange* range);
874 void SplitAndSpillIntersecting(LiveRange* range);
875 LifetimePosition FindOptimalSplitPos(LifetimePosition start,
876 LifetimePosition end);
877 LiveRange* Split(LiveRange* range,
878 LifetimePosition start,
879 LifetimePosition end);
880 LiveRange* Split(LiveRange* range, LifetimePosition split_pos);
881 void SplitAndSpill(LiveRange* range,
882 LifetimePosition start,
883 LifetimePosition end);
884 void SplitAndSpill(LiveRange* range, LifetimePosition at);
885 void Spill(LiveRange* range);
886 bool IsBlockBoundary(LifetimePosition pos);
887 void AddGapMove(int pos, LiveRange* prev, LiveRange* next);
888
889 // Helper methods for resolving control flow.
890 void ResolveControlFlow(LiveRange* range,
891 HBasicBlock* block,
892 HBasicBlock* pred);
893
894 // Return the parallel move that should be used to connect ranges split at the
895 // given position.
896 LParallelMove* GetConnectingParallelMove(LifetimePosition pos);
897
898 // Return the block which contains the given lifetime position.
899 HBasicBlock* GetBlock(LifetimePosition pos);
900
901 // Current active summary.
902 InstructionSummary* current_summary() const { return summary_stack_.last(); }
903
904 // Get summary for given instruction index.
905 InstructionSummary* GetSummary(int index) const { return summaries_[index]; }
906
907 // Helper methods for the fixed registers.
908 int RegisterCount() const;
909 static int FixedLiveRangeID(int index) { return -index - 1; }
910 static int FixedDoubleLiveRangeID(int index);
911 LiveRange* FixedLiveRangeFor(int index);
912 LiveRange* FixedDoubleLiveRangeFor(int index);
913 LiveRange* LiveRangeFor(int index);
914 HPhi* LookupPhi(LOperand* operand) const;
915 LGap* GetLastGap(HBasicBlock* block) const;
916
917 LChunk* chunk_;
918 ZoneList<InstructionSummary*> summaries_;
919 InstructionSummary* next_summary_;
920
921 ZoneList<InstructionSummary*> summary_stack_;
922
923 // During liveness analysis keep a mapping from block id to live_in sets
924 // for blocks already analyzed.
925 ZoneList<BitVector*> live_in_sets_;
926
927 // Liveness analysis results.
928 ZoneList<LiveRange*> live_ranges_;
929
930 // Lists of live ranges
931 ZoneList<LiveRange*> fixed_live_ranges_;
932 ZoneList<LiveRange*> fixed_double_live_ranges_;
933 ZoneList<LiveRange*> unhandled_live_ranges_;
934 ZoneList<LiveRange*> active_live_ranges_;
935 ZoneList<LiveRange*> inactive_live_ranges_;
936 ZoneList<LiveRange*> reusable_slots_;
937
938 // Next virtual register number to be assigned to temporaries.
939 int next_virtual_register_;
940
941 OperationMode mode_;
942 int num_registers_;
943
944 HGraph* graph_;
945
946 bool has_osr_entry_;
947
948 DISALLOW_COPY_AND_ASSIGN(LAllocator);
949 };
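
The unhandled, active and inactive range lists and the transition helpers above (AddToUnhandledSorted, ActiveToHandled, InactiveToActive, ...) are the standard machinery of a linear-scan register allocator: ranges are visited in order of increasing start position, ranges that have ended release their registers, and a range that cannot get a free register is split and/or spilled. The skeleton below is a heavily simplified, generic sketch of that loop on plain integer positions; it has no inactive list, no splitting and no fixed ranges, and it is not V8's actual AllocateRegisters().

// Simplified linear-scan skeleton (illustrative only).
#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <vector>

struct Range {
  int start;  // first lifetime position covered
  int end;    // one past the last covered position
  int reg;    // assigned register index, or -1 if none (i.e. spilled)
};

void AllocateRegisters(std::vector<Range>& unhandled, int num_registers) {
  std::sort(unhandled.begin(), unhandled.end(),
            [](const Range& a, const Range& b) { return a.start < b.start; });
  std::vector<Range*> active;
  std::vector<bool> free_reg(num_registers, true);

  for (std::size_t c = 0; c < unhandled.size(); ++c) {
    Range& current = unhandled[c];
    // Retire active ranges that end before the current range starts
    // (the ActiveToHandled transition) and release their registers.
    for (std::size_t i = 0; i < active.size();) {
      if (active[i]->end <= current.start) {
        free_reg[active[i]->reg] = true;
        active.erase(active.begin() + i);
      } else {
        ++i;
      }
    }
    // Try to grab a free register (cf. TryAllocateFreeReg). A real allocator
    // would otherwise split or spill (AllocateBlockedReg); here the range
    // simply stays unassigned.
    for (int r = 0; r < num_registers; ++r) {
      if (free_reg[r]) {
        current.reg = r;
        free_reg[r] = false;
        active.push_back(&current);
        break;
      }
    }
  }
}

int main() {
  std::vector<Range> ranges = {{0, 10, -1}, {2, 6, -1}, {4, 12, -1}};
  AllocateRegisters(ranges, 2);
  for (std::size_t i = 0; i < ranges.size(); ++i) {
    // Prints r0, r1 and r-1: the third range finds both registers busy.
    std::printf("[%d,%d) -> r%d\n", ranges[i].start, ranges[i].end, ranges[i].reg);
  }
  return 0;
}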
950
951
952 } } // namespace v8::internal
953
954 #endif // V8_LITHIUM_ALLOCATOR_H_