Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(48)

Side by Side Diff: src/compiler/instruction.h

Issue 426233002: Land the Fan (disabled) (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Review feedback, rebase and "git cl format" Created 6 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/compiler/ia32/linkage-ia32.cc ('k') | src/compiler/instruction.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_COMPILER_INSTRUCTION_H_
6 #define V8_COMPILER_INSTRUCTION_H_
7
8 #include <deque>
9 #include <map>
10 #include <set>
11
12 // TODO(titzer): don't include the assembler?
13 #include "src/assembler.h"
14 #include "src/compiler/common-operator.h"
15 #include "src/compiler/frame.h"
16 #include "src/compiler/graph.h"
17 #include "src/compiler/instruction-codes.h"
18 #include "src/compiler/opcodes.h"
19 #include "src/compiler/schedule.h"
20 #include "src/zone-allocator.h"
21
22 namespace v8 {
23 namespace internal {
24
25 // Forward declarations.
26 class OStream;
27
28 namespace compiler {
29
30 // Forward declarations.
31 class Linkage;
32
33 // A couple of reserved opcodes are used for internal use.
34 const InstructionCode kGapInstruction = -1;
35 const InstructionCode kBlockStartInstruction = -2;
36 const InstructionCode kSourcePositionInstruction = -3;
37
38
39 #define INSTRUCTION_OPERAND_LIST(V) \
40 V(Constant, CONSTANT, 128) \
41 V(Immediate, IMMEDIATE, 128) \
42 V(StackSlot, STACK_SLOT, 128) \
43 V(DoubleStackSlot, DOUBLE_STACK_SLOT, 128) \
44 V(Register, REGISTER, Register::kNumRegisters) \
45 V(DoubleRegister, DOUBLE_REGISTER, DoubleRegister::kMaxNumRegisters)
46
// Operand to an instruction: either still unallocated (pre register
// allocation), a constant/immediate, or an allocated location (register or
// stack slot).  The kind and index are packed into a single unsigned word:
// the low KindField::kSize bits hold the Kind, the remaining bits the index.
class InstructionOperand : public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT,
    IMMEDIATE,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER
  };

  InstructionOperand() : value_(KindField::encode(INVALID)) {}
  InstructionOperand(Kind kind, int index) { ConvertTo(kind, index); }

  Kind kind() const { return KindField::decode(value_); }
  // The index may be negative (e.g. stack slot indices), so it is decoded
  // with a signed arithmetic shift instead of BitField::decode.
  int index() const { return static_cast<int>(value_) >> KindField::kSize; }
#define INSTRUCTION_OPERAND_PREDICATE(name, type, number) \
  bool Is##name() const { return kind() == type; }
  INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_PREDICATE)
  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
  INSTRUCTION_OPERAND_PREDICATE(Ignored, INVALID, 0)
#undef INSTRUCTION_OPERAND_PREDICATE
  // Operands are equal iff both kind and index are equal.
  bool Equals(InstructionOperand* other) const {
    return value_ == other->value_;
  }

  // Re-encodes this operand in place with a new kind and index.
  void ConvertTo(Kind kind, int index) {
    if (kind == REGISTER || kind == DOUBLE_REGISTER) ASSERT(index >= 0);
    value_ = KindField::encode(kind);
    value_ |= index << KindField::kSize;
    ASSERT(this->index() == index);  // Verify the index fit in the field.
  }

  // Calls SetUpCache()/TearDownCache() for each subclass.
  static void SetUpCaches();
  static void TearDownCaches();

 protected:
  typedef BitField<Kind, 0, 3> KindField;

  unsigned value_;
};
91
92 OStream& operator<<(OStream& os, const InstructionOperand& op);
93
94 class UnallocatedOperand : public InstructionOperand {
95 public:
96 enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };
97
98 enum ExtendedPolicy {
99 NONE,
100 ANY,
101 FIXED_REGISTER,
102 FIXED_DOUBLE_REGISTER,
103 MUST_HAVE_REGISTER,
104 SAME_AS_FIRST_INPUT
105 };
106
107 // Lifetime of operand inside the instruction.
108 enum Lifetime {
109 // USED_AT_START operand is guaranteed to be live only at
110 // instruction start. Register allocator is free to assign the same register
111 // to some other operand used inside instruction (i.e. temporary or
112 // output).
113 USED_AT_START,
114
115 // USED_AT_END operand is treated as live until the end of
116 // instruction. This means that register allocator will not reuse it's
117 // register for any other operand inside instruction.
118 USED_AT_END
119 };
120
121 explicit UnallocatedOperand(ExtendedPolicy policy)
122 : InstructionOperand(UNALLOCATED, 0) {
123 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
124 value_ |= ExtendedPolicyField::encode(policy);
125 value_ |= LifetimeField::encode(USED_AT_END);
126 }
127
128 UnallocatedOperand(BasicPolicy policy, int index)
129 : InstructionOperand(UNALLOCATED, 0) {
130 ASSERT(policy == FIXED_SLOT);
131 value_ |= BasicPolicyField::encode(policy);
132 value_ |= index << FixedSlotIndexField::kShift;
133 ASSERT(this->fixed_slot_index() == index);
134 }
135
136 UnallocatedOperand(ExtendedPolicy policy, int index)
137 : InstructionOperand(UNALLOCATED, 0) {
138 ASSERT(policy == FIXED_REGISTER || policy == FIXED_DOUBLE_REGISTER);
139 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
140 value_ |= ExtendedPolicyField::encode(policy);
141 value_ |= LifetimeField::encode(USED_AT_END);
142 value_ |= FixedRegisterField::encode(index);
143 }
144
145 UnallocatedOperand(ExtendedPolicy policy, Lifetime lifetime)
146 : InstructionOperand(UNALLOCATED, 0) {
147 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
148 value_ |= ExtendedPolicyField::encode(policy);
149 value_ |= LifetimeField::encode(lifetime);
150 }
151
152 UnallocatedOperand* CopyUnconstrained(Zone* zone) {
153 UnallocatedOperand* result = new (zone) UnallocatedOperand(ANY);
154 result->set_virtual_register(virtual_register());
155 return result;
156 }
157
158 static const UnallocatedOperand* cast(const InstructionOperand* op) {
159 ASSERT(op->IsUnallocated());
160 return static_cast<const UnallocatedOperand*>(op);
161 }
162
163 static UnallocatedOperand* cast(InstructionOperand* op) {
164 ASSERT(op->IsUnallocated());
165 return static_cast<UnallocatedOperand*>(op);
166 }
167
168 // The encoding used for UnallocatedOperand operands depends on the policy
169 // that is
170 // stored within the operand. The FIXED_SLOT policy uses a compact encoding
171 // because it accommodates a larger pay-load.
172 //
173 // For FIXED_SLOT policy:
174 // +------------------------------------------+
175 // | slot_index | vreg | 0 | 001 |
176 // +------------------------------------------+
177 //
178 // For all other (extended) policies:
179 // +------------------------------------------+
180 // | reg_index | L | PPP | vreg | 1 | 001 | L ... Lifetime
181 // +------------------------------------------+ P ... Policy
182 //
183 // The slot index is a signed value which requires us to decode it manually
184 // instead of using the BitField utility class.
185
186 // The superclass has a KindField.
187 STATIC_ASSERT(KindField::kSize == 3);
188
189 // BitFields for all unallocated operands.
190 class BasicPolicyField : public BitField<BasicPolicy, 3, 1> {};
191 class VirtualRegisterField : public BitField<unsigned, 4, 18> {};
192
193 // BitFields specific to BasicPolicy::FIXED_SLOT.
194 class FixedSlotIndexField : public BitField<int, 22, 10> {};
195
196 // BitFields specific to BasicPolicy::EXTENDED_POLICY.
197 class ExtendedPolicyField : public BitField<ExtendedPolicy, 22, 3> {};
198 class LifetimeField : public BitField<Lifetime, 25, 1> {};
199 class FixedRegisterField : public BitField<int, 26, 6> {};
200
201 static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1;
202 static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize;
203 static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1;
204 static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1));
205
206 // Predicates for the operand policy.
207 bool HasAnyPolicy() const {
208 return basic_policy() == EXTENDED_POLICY && extended_policy() == ANY;
209 }
210 bool HasFixedPolicy() const {
211 return basic_policy() == FIXED_SLOT ||
212 extended_policy() == FIXED_REGISTER ||
213 extended_policy() == FIXED_DOUBLE_REGISTER;
214 }
215 bool HasRegisterPolicy() const {
216 return basic_policy() == EXTENDED_POLICY &&
217 extended_policy() == MUST_HAVE_REGISTER;
218 }
219 bool HasSameAsInputPolicy() const {
220 return basic_policy() == EXTENDED_POLICY &&
221 extended_policy() == SAME_AS_FIRST_INPUT;
222 }
223 bool HasFixedSlotPolicy() const { return basic_policy() == FIXED_SLOT; }
224 bool HasFixedRegisterPolicy() const {
225 return basic_policy() == EXTENDED_POLICY &&
226 extended_policy() == FIXED_REGISTER;
227 }
228 bool HasFixedDoubleRegisterPolicy() const {
229 return basic_policy() == EXTENDED_POLICY &&
230 extended_policy() == FIXED_DOUBLE_REGISTER;
231 }
232
233 // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
234 BasicPolicy basic_policy() const { return BasicPolicyField::decode(value_); }
235
236 // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
237 ExtendedPolicy extended_policy() const {
238 ASSERT(basic_policy() == EXTENDED_POLICY);
239 return ExtendedPolicyField::decode(value_);
240 }
241
242 // [fixed_slot_index]: Only for FIXED_SLOT.
243 int fixed_slot_index() const {
244 ASSERT(HasFixedSlotPolicy());
245 return static_cast<int>(value_) >> FixedSlotIndexField::kShift;
246 }
247
248 // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_DOUBLE_REGISTER.
249 int fixed_register_index() const {
250 ASSERT(HasFixedRegisterPolicy() || HasFixedDoubleRegisterPolicy());
251 return FixedRegisterField::decode(value_);
252 }
253
254 // [virtual_register]: The virtual register ID for this operand.
255 int virtual_register() const { return VirtualRegisterField::decode(value_); }
256 void set_virtual_register(unsigned id) {
257 value_ = VirtualRegisterField::update(value_, id);
258 }
259
260 // [lifetime]: Only for non-FIXED_SLOT.
261 bool IsUsedAtStart() {
262 ASSERT(basic_policy() == EXTENDED_POLICY);
263 return LifetimeField::decode(value_) == USED_AT_START;
264 }
265 };
266
267
// A single source->destination move, as contained in a ParallelMove.
class MoveOperands V8_FINAL BASE_EMBEDDED {
 public:
  MoveOperands(InstructionOperand* source, InstructionOperand* destination)
      : source_(source), destination_(destination) {}

  InstructionOperand* source() const { return source_; }
  void set_source(InstructionOperand* operand) { source_ = operand; }

  InstructionOperand* destination() const { return destination_; }
  void set_destination(InstructionOperand* operand) { destination_ = operand; }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const { return destination_ == NULL && source_ != NULL; }

  // True if this move reads the given operand as its source, i.e. the
  // operand must not be overwritten before this move has been performed.
  bool Blocks(InstructionOperand* operand) const {
    return !IsEliminated() && source()->Equals(operand);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded or constant.
  bool IsRedundant() const {
    return IsEliminated() || source_->Equals(destination_) || IsIgnored() ||
           (destination_ != NULL && destination_->IsConstant());
  }

  // True if the destination is present but of the ignored (INVALID) kind.
  bool IsIgnored() const {
    return destination_ != NULL && destination_->IsIgnored();
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() { source_ = destination_ = NULL; }
  bool IsEliminated() const {
    // A cleared source implies a cleared destination (never the reverse;
    // see IsPending above).
    ASSERT(source_ != NULL || destination_ == NULL);
    return source_ == NULL;
  }

 private:
  InstructionOperand* source_;
  InstructionOperand* destination_;
};
310
311 OStream& operator<<(OStream& os, const MoveOperands& mo);
312
// A statically-typed wrapper for one particular InstructionOperand kind.
// The first kNumCachedOperands indices share preallocated instances from a
// per-kind static cache; larger indices are allocated in the given zone.
template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
class SubKindOperand V8_FINAL : public InstructionOperand {
 public:
  static SubKindOperand* Create(int index, Zone* zone) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new (zone) SubKindOperand(index);
  }

  static SubKindOperand* cast(InstructionOperand* op) {
    ASSERT(op->kind() == kOperandKind);
    return reinterpret_cast<SubKindOperand*>(op);
  }

  // Allocate/release the static cache array; invoked for every subkind by
  // InstructionOperand::SetUpCaches()/TearDownCaches().
  static void SetUpCache();
  static void TearDownCache();

 private:
  static SubKindOperand* cache;

  SubKindOperand() : InstructionOperand() {}
  explicit SubKindOperand(int index)
      : InstructionOperand(kOperandKind, index) {}
};
337
338
339 #define INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number) \
340 typedef SubKindOperand<InstructionOperand::type, number> name##Operand;
341 INSTRUCTION_OPERAND_LIST(INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS)
342 #undef INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS
343
344
345 class ParallelMove V8_FINAL : public ZoneObject {
346 public:
347 explicit ParallelMove(Zone* zone) : move_operands_(4, zone) {}
348
349 void AddMove(InstructionOperand* from, InstructionOperand* to, Zone* zone) {
350 move_operands_.Add(MoveOperands(from, to), zone);
351 }
352
353 bool IsRedundant() const;
354
355 ZoneList<MoveOperands>* move_operands() { return &move_operands_; }
356 const ZoneList<MoveOperands>* move_operands() const {
357 return &move_operands_;
358 }
359
360 private:
361 ZoneList<MoveOperands> move_operands_;
362 };
363
364 OStream& operator<<(OStream& os, const ParallelMove& pm);
365
// Records which instruction operands hold pointers at a given instruction
// position.  Untagged operands may also be recorded; they are subtracted
// from the pointer set when the map is normalized.
class PointerMap V8_FINAL : public ZoneObject {
 public:
  explicit PointerMap(Zone* zone)
      : pointer_operands_(8, zone),
        untagged_operands_(0, zone),
        instruction_position_(-1) {}  // -1 means "position not yet set".

  // Removes every recorded untagged operand from the pointer set, clears the
  // untagged list, and returns the remaining pointer operands.
  const ZoneList<InstructionOperand*>* GetNormalizedOperands() {
    for (int i = 0; i < untagged_operands_.length(); ++i) {
      RemovePointer(untagged_operands_[i]);
    }
    untagged_operands_.Clear();
    return &pointer_operands_;
  }
  int instruction_position() const { return instruction_position_; }

  // The instruction position may only be assigned once.
  void set_instruction_position(int pos) {
    ASSERT(instruction_position_ == -1);
    instruction_position_ = pos;
  }

  // Defined out of line (see instruction.cc).
  void RecordPointer(InstructionOperand* op, Zone* zone);
  void RemovePointer(InstructionOperand* op);
  void RecordUntagged(InstructionOperand* op, Zone* zone);

 private:
  friend OStream& operator<<(OStream& os, const PointerMap& pm);

  ZoneList<InstructionOperand*> pointer_operands_;
  ZoneList<InstructionOperand*> untagged_operands_;
  int instruction_position_;
};
398
399 OStream& operator<<(OStream& os, const PointerMap& pm);
400
401 // TODO(titzer): s/PointerMap/ReferenceMap/
// A generic instruction with a variable number of operands.  The operands —
// outputs first, then inputs, then temps — are stored contiguously in the
// variable-length operands_ array that trails the object, so instances must
// be created via New() with zone-provided storage of the right size.
class Instruction : public ZoneObject {
 public:
  size_t OutputCount() const { return OutputCountField::decode(bit_field_); }
  InstructionOperand* Output() const { return OutputAt(0); }
  InstructionOperand* OutputAt(size_t i) const {
    ASSERT(i < OutputCount());
    return operands_[i];
  }

  size_t InputCount() const { return InputCountField::decode(bit_field_); }
  InstructionOperand* InputAt(size_t i) const {
    ASSERT(i < InputCount());
    return operands_[OutputCount() + i];  // Inputs follow the outputs.
  }

  size_t TempCount() const { return TempCountField::decode(bit_field_); }
  InstructionOperand* TempAt(size_t i) const {
    ASSERT(i < TempCount());
    return operands_[OutputCount() + InputCount() + i];  // Temps come last.
  }

  // The opcode word also encodes addressing mode and flags; the decoders
  // below pull out the individual fields (see instruction-codes.h).
  InstructionCode opcode() const { return opcode_; }
  ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }
  AddressingMode addressing_mode() const {
    return AddressingModeField::decode(opcode());
  }
  FlagsMode flags_mode() const { return FlagsModeField::decode(opcode()); }
  FlagsCondition flags_condition() const {
    return FlagsConditionField::decode(opcode());
  }

  // TODO(titzer): make control and call into flags.
  static Instruction* New(Zone* zone, InstructionCode opcode) {
    return New(zone, opcode, 0, NULL, 0, NULL, 0, NULL);
  }

  static Instruction* New(Zone* zone, InstructionCode opcode,
                          size_t output_count, InstructionOperand** outputs,
                          size_t input_count, InstructionOperand** inputs,
                          size_t temp_count, InstructionOperand** temps) {
    ASSERT(opcode >= 0);
    ASSERT(output_count == 0 || outputs != NULL);
    ASSERT(input_count == 0 || inputs != NULL);
    ASSERT(temp_count == 0 || temps != NULL);
    InstructionOperand* none = NULL;
    USE(none);
    // operands_[1] already reserves space for one operand, hence the -1.
    // The arithmetic is unsigned, so when all three counts are zero the
    // intermediate wrap-around still cancels out to sizeof(Instruction)
    // minus one pointer slot.
    size_t size = RoundUp(sizeof(Instruction), kPointerSize) +
                  (output_count + input_count + temp_count - 1) * sizeof(none);
    return new (zone->New(size)) Instruction(
        opcode, output_count, outputs, input_count, inputs, temp_count, temps);
  }

  // TODO(titzer): another holdover from lithium days; register allocator
  // should not need to know about control instructions.
  Instruction* MarkAsControl() {
    bit_field_ = IsControlField::update(bit_field_, true);
    return this;  // Return this to allow call chaining.
  }
  Instruction* MarkAsCall() {
    bit_field_ = IsCallField::update(bit_field_, true);
    return this;
  }
  bool IsControl() const { return IsControlField::decode(bit_field_); }
  bool IsCall() const { return IsCallField::decode(bit_field_); }
  bool NeedsPointerMap() const { return IsCall(); }
  bool HasPointerMap() const { return pointer_map_ != NULL; }

  // A block start is itself a kind of gap-moves instruction.
  bool IsGapMoves() const {
    return opcode() == kGapInstruction || opcode() == kBlockStartInstruction;
  }
  bool IsBlockStart() const { return opcode() == kBlockStartInstruction; }
  bool IsSourcePosition() const {
    return opcode() == kSourcePositionInstruction;
  }

  // Calls are conservatively assumed to clobber everything.
  bool ClobbersRegisters() const { return IsCall(); }
  bool ClobbersTemps() const { return IsCall(); }
  bool ClobbersDoubleRegisters() const { return IsCall(); }
  PointerMap* pointer_map() const { return pointer_map_; }

  // A pointer map may be set only once, and only on instructions that need
  // one (calls).
  void set_pointer_map(PointerMap* map) {
    ASSERT(NeedsPointerMap());
    ASSERT_EQ(NULL, pointer_map_);
    pointer_map_ = map;
  }

  // Placement new operator so that we can smash instructions into
  // zone-allocated memory.
  void* operator new(size_t, void* location) { return location; }

 protected:
  // Constructs an operand-less instruction (all counts zero).
  explicit Instruction(InstructionCode opcode)
      : opcode_(opcode),
        bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) |
                   TempCountField::encode(0) | IsCallField::encode(false) |
                   IsControlField::encode(false)),
        pointer_map_(NULL) {}

  // Assumes the caller (New) allocated enough trailing storage to hold all
  // output_count + input_count + temp_count operand pointers.
  Instruction(InstructionCode opcode, size_t output_count,
              InstructionOperand** outputs, size_t input_count,
              InstructionOperand** inputs, size_t temp_count,
              InstructionOperand** temps)
      : opcode_(opcode),
        bit_field_(OutputCountField::encode(output_count) |
                   InputCountField::encode(input_count) |
                   TempCountField::encode(temp_count) |
                   IsCallField::encode(false) | IsControlField::encode(false)),
        pointer_map_(NULL) {
    for (size_t i = 0; i < output_count; ++i) {
      operands_[i] = outputs[i];
    }
    for (size_t i = 0; i < input_count; ++i) {
      operands_[output_count + i] = inputs[i];
    }
    for (size_t i = 0; i < temp_count; ++i) {
      operands_[output_count + input_count + i] = temps[i];
    }
  }

 protected:
  // Layout of bit_field_: 8 bits of output count, 16 bits of input count,
  // 6 bits of temp count, then the call and control flags.
  typedef BitField<size_t, 0, 8> OutputCountField;
  typedef BitField<size_t, 8, 16> InputCountField;
  typedef BitField<size_t, 24, 6> TempCountField;
  typedef BitField<bool, 30, 1> IsCallField;
  typedef BitField<bool, 31, 1> IsControlField;

  InstructionCode opcode_;
  uint32_t bit_field_;
  PointerMap* pointer_map_;
  // Variable-length; the declared size of 1 is only a placeholder (see the
  // size computation in New above).
  InstructionOperand* operands_[1];
};
533
534 OStream& operator<<(OStream& os, const Instruction& instr);
535
536 // Represents moves inserted before an instruction due to register allocation.
537 // TODO(titzer): squash GapInstruction back into Instruction, since essentially
538 // every instruction can possibly have moves inserted before it.
539 class GapInstruction : public Instruction {
540 public:
541 enum InnerPosition {
542 BEFORE,
543 START,
544 END,
545 AFTER,
546 FIRST_INNER_POSITION = BEFORE,
547 LAST_INNER_POSITION = AFTER
548 };
549
550 ParallelMove* GetOrCreateParallelMove(InnerPosition pos, Zone* zone) {
551 if (parallel_moves_[pos] == NULL) {
552 parallel_moves_[pos] = new (zone) ParallelMove(zone);
553 }
554 return parallel_moves_[pos];
555 }
556
557 ParallelMove* GetParallelMove(InnerPosition pos) {
558 return parallel_moves_[pos];
559 }
560
561 static GapInstruction* New(Zone* zone) {
562 void* buffer = zone->New(sizeof(GapInstruction));
563 return new (buffer) GapInstruction(kGapInstruction);
564 }
565
566 static GapInstruction* cast(Instruction* instr) {
567 ASSERT(instr->IsGapMoves());
568 return static_cast<GapInstruction*>(instr);
569 }
570
571 static const GapInstruction* cast(const Instruction* instr) {
572 ASSERT(instr->IsGapMoves());
573 return static_cast<const GapInstruction*>(instr);
574 }
575
576 protected:
577 explicit GapInstruction(InstructionCode opcode) : Instruction(opcode) {
578 parallel_moves_[BEFORE] = NULL;
579 parallel_moves_[START] = NULL;
580 parallel_moves_[END] = NULL;
581 parallel_moves_[AFTER] = NULL;
582 }
583
584 private:
585 friend OStream& operator<<(OStream& os, const Instruction& instr);
586 ParallelMove* parallel_moves_[LAST_INNER_POSITION + 1];
587 };
588
589
590 // This special kind of gap move instruction represents the beginning of a
591 // block of code.
592 // TODO(titzer): move code_start and code_end from BasicBlock to here.
// Marks the beginning of a basic block in the instruction stream; being a
// GapInstruction, it can also carry parallel moves.
class BlockStartInstruction V8_FINAL : public GapInstruction {
 public:
  BasicBlock* block() const { return block_; }
  // The assembler label associated with this block's start.
  Label* label() { return &label_; }

  static BlockStartInstruction* New(Zone* zone, BasicBlock* block) {
    void* buffer = zone->New(sizeof(BlockStartInstruction));
    return new (buffer) BlockStartInstruction(block);
  }

  static BlockStartInstruction* cast(Instruction* instr) {
    ASSERT(instr->IsBlockStart());
    return static_cast<BlockStartInstruction*>(instr);
  }

 private:
  explicit BlockStartInstruction(BasicBlock* block)
      : GapInstruction(kBlockStartInstruction), block_(block) {}

  BasicBlock* block_;
  Label label_;
};
615
616
// A pseudo-instruction that records the source position associated with the
// code that follows it.  It carries no operands.
class SourcePositionInstruction V8_FINAL : public Instruction {
 public:
  static SourcePositionInstruction* New(Zone* zone, SourcePosition position) {
    void* buffer = zone->New(sizeof(SourcePositionInstruction));
    return new (buffer) SourcePositionInstruction(position);
  }

  SourcePosition source_position() const { return source_position_; }

  static SourcePositionInstruction* cast(Instruction* instr) {
    ASSERT(instr->IsSourcePosition());
    return static_cast<SourcePositionInstruction*>(instr);
  }

  static const SourcePositionInstruction* cast(const Instruction* instr) {
    ASSERT(instr->IsSourcePosition());
    return static_cast<const SourcePositionInstruction*>(instr);
  }

 private:
  explicit SourcePositionInstruction(SourcePosition source_position)
      : Instruction(kSourcePositionInstruction),
        source_position_(source_position) {
    // Callers must filter out invalid/unknown positions before creating one.
    ASSERT(!source_position_.IsInvalid());
    ASSERT(!source_position_.IsUnknown());
  }

  SourcePosition source_position_;
};
646
647
// A typed constant value.  Every payload is stored bit-cast into a single
// int64_t; type_ records how the bits are to be interpreted.
class Constant V8_FINAL {
 public:
  enum Type { kInt32, kInt64, kFloat64, kExternalReference, kHeapObject };

  explicit Constant(int32_t v) : type_(kInt32), value_(v) {}
  explicit Constant(int64_t v) : type_(kInt64), value_(v) {}
  explicit Constant(double v) : type_(kFloat64), value_(BitCast<int64_t>(v)) {}
  explicit Constant(ExternalReference ref)
      : type_(kExternalReference), value_(BitCast<intptr_t>(ref)) {}
  explicit Constant(Handle<HeapObject> obj)
      : type_(kHeapObject), value_(BitCast<intptr_t>(obj)) {}

  Type type() const { return type_; }

  int32_t ToInt32() const {
    ASSERT_EQ(kInt32, type());
    return static_cast<int32_t>(value_);
  }

  // kInt32 constants widen implicitly; any other type must be kInt64.
  int64_t ToInt64() const {
    if (type() == kInt32) return ToInt32();
    ASSERT_EQ(kInt64, type());
    return value_;
  }

  // kInt32 constants convert to their double value; otherwise the stored
  // bits are reinterpreted as a kFloat64.
  double ToFloat64() const {
    if (type() == kInt32) return ToInt32();
    ASSERT_EQ(kFloat64, type());
    return BitCast<double>(value_);
  }

  ExternalReference ToExternalReference() const {
    ASSERT_EQ(kExternalReference, type());
    return BitCast<ExternalReference>(static_cast<intptr_t>(value_));
  }

  Handle<HeapObject> ToHeapObject() const {
    ASSERT_EQ(kHeapObject, type());
    return BitCast<Handle<HeapObject> >(static_cast<intptr_t>(value_));
  }

 private:
  Type type_;
  int64_t value_;
};
693
694 OStream& operator<<(OStream& os, const Constant& constant);
695
696 typedef std::deque<Constant, zone_allocator<Constant> > ConstantDeque;
697 typedef std::map<int, Constant, std::less<int>,
698 zone_allocator<std::pair<int, Constant> > > ConstantMap;
699
700
701 typedef std::deque<Instruction*, zone_allocator<Instruction*> >
702 InstructionDeque;
703 typedef std::deque<PointerMap*, zone_allocator<PointerMap*> > PointerMapDeque;
704 typedef std::vector<FrameStateDescriptor, zone_allocator<FrameStateDescriptor> >
705 DeoptimizationVector;
706
707
708 // Represents architecture-specific generated code before, during, and after
709 // register allocation.
710 // TODO(titzer): s/IsDouble/IsFloat64/
711 class InstructionSequence V8_FINAL {
712 public:
713 InstructionSequence(Linkage* linkage, Graph* graph, Schedule* schedule)
714 : graph_(graph),
715 linkage_(linkage),
716 schedule_(schedule),
717 constants_(ConstantMap::key_compare(),
718 ConstantMap::allocator_type(zone())),
719 immediates_(ConstantDeque::allocator_type(zone())),
720 instructions_(InstructionDeque::allocator_type(zone())),
721 next_virtual_register_(graph->NodeCount()),
722 pointer_maps_(PointerMapDeque::allocator_type(zone())),
723 doubles_(std::less<int>(), VirtualRegisterSet::allocator_type(zone())),
724 references_(std::less<int>(),
725 VirtualRegisterSet::allocator_type(zone())),
726 deoptimization_entries_(DeoptimizationVector::allocator_type(zone())) {}
727
728 int NextVirtualRegister() { return next_virtual_register_++; }
729 int VirtualRegisterCount() const { return next_virtual_register_; }
730
731 int ValueCount() const { return graph_->NodeCount(); }
732
733 int BasicBlockCount() const {
734 return static_cast<int>(schedule_->rpo_order()->size());
735 }
736
737 BasicBlock* BlockAt(int rpo_number) const {
738 return (*schedule_->rpo_order())[rpo_number];
739 }
740
741 BasicBlock* GetContainingLoop(BasicBlock* block) {
742 return block->loop_header_;
743 }
744
745 int GetLoopEnd(BasicBlock* block) const { return block->loop_end_; }
746
747 BasicBlock* GetBasicBlock(int instruction_index);
748
749 int GetVirtualRegister(Node* node) const { return node->id(); }
750
751 bool IsReference(int virtual_register) const;
752 bool IsDouble(int virtual_register) const;
753
754 void MarkAsReference(int virtual_register);
755 void MarkAsDouble(int virtual_register);
756
757 void AddGapMove(int index, InstructionOperand* from, InstructionOperand* to);
758
759 Label* GetLabel(BasicBlock* block);
760 BlockStartInstruction* GetBlockStart(BasicBlock* block);
761
762 typedef InstructionDeque::const_iterator const_iterator;
763 const_iterator begin() const { return instructions_.begin(); }
764 const_iterator end() const { return instructions_.end(); }
765
766 GapInstruction* GapAt(int index) const {
767 return GapInstruction::cast(InstructionAt(index));
768 }
769 bool IsGapAt(int index) const { return InstructionAt(index)->IsGapMoves(); }
770 Instruction* InstructionAt(int index) const {
771 ASSERT(index >= 0);
772 ASSERT(index < static_cast<int>(instructions_.size()));
773 return instructions_[index];
774 }
775
776 Frame* frame() { return &frame_; }
777 Graph* graph() const { return graph_; }
778 Isolate* isolate() const { return zone()->isolate(); }
779 Linkage* linkage() const { return linkage_; }
780 Schedule* schedule() const { return schedule_; }
781 const PointerMapDeque* pointer_maps() const { return &pointer_maps_; }
782 Zone* zone() const { return graph_->zone(); }
783
784 // Used by the code generator while adding instructions.
785 int AddInstruction(Instruction* instr, BasicBlock* block);
786 void StartBlock(BasicBlock* block);
787 void EndBlock(BasicBlock* block);
788
789 void AddConstant(int virtual_register, Constant constant) {
790 ASSERT(constants_.find(virtual_register) == constants_.end());
791 constants_.insert(std::make_pair(virtual_register, constant));
792 }
793 Constant GetConstant(int virtual_register) const {
794 ConstantMap::const_iterator it = constants_.find(virtual_register);
795 ASSERT(it != constants_.end());
796 ASSERT_EQ(virtual_register, it->first);
797 return it->second;
798 }
799
800 typedef ConstantDeque Immediates;
801 const Immediates& immediates() const { return immediates_; }
802
803 int AddImmediate(Constant constant) {
804 int index = immediates_.size();
805 immediates_.push_back(constant);
806 return index;
807 }
808 Constant GetImmediate(int index) const {
809 ASSERT(index >= 0);
810 ASSERT(index < static_cast<int>(immediates_.size()));
811 return immediates_[index];
812 }
813
814 int AddDeoptimizationEntry(const FrameStateDescriptor& descriptor);
815 FrameStateDescriptor GetDeoptimizationEntry(int deoptimization_id);
816 int GetDeoptimizationEntryCount();
817
818 private:
819 friend OStream& operator<<(OStream& os, const InstructionSequence& code);
820
821 typedef std::set<int, std::less<int>, ZoneIntAllocator> VirtualRegisterSet;
822
823 Graph* graph_;
824 Linkage* linkage_;
825 Schedule* schedule_;
826 ConstantMap constants_;
827 ConstantDeque immediates_;
828 InstructionDeque instructions_;
829 int next_virtual_register_;
830 PointerMapDeque pointer_maps_;
831 VirtualRegisterSet doubles_;
832 VirtualRegisterSet references_;
833 Frame frame_;
834 DeoptimizationVector deoptimization_entries_;
835 };
836
837 OStream& operator<<(OStream& os, const InstructionSequence& code);
838
839 } // namespace compiler
840 } // namespace internal
841 } // namespace v8
842
843 #endif // V8_COMPILER_INSTRUCTION_H_
OLDNEW
« no previous file with comments | « src/compiler/ia32/linkage-ia32.cc ('k') | src/compiler/instruction.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698