Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(435)

Side by Side Diff: src/compiler/instruction.cc

Issue 426233002: Land the Fan (disabled) (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Review feedback, rebase and "git cl format" Created 6 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/compiler/instruction.h ('k') | src/compiler/instruction-codes.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/instruction.h"
6
7 #include "src/compiler/common-operator.h"
8
9 namespace v8 {
10 namespace internal {
11 namespace compiler {
12
// Pretty-prints an operand for debug dumps. Unallocated operands print as
// "v<vreg>" plus a parenthesized allocation constraint; allocated operands
// print as a bracketed kind/index tag.
OStream& operator<<(OStream& os, const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::INVALID:
      return os << "(0)";
    case InstructionOperand::UNALLOCATED: {
      const UnallocatedOperand* unalloc = UnallocatedOperand::cast(&op);
      os << "v" << unalloc->virtual_register();
      // FIXED_SLOT is the one basic policy with its own payload; everything
      // else is distinguished by the extended policy below.
      if (unalloc->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
        return os << "(=" << unalloc->fixed_slot_index() << "S)";
      }
      switch (unalloc->extended_policy()) {
        case UnallocatedOperand::NONE:
          return os;
        case UnallocatedOperand::FIXED_REGISTER:
          return os << "(=" << Register::AllocationIndexToString(
                       unalloc->fixed_register_index()) << ")";
        case UnallocatedOperand::FIXED_DOUBLE_REGISTER:
          return os << "(=" << DoubleRegister::AllocationIndexToString(
                       unalloc->fixed_register_index()) << ")";
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          return os << "(R)";
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          return os << "(1)";
        case UnallocatedOperand::ANY:
          return os << "(-)";
      }
    }
    case InstructionOperand::CONSTANT:
      return os << "[constant:" << op.index() << "]";
    case InstructionOperand::IMMEDIATE:
      return os << "[immediate:" << op.index() << "]";
    case InstructionOperand::STACK_SLOT:
      return os << "[stack:" << op.index() << "]";
    case InstructionOperand::DOUBLE_STACK_SLOT:
      return os << "[double_stack:" << op.index() << "]";
    case InstructionOperand::REGISTER:
      return os << "[" << Register::AllocationIndexToString(op.index())
                << "|R]";
    case InstructionOperand::DOUBLE_REGISTER:
      return os << "[" << DoubleRegister::AllocationIndexToString(op.index())
                << "|R]";
  }
  // All enum values return above; reaching here means a corrupt kind().
  UNREACHABLE();
  return os;
}
58
59
// Lazily allocated per-(kind, count) array of canonical operands; populated
// by SetUpCache() and released by TearDownCache() below.
template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
SubKindOperand<kOperandKind, kNumCachedOperands>*
SubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;
63
64
65 template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
66 void SubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
67 if (cache) return;
68 cache = new SubKindOperand[kNumCachedOperands];
69 for (int i = 0; i < kNumCachedOperands; i++) {
70 cache[i].ConvertTo(kOperandKind, i);
71 }
72 }
73
74
75 template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
76 void SubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
77 delete[] cache;
78 }
79
80
81 void InstructionOperand::SetUpCaches() {
82 #define INSTRUCTION_OPERAND_SETUP(name, type, number) \
83 name##Operand::SetUpCache();
84 INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_SETUP)
85 #undef INSTRUCTION_OPERAND_SETUP
86 }
87
88
89 void InstructionOperand::TearDownCaches() {
90 #define INSTRUCTION_OPERAND_TEARDOWN(name, type, number) \
91 name##Operand::TearDownCache();
92 INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_TEARDOWN)
93 #undef INSTRUCTION_OPERAND_TEARDOWN
94 }
95
96
97 OStream& operator<<(OStream& os, const MoveOperands& mo) {
98 os << *mo.destination();
99 if (!mo.source()->Equals(mo.destination())) os << " = " << *mo.source();
100 return os << ";";
101 }
102
103
104 bool ParallelMove::IsRedundant() const {
105 for (int i = 0; i < move_operands_.length(); ++i) {
106 if (!move_operands_[i].IsRedundant()) return false;
107 }
108 return true;
109 }
110
111
112 OStream& operator<<(OStream& os, const ParallelMove& pm) {
113 bool first = true;
114 for (ZoneList<MoveOperands>::iterator move = pm.move_operands()->begin();
115 move != pm.move_operands()->end(); ++move) {
116 if (move->IsEliminated()) continue;
117 if (!first) os << " ";
118 first = false;
119 os << *move;
120 }
121 return os;
122 }
123
124
125 void PointerMap::RecordPointer(InstructionOperand* op, Zone* zone) {
126 // Do not record arguments as pointers.
127 if (op->IsStackSlot() && op->index() < 0) return;
128 ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
129 pointer_operands_.Add(op, zone);
130 }
131
132
133 void PointerMap::RemovePointer(InstructionOperand* op) {
134 // Do not record arguments as pointers.
135 if (op->IsStackSlot() && op->index() < 0) return;
136 ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
137 for (int i = 0; i < pointer_operands_.length(); ++i) {
138 if (pointer_operands_[i]->Equals(op)) {
139 pointer_operands_.Remove(i);
140 --i;
141 }
142 }
143 }
144
145
146 void PointerMap::RecordUntagged(InstructionOperand* op, Zone* zone) {
147 // Do not record arguments as pointers.
148 if (op->IsStackSlot() && op->index() < 0) return;
149 ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
150 untagged_operands_.Add(op, zone);
151 }
152
153
154 OStream& operator<<(OStream& os, const PointerMap& pm) {
155 os << "{";
156 for (ZoneList<InstructionOperand*>::iterator op =
157 pm.pointer_operands_.begin();
158 op != pm.pointer_operands_.end(); ++op) {
159 if (op != pm.pointer_operands_.begin()) os << ";";
160 os << *op;
161 }
162 return os << "}";
163 }
164
165
166 OStream& operator<<(OStream& os, const ArchOpcode& ao) {
167 switch (ao) {
168 #define CASE(Name) \
169 case k##Name: \
170 return os << #Name;
171 ARCH_OPCODE_LIST(CASE)
172 #undef CASE
173 }
174 UNREACHABLE();
175 return os;
176 }
177
178
179 OStream& operator<<(OStream& os, const AddressingMode& am) {
180 switch (am) {
181 case kMode_None:
182 return os;
183 #define CASE(Name) \
184 case kMode_##Name: \
185 return os << #Name;
186 TARGET_ADDRESSING_MODE_LIST(CASE)
187 #undef CASE
188 }
189 UNREACHABLE();
190 return os;
191 }
192
193
194 OStream& operator<<(OStream& os, const FlagsMode& fm) {
195 switch (fm) {
196 case kFlags_none:
197 return os;
198 case kFlags_branch:
199 return os << "branch";
200 case kFlags_set:
201 return os << "set";
202 }
203 UNREACHABLE();
204 return os;
205 }
206
207
// Prints a human-readable description of a flags condition (the condition
// under which a branch is taken or a boolean is set).
OStream& operator<<(OStream& os, const FlagsCondition& fc) {
  switch (fc) {
    case kEqual:
      return os << "equal";
    case kNotEqual:
      return os << "not equal";
    case kSignedLessThan:
      return os << "signed less than";
    case kSignedGreaterThanOrEqual:
      return os << "signed greater than or equal";
    case kSignedLessThanOrEqual:
      return os << "signed less than or equal";
    case kSignedGreaterThan:
      return os << "signed greater than";
    case kUnsignedLessThan:
      return os << "unsigned less than";
    case kUnsignedGreaterThanOrEqual:
      return os << "unsigned greater than or equal";
    case kUnsignedLessThanOrEqual:
      return os << "unsigned less than or equal";
    case kUnsignedGreaterThan:
      return os << "unsigned greater than";
    case kUnorderedEqual:
      return os << "unordered equal";
    case kUnorderedNotEqual:
      return os << "unordered not equal";
    case kUnorderedLessThan:
      return os << "unordered less than";
    case kUnorderedGreaterThanOrEqual:
      return os << "unordered greater than or equal";
    case kUnorderedLessThanOrEqual:
      return os << "unordered less than or equal";
    case kUnorderedGreaterThan:
      return os << "unordered greater than";
  }
  // Every enumerator returns above.
  UNREACHABLE();
  return os;
}
246
247
248 OStream& operator<<(OStream& os, const Instruction& instr) {
249 if (instr.OutputCount() > 1) os << "(";
250 for (size_t i = 0; i < instr.OutputCount(); i++) {
251 if (i > 0) os << ", ";
252 os << *instr.OutputAt(i);
253 }
254
255 if (instr.OutputCount() > 1) os << ") = ";
256 if (instr.OutputCount() == 1) os << " = ";
257
258 if (instr.IsGapMoves()) {
259 const GapInstruction* gap = GapInstruction::cast(&instr);
260 os << (instr.IsBlockStart() ? " block-start" : "gap ");
261 for (int i = GapInstruction::FIRST_INNER_POSITION;
262 i <= GapInstruction::LAST_INNER_POSITION; i++) {
263 os << "(";
264 if (gap->parallel_moves_[i] != NULL) os << *gap->parallel_moves_[i];
265 os << ") ";
266 }
267 } else if (instr.IsSourcePosition()) {
268 const SourcePositionInstruction* pos =
269 SourcePositionInstruction::cast(&instr);
270 os << "position (" << pos->source_position().raw() << ")";
271 } else {
272 os << ArchOpcodeField::decode(instr.opcode());
273 AddressingMode am = AddressingModeField::decode(instr.opcode());
274 if (am != kMode_None) {
275 os << " : " << AddressingModeField::decode(instr.opcode());
276 }
277 FlagsMode fm = FlagsModeField::decode(instr.opcode());
278 if (fm != kFlags_none) {
279 os << " && " << fm << " if "
280 << FlagsConditionField::decode(instr.opcode());
281 }
282 }
283 if (instr.InputCount() > 0) {
284 for (size_t i = 0; i < instr.InputCount(); i++) {
285 os << " " << *instr.InputAt(i);
286 }
287 }
288 return os << "\n";
289 }
290
291
292 OStream& operator<<(OStream& os, const Constant& constant) {
293 switch (constant.type()) {
294 case Constant::kInt32:
295 return os << constant.ToInt32();
296 case Constant::kInt64:
297 return os << constant.ToInt64() << "l";
298 case Constant::kFloat64:
299 return os << constant.ToFloat64();
300 case Constant::kExternalReference:
301 return os << constant.ToExternalReference().address();
302 case Constant::kHeapObject:
303 return os << Brief(*constant.ToHeapObject());
304 }
305 UNREACHABLE();
306 return os;
307 }
308
309
310 Label* InstructionSequence::GetLabel(BasicBlock* block) {
311 return GetBlockStart(block)->label();
312 }
313
314
315 BlockStartInstruction* InstructionSequence::GetBlockStart(BasicBlock* block) {
316 return BlockStartInstruction::cast(InstructionAt(block->code_start_));
317 }
318
319
320 void InstructionSequence::StartBlock(BasicBlock* block) {
321 block->code_start_ = instructions_.size();
322 BlockStartInstruction* block_start =
323 BlockStartInstruction::New(zone(), block);
324 AddInstruction(block_start, block);
325 }
326
327
328 void InstructionSequence::EndBlock(BasicBlock* block) {
329 int end = instructions_.size();
330 ASSERT(block->code_start_ >= 0 && block->code_start_ < end);
331 block->code_end_ = end;
332 }
333
334
335 int InstructionSequence::AddInstruction(Instruction* instr, BasicBlock* block) {
336 // TODO(titzer): the order of these gaps is a holdover from Lithium.
337 GapInstruction* gap = GapInstruction::New(zone());
338 if (instr->IsControl()) instructions_.push_back(gap);
339 int index = instructions_.size();
340 instructions_.push_back(instr);
341 if (!instr->IsControl()) instructions_.push_back(gap);
342 if (instr->NeedsPointerMap()) {
343 ASSERT(instr->pointer_map() == NULL);
344 PointerMap* pointer_map = new (zone()) PointerMap(zone());
345 pointer_map->set_instruction_position(index);
346 instr->set_pointer_map(pointer_map);
347 pointer_maps_.push_back(pointer_map);
348 }
349 return index;
350 }
351
352
353 BasicBlock* InstructionSequence::GetBasicBlock(int instruction_index) {
354 // TODO(turbofan): Optimize this.
355 for (;;) {
356 ASSERT_LE(0, instruction_index);
357 Instruction* instruction = InstructionAt(instruction_index--);
358 if (instruction->IsBlockStart()) {
359 return BlockStartInstruction::cast(instruction)->block();
360 }
361 }
362 }
363
364
365 bool InstructionSequence::IsReference(int virtual_register) const {
366 return references_.find(virtual_register) != references_.end();
367 }
368
369
370 bool InstructionSequence::IsDouble(int virtual_register) const {
371 return doubles_.find(virtual_register) != doubles_.end();
372 }
373
374
// Marks |virtual_register| as holding a tagged reference; queried later via
// IsReference().
void InstructionSequence::MarkAsReference(int virtual_register) {
  references_.insert(virtual_register);
}
378
379
// Marks |virtual_register| as holding a double value; queried later via
// IsDouble().
void InstructionSequence::MarkAsDouble(int virtual_register) {
  doubles_.insert(virtual_register);
}
383
384
385 void InstructionSequence::AddGapMove(int index, InstructionOperand* from,
386 InstructionOperand* to) {
387 GapAt(index)->GetOrCreateParallelMove(GapInstruction::START, zone())->AddMove(
388 from, to, zone());
389 }
390
391
392 int InstructionSequence::AddDeoptimizationEntry(
393 const FrameStateDescriptor& descriptor) {
394 int deoptimization_id = deoptimization_entries_.size();
395 deoptimization_entries_.push_back(descriptor);
396 return deoptimization_id;
397 }
398
399 FrameStateDescriptor InstructionSequence::GetDeoptimizationEntry(
400 int deoptimization_id) {
401 return deoptimization_entries_[deoptimization_id];
402 }
403
404
405 int InstructionSequence::GetDeoptimizationEntryCount() {
406 return deoptimization_entries_.size();
407 }
408
409
// Dumps the whole instruction sequence: the immediate pool, the constant
// pool, and then every basic block in RPO order with its predecessors,
// phis, instructions, control flow, and successors.
OStream& operator<<(OStream& os, const InstructionSequence& code) {
  // Immediates: "IMM#<i>: <constant>".
  for (size_t i = 0; i < code.immediates_.size(); ++i) {
    Constant constant = code.immediates_[i];
    os << "IMM#" << i << ": " << constant << "\n";
  }
  // Constants: "CST#<i>: v<vreg> = <constant>".
  int i = 0;
  for (ConstantMap::const_iterator it = code.constants_.begin();
       it != code.constants_.end(); ++i, ++it) {
    os << "CST#" << i << ": v" << it->first << " = " << it->second << "\n";
  }
  // Blocks, iterated (and verified below) in RPO order.
  for (int i = 0; i < code.BasicBlockCount(); i++) {
    BasicBlock* block = code.BlockAt(i);

    int bid = block->id();
    os << "RPO#" << block->rpo_number_ << ": B" << bid;
    CHECK(block->rpo_number_ == i);
    if (block->IsLoopHeader()) {
      os << " loop blocks: [" << block->rpo_number_ << ", " << block->loop_end_
         << ")";
    }
    os << " instructions: [" << block->code_start_ << ", " << block->code_end_
       << ")\n predecessors:";

    BasicBlock::Predecessors predecessors = block->predecessors();
    for (BasicBlock::Predecessors::iterator iter = predecessors.begin();
         iter != predecessors.end(); ++iter) {
      os << " B" << (*iter)->id();
    }
    os << "\n";

    // Phis from the underlying graph nodes of this block.
    for (BasicBlock::const_iterator j = block->begin(); j != block->end();
         ++j) {
      Node* phi = *j;
      if (phi->opcode() != IrOpcode::kPhi) continue;
      os << " phi: v" << phi->id() << " =";
      Node::Inputs inputs = phi->inputs();
      for (Node::Inputs::iterator iter(inputs.begin()); iter != inputs.end();
           ++iter) {
        os << " v" << (*iter)->id();
      }
      os << "\n";
    }

    // Instructions, each prefixed with its right-aligned index.
    Vector<char> buf = Vector<char>::New(32);
    for (int j = block->first_instruction_index();
         j <= block->last_instruction_index(); j++) {
      // TODO(svenpanne) Add some basic formatting to our streams.
      SNPrintF(buf, "%5d", j);
      os << " " << buf.start() << ": " << *code.InstructionAt(j);
    }

    os << " " << block->control_;

    if (block->control_input_ != NULL) {
      os << " v" << block->control_input_->id();
    }

    BasicBlock::Successors successors = block->successors();
    for (BasicBlock::Successors::iterator iter = successors.begin();
         iter != successors.end(); ++iter) {
      os << " B" << (*iter)->id();
    }
    os << "\n";
  }
  return os;
}
476
477 } // namespace compiler
478 } // namespace internal
479 } // namespace v8
OLDNEW
« no previous file with comments | « src/compiler/instruction.h ('k') | src/compiler/instruction-codes.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698