OLD | NEW |
(Empty) | |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "src/compiler/pipeline.h" |
| 6 #include "src/compiler/raw-machine-assembler.h" |
| 7 #include "src/compiler/scheduler.h" |
| 8 |
| 9 namespace v8 { |
| 10 namespace internal { |
| 11 namespace compiler { |
| 12 |
// Builds an assembler over |graph|, allocating a fresh Schedule in the graph
// zone.  The schedule starts with just its entry and exit blocks;
// current_block_ begins at entry so emitted nodes land there.
RawMachineAssembler::RawMachineAssembler(
    Graph* graph, MachineCallDescriptorBuilder* call_descriptor_builder,
    MachineRepresentation word)
    : GraphBuilder(graph),
      schedule_(new (zone()) Schedule(zone())),
      machine_(zone(), word),
      common_(zone()),
      call_descriptor_builder_(call_descriptor_builder),
      parameters_(NULL),
      exit_label_(schedule()->exit()),
      current_block_(schedule()->entry()) {
  // Eagerly create one Parameter node per formal parameter so that
  // Parameter(i) can later just index into the array.
  if (parameter_count() == 0) return;
  parameters_ = zone()->NewArray<Node*>(parameter_count());
  for (int i = 0; i < parameter_count(); ++i) {
    parameters_[i] = NewNode(common()->Parameter(i));
  }
}
| 30 |
| 31 |
| 32 Schedule* RawMachineAssembler::Export() { |
| 33 // Compute the correct codegen order. |
| 34 ASSERT(schedule_->rpo_order()->empty()); |
| 35 Scheduler scheduler(zone(), graph(), schedule_); |
| 36 scheduler.ComputeSpecialRPO(); |
| 37 // Invalidate MachineAssembler. |
| 38 Schedule* schedule = schedule_; |
| 39 schedule_ = NULL; |
| 40 return schedule; |
| 41 } |
| 42 |
| 43 |
| 44 Node* RawMachineAssembler::Parameter(int index) { |
| 45 ASSERT(0 <= index && index < parameter_count()); |
| 46 return parameters_[index]; |
| 47 } |
| 48 |
| 49 |
| 50 RawMachineAssembler::Label* RawMachineAssembler::Exit() { |
| 51 exit_label_.used_ = true; |
| 52 return &exit_label_; |
| 53 } |
| 54 |
| 55 |
| 56 void RawMachineAssembler::Goto(Label* label) { |
| 57 ASSERT(current_block_ != schedule()->exit()); |
| 58 schedule()->AddGoto(CurrentBlock(), Use(label)); |
| 59 current_block_ = NULL; |
| 60 } |
| 61 |
| 62 |
| 63 void RawMachineAssembler::Branch(Node* condition, Label* true_val, |
| 64 Label* false_val) { |
| 65 ASSERT(current_block_ != schedule()->exit()); |
| 66 Node* branch = NewNode(common()->Branch(), condition); |
| 67 schedule()->AddBranch(CurrentBlock(), branch, Use(true_val), Use(false_val)); |
| 68 current_block_ = NULL; |
| 69 } |
| 70 |
| 71 |
| 72 void RawMachineAssembler::Return(Node* value) { |
| 73 schedule()->AddReturn(CurrentBlock(), value); |
| 74 current_block_ = NULL; |
| 75 } |
| 76 |
| 77 |
| 78 void RawMachineAssembler::Deoptimize(Node* state) { |
| 79 Node* deopt = graph()->NewNode(common()->Deoptimize(), state); |
| 80 schedule()->AddDeoptimize(CurrentBlock(), deopt); |
| 81 current_block_ = NULL; |
| 82 } |
| 83 |
| 84 |
| 85 Node* RawMachineAssembler::CallJS0(Node* function, Node* receiver, |
| 86 Label* continuation, Label* deoptimization) { |
| 87 CallDescriptor* descriptor = Linkage::GetJSCallDescriptor(1, zone()); |
| 88 Node* call = graph()->NewNode(common()->Call(descriptor), function, receiver); |
| 89 schedule()->AddCall(CurrentBlock(), call, Use(continuation), |
| 90 Use(deoptimization)); |
| 91 current_block_ = NULL; |
| 92 return call; |
| 93 } |
| 94 |
| 95 |
// Emits a call to runtime function |function| with single argument |arg0|,
// ending the current block.  Control resumes at |continuation| on normal
// return and at |deoptimization| if the call deoptimizes.  Returns the call
// node.
Node* RawMachineAssembler::CallRuntime1(Runtime::FunctionId function,
                                        Node* arg0, Label* continuation,
                                        Label* deoptimization) {
  CallDescriptor* descriptor =
      Linkage::GetRuntimeCallDescriptor(function, 1, Operator::kNoProperties,
                                        CallDescriptor::kCanDeoptimize, zone());

  // Runtime calls go through the CEntry stub, which receives the external
  // reference of the C function, the argument count, and a context.
  Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
  Node* ref = NewNode(
      common()->ExternalConstant(ExternalReference(function, isolate())));
  Node* arity = Int32Constant(1);
  // NOTE(review): assumes the context is always incoming parameter 1 —
  // confirm against the call descriptor used to build this graph.
  Node* context = Parameter(1);

  Node* call = graph()->NewNode(common()->Call(descriptor), centry, arg0, ref,
                                arity, context);
  schedule()->AddCall(CurrentBlock(), call, Use(continuation),
                      Use(deoptimization));
  current_block_ = NULL;
  return call;
}
| 116 |
| 117 |
| 118 void RawMachineAssembler::Bind(Label* label) { |
| 119 ASSERT(current_block_ == NULL); |
| 120 ASSERT(!label->bound_); |
| 121 label->bound_ = true; |
| 122 current_block_ = EnsureBlock(label); |
| 123 } |
| 124 |
| 125 |
| 126 BasicBlock* RawMachineAssembler::Use(Label* label) { |
| 127 label->used_ = true; |
| 128 return EnsureBlock(label); |
| 129 } |
| 130 |
| 131 |
| 132 BasicBlock* RawMachineAssembler::EnsureBlock(Label* label) { |
| 133 if (label->block_ == NULL) label->block_ = schedule()->NewBasicBlock(); |
| 134 return label->block_; |
| 135 } |
| 136 |
| 137 |
// Returns the block currently being emitted into.  A block must be open,
// i.e. Bind() was called since the last block-terminating operation.
BasicBlock* RawMachineAssembler::CurrentBlock() {
  ASSERT(current_block_);
  return current_block_;
}
| 142 |
| 143 |
| 144 Node* RawMachineAssembler::MakeNode(Operator* op, int input_count, |
| 145 Node** inputs) { |
| 146 ASSERT(ScheduleValid()); |
| 147 ASSERT(current_block_ != NULL); |
| 148 Node* node = graph()->NewNode(op, input_count, inputs); |
| 149 BasicBlock* block = op->opcode() == IrOpcode::kParameter ? schedule()->start() |
| 150 : CurrentBlock(); |
| 151 schedule()->AddNode(block, node); |
| 152 return node; |
| 153 } |
| 154 |
| 155 } // namespace compiler |
| 156 } // namespace internal |
| 157 } // namespace v8 |
OLD | NEW |