// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_INSTRUCTION_SELECTOR_IMPL_H_
#define V8_COMPILER_INSTRUCTION_SELECTOR_IMPL_H_

#include "src/compiler/instruction.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/linkage.h"

namespace v8 {
namespace internal {
namespace compiler {

// A helper class for the instruction selector that simplifies construction of
// Operands. This class implements a base for architecture-specific helpers.
class OperandGenerator {
 public:
  explicit OperandGenerator(InstructionSelector* selector)
      : selector_(selector) {}

  InstructionOperand* DefineAsRegister(Node* node) {
    return Define(node, new (zone())
                  UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER));
  }

  InstructionOperand* DefineAsDoubleRegister(Node* node) {
    return Define(node, new (zone())
                  UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER));
  }

  InstructionOperand* DefineSameAsFirst(Node* result) {
    return Define(result, new (zone())
                  UnallocatedOperand(UnallocatedOperand::SAME_AS_FIRST_INPUT));
  }

  InstructionOperand* DefineAsFixed(Node* node, Register reg) {
    return Define(node, new (zone())
                  UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                     Register::ToAllocationIndex(reg)));
  }

  InstructionOperand* DefineAsFixedDouble(Node* node, DoubleRegister reg) {
    return Define(node, new (zone())
                  UnallocatedOperand(UnallocatedOperand::FIXED_DOUBLE_REGISTER,
                                     DoubleRegister::ToAllocationIndex(reg)));
  }

  InstructionOperand* DefineAsConstant(Node* node) {
    sequence()->AddConstant(node->id(), ToConstant(node));
    return ConstantOperand::Create(node->id(), zone());
  }

  InstructionOperand* DefineAsLocation(Node* node, LinkageLocation location) {
    return Define(node, ToUnallocatedOperand(location));
  }

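  // The Use* helpers below create operands for instruction inputs; unlike the
  // Define* helpers above, they also mark the node as used by the selector.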
  InstructionOperand* Use(Node* node) {
    return Use(node,
               new (zone()) UnallocatedOperand(
                   UnallocatedOperand::ANY, UnallocatedOperand::USED_AT_START));
  }

  InstructionOperand* UseRegister(Node* node) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                  UnallocatedOperand::USED_AT_START));
  }

  InstructionOperand* UseDoubleRegister(Node* node) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                  UnallocatedOperand::USED_AT_START));
  }

  // Use register or operand for the node. If a register is chosen, it won't
  // alias any temporary or output registers.
  InstructionOperand* UseUnique(Node* node) {
    return Use(node, new (zone()) UnallocatedOperand(UnallocatedOperand::ANY));
  }

  // Use a unique register for the node that does not alias any temporary or
  // output registers.
  InstructionOperand* UseUniqueRegister(Node* node) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER));
  }

  // Use a unique double register for the node that does not alias any
  // temporary or output double registers.
  InstructionOperand* UseUniqueDoubleRegister(Node* node) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER));
  }

  InstructionOperand* UseFixed(Node* node, Register reg) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                  Register::ToAllocationIndex(reg)));
  }

  InstructionOperand* UseFixedDouble(Node* node, DoubleRegister reg) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::FIXED_DOUBLE_REGISTER,
                                  DoubleRegister::ToAllocationIndex(reg)));
  }

  InstructionOperand* UseImmediate(Node* node) {
    int index = sequence()->AddImmediate(ToConstant(node));
    return ImmediateOperand::Create(index, zone());
  }

  InstructionOperand* UseLocation(Node* node, LinkageLocation location) {
    return Use(node, ToUnallocatedOperand(location));
  }

  InstructionOperand* TempRegister() {
    UnallocatedOperand* op =
        new (zone()) UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                        UnallocatedOperand::USED_AT_START);
    op->set_virtual_register(sequence()->NextVirtualRegister());
    return op;
  }

  InstructionOperand* TempDoubleRegister() {
    UnallocatedOperand* op =
        new (zone()) UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                        UnallocatedOperand::USED_AT_START);
    op->set_virtual_register(sequence()->NextVirtualRegister());
    sequence()->MarkAsDouble(op->virtual_register());
    return op;
  }

  InstructionOperand* TempRegister(Register reg) {
    return new (zone()) UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                           Register::ToAllocationIndex(reg));
  }

  InstructionOperand* TempImmediate(int32_t imm) {
    int index = sequence()->AddImmediate(Constant(imm));
    return ImmediateOperand::Create(index, zone());
  }

  InstructionOperand* Label(BasicBlock* block) {
    // TODO(bmeurer): We misuse ImmediateOperand here.
    return ImmediateOperand::Create(block->id(), zone());
  }

 protected:
  Graph* graph() const { return selector()->graph(); }
  InstructionSelector* selector() const { return selector_; }
  InstructionSequence* sequence() const { return selector()->sequence(); }
  Isolate* isolate() const { return zone()->isolate(); }
  Zone* zone() const { return selector()->instruction_zone(); }

 private:
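  // Translates a constant node into the Constant representation used by the
  // instruction sequence.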
  static Constant ToConstant(const Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
        return Constant(ValueOf<int32_t>(node->op()));
      case IrOpcode::kInt64Constant:
        return Constant(ValueOf<int64_t>(node->op()));
      case IrOpcode::kNumberConstant:
      case IrOpcode::kFloat64Constant:
        return Constant(ValueOf<double>(node->op()));
      case IrOpcode::kExternalConstant:
        return Constant(ValueOf<ExternalReference>(node->op()));
      case IrOpcode::kHeapConstant:
        return Constant(ValueOf<Handle<HeapObject> >(node->op()));
      default:
        break;
    }
    UNREACHABLE();
    return Constant(static_cast<int32_t>(0));
  }

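  // Associates {operand} with {node} by using the node's id as the operand's
  // virtual register.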
  UnallocatedOperand* Define(Node* node, UnallocatedOperand* operand) {
    ASSERT_NOT_NULL(node);
    ASSERT_NOT_NULL(operand);
    operand->set_virtual_register(node->id());
    return operand;
  }

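  // Like Define, but additionally marks {node} as used by the selector.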
  UnallocatedOperand* Use(Node* node, UnallocatedOperand* operand) {
    selector_->MarkAsUsed(node);
    return Define(node, operand);
  }

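  // Maps a linkage location to an unallocated operand: any register, a fixed
  // stack slot (negative locations), a fixed double register for float64
  // values, or a fixed general-purpose register otherwise.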
  UnallocatedOperand* ToUnallocatedOperand(LinkageLocation location) {
    if (location.location_ == LinkageLocation::ANY_REGISTER) {
      return new (zone())
          UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER);
    }
    if (location.location_ < 0) {
      return new (zone()) UnallocatedOperand(UnallocatedOperand::FIXED_SLOT,
                                             location.location_);
    }
    if (location.rep_ == kMachineFloat64) {
      return new (zone()) UnallocatedOperand(
          UnallocatedOperand::FIXED_DOUBLE_REGISTER, location.location_);
    }
    return new (zone()) UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                           location.location_);
  }

  InstructionSelector* selector_;
};

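// A hypothetical usage sketch (assuming an architecture-specific selector
// visitor; the opcode name below is a placeholder, not a real one): the
// helpers are typically combined when emitting a single instruction, e.g.
//
//   OperandGenerator g(this);
//   Emit(kSomeArchOpcode, g.DefineAsRegister(node),
//        g.UseRegister(node->InputAt(0)), g.UseImmediate(node->InputAt(1)));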

// The flags continuation is a way to combine a branch or a materialization
// of a boolean value with an instruction that sets the flags register.
// The whole instruction is treated as a unit by the register allocator, and
// thus no spills or moves can be introduced between the flags-setting
// instruction and the branch or set it should be combined with.
class FlagsContinuation V8_FINAL {
 public:
  // Creates a new flags continuation from the given condition and true/false
  // blocks.
  FlagsContinuation(FlagsCondition condition, BasicBlock* true_block,
                    BasicBlock* false_block)
      : mode_(kFlags_branch),
        condition_(condition),
        true_block_(true_block),
        false_block_(false_block) {
    ASSERT_NOT_NULL(true_block);
    ASSERT_NOT_NULL(false_block);
  }

  // Creates a new flags continuation from the given condition and result node.
  FlagsContinuation(FlagsCondition condition, Node* result)
      : mode_(kFlags_set), condition_(condition), result_(result) {
    ASSERT_NOT_NULL(result);
  }

  bool IsNone() const { return mode_ == kFlags_none; }
  bool IsBranch() const { return mode_ == kFlags_branch; }
  bool IsSet() const { return mode_ == kFlags_set; }
  FlagsCondition condition() const { return condition_; }
  Node* result() const {
    ASSERT(IsSet());
    return result_;
  }
  BasicBlock* true_block() const {
    ASSERT(IsBranch());
    return true_block_;
  }
  BasicBlock* false_block() const {
    ASSERT(IsBranch());
    return false_block_;
  }

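  // Conditions are encoded such that a condition and its negation differ only
  // in the least significant bit, so flipping that bit negates the condition.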
  void Negate() { condition_ = static_cast<FlagsCondition>(condition_ ^ 1); }

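  // Adjusts the condition for swapped operands, i.e. rewrites "a cond b" so it
  // holds as "b cond' a". Symmetric conditions (equality and its unordered
  // variants) are left unchanged.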
  void Commute() {
    switch (condition_) {
      case kEqual:
      case kNotEqual:
        return;
      case kSignedLessThan:
        condition_ = kSignedGreaterThan;
        return;
      case kSignedGreaterThanOrEqual:
        condition_ = kSignedLessThanOrEqual;
        return;
      case kSignedLessThanOrEqual:
        condition_ = kSignedGreaterThanOrEqual;
        return;
      case kSignedGreaterThan:
        condition_ = kSignedLessThan;
        return;
      case kUnsignedLessThan:
        condition_ = kUnsignedGreaterThan;
        return;
      case kUnsignedGreaterThanOrEqual:
        condition_ = kUnsignedLessThanOrEqual;
        return;
      case kUnsignedLessThanOrEqual:
        condition_ = kUnsignedGreaterThanOrEqual;
        return;
      case kUnsignedGreaterThan:
        condition_ = kUnsignedLessThan;
        return;
      case kUnorderedEqual:
      case kUnorderedNotEqual:
        return;
      case kUnorderedLessThan:
        condition_ = kUnorderedGreaterThan;
        return;
      case kUnorderedGreaterThanOrEqual:
        condition_ = kUnorderedLessThanOrEqual;
        return;
      case kUnorderedLessThanOrEqual:
        condition_ = kUnorderedGreaterThanOrEqual;
        return;
      case kUnorderedGreaterThan:
        condition_ = kUnorderedLessThan;
        return;
    }
    UNREACHABLE();
  }

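  // Replaces the current condition with {condition}, negating the result if
  // the current condition was kEqual.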
  void OverwriteAndNegateIfEqual(FlagsCondition condition) {
    bool negate = condition_ == kEqual;
    condition_ = condition;
    if (negate) Negate();
  }

  void SwapBlocks() { std::swap(true_block_, false_block_); }

  // Encodes this flags continuation into the given opcode.
  InstructionCode Encode(InstructionCode opcode) {
    return opcode | FlagsModeField::encode(mode_) |
           FlagsConditionField::encode(condition_);
  }

 private:
  FlagsMode mode_;
  FlagsCondition condition_;
  Node* result_;             // Only valid if mode_ == kFlags_set.
  BasicBlock* true_block_;   // Only valid if mode_ == kFlags_branch.
  BasicBlock* false_block_;  // Only valid if mode_ == kFlags_branch.
};

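// A hypothetical usage sketch (placeholder opcode and block variables, not
// taken from this file): a selector visiting a branch builds a continuation
// and encodes it into the flags-setting instruction's opcode, so that both
// are emitted as one unit, e.g.
//
//   FlagsContinuation cont(kEqual, true_block, false_block);
//   InstructionCode opcode = cont.Encode(kSomeArchCompareOpcode);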

// An internal helper class for generating the operands to calls.
// TODO(bmeurer): Get rid of the CallBuffer business and make
// InstructionSelector::VisitCall platform independent instead.
struct CallBuffer {
  CallBuffer(Zone* zone, CallDescriptor* descriptor);

  int output_count;
  CallDescriptor* descriptor;
  Node** output_nodes;
  InstructionOperand** outputs;
  InstructionOperand** fixed_and_control_args;
  int fixed_count;
  Node** pushed_nodes;
  int pushed_count;

  int input_count() { return descriptor->InputCount(); }

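  // Calls that can lazily deoptimize carry two additional control inputs.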
  int control_count() { return descriptor->CanLazilyDeoptimize() ? 2 : 0; }

  int fixed_and_control_count() { return fixed_count + control_count(); }
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_INSTRUCTION_SELECTOR_IMPL_H_