| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/instruction-selector.h" | 5 #include "src/compiler/instruction-selector.h" |
| 6 | 6 |
| 7 #include "src/compiler/instruction-selector-impl.h" | 7 #include "src/compiler/instruction-selector-impl.h" |
| 8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
| 9 #include "src/compiler/node-properties.h" | 9 #include "src/compiler/node-properties.h" |
| 10 #include "src/compiler/pipeline.h" | 10 #include "src/compiler/pipeline.h" |
| (...skipping 22 matching lines...) |
| 33 UnallocatedOperand::kInvalidVirtualRegister, zone) { | 33 UnallocatedOperand::kInvalidVirtualRegister, zone) { |
| 34 instructions_.reserve(node_count); | 34 instructions_.reserve(node_count); |
| 35 } | 35 } |
| 36 | 36 |
| 37 | 37 |
| 38 void InstructionSelector::SelectInstructions() { | 38 void InstructionSelector::SelectInstructions() { |
| 39 // Mark the inputs of all phis in loop headers as used. | 39 // Mark the inputs of all phis in loop headers as used. |
| 40 BasicBlockVector* blocks = schedule()->rpo_order(); | 40 BasicBlockVector* blocks = schedule()->rpo_order(); |
| 41 for (auto const block : *blocks) { | 41 for (auto const block : *blocks) { |
| 42 if (!block->IsLoopHeader()) continue; | 42 if (!block->IsLoopHeader()) continue; |
| 43 DCHECK_LE(2, block->PredecessorCount()); | 43 DCHECK_LE(2u, block->PredecessorCount()); |
| 44 for (Node* const phi : *block) { | 44 for (Node* const phi : *block) { |
| 45 if (phi->opcode() != IrOpcode::kPhi) continue; | 45 if (phi->opcode() != IrOpcode::kPhi) continue; |
| 46 | 46 |
| 47 // Mark all inputs as used. | 47 // Mark all inputs as used. |
| 48 for (Node* const input : phi->inputs()) { | 48 for (Node* const input : phi->inputs()) { |
| 49 MarkAsUsed(input); | 49 MarkAsUsed(input); |
| 50 } | 50 } |
| 51 } | 51 } |
| 52 } | 52 } |
| 53 | 53 |
| (...skipping 117 matching lines...) |
| 171 } | 171 } |
| 172 | 172 |
| 173 | 173 |
| 174 bool InstructionSelector::CanCover(Node* user, Node* node) const { | 174 bool InstructionSelector::CanCover(Node* user, Node* node) const { |
| 175 return node->OwnedBy(user) && | 175 return node->OwnedBy(user) && |
| 176 schedule()->block(node) == schedule()->block(user); | 176 schedule()->block(node) == schedule()->block(user); |
| 177 } | 177 } |
| 178 | 178 |
| 179 | 179 |
| 180 int InstructionSelector::GetVirtualRegister(const Node* node) { | 180 int InstructionSelector::GetVirtualRegister(const Node* node) { |
| 181 DCHECK_NOT_NULL(node); | 181 DCHECK(node); |
| 182 size_t const id = node->id(); | 182 size_t const id = node->id(); |
| 183 DCHECK_LT(id, virtual_registers_.size()); | 183 DCHECK_LT(id, virtual_registers_.size()); |
| 184 int virtual_register = virtual_registers_[id]; | 184 int virtual_register = virtual_registers_[id]; |
| 185 if (virtual_register == UnallocatedOperand::kInvalidVirtualRegister) { | 185 if (virtual_register == UnallocatedOperand::kInvalidVirtualRegister) { |
| 186 virtual_register = sequence()->NextVirtualRegister(); | 186 virtual_register = sequence()->NextVirtualRegister(); |
| 187 virtual_registers_[id] = virtual_register; | 187 virtual_registers_[id] = virtual_register; |
| 188 } | 188 } |
| 189 return virtual_register; | 189 return virtual_register; |
| 190 } | 190 } |
| 191 | 191 |
| 192 | 192 |
| 193 const std::map<NodeId, int> InstructionSelector::GetVirtualRegistersForTesting() | 193 const std::map<NodeId, int> InstructionSelector::GetVirtualRegistersForTesting() |
| 194 const { | 194 const { |
| 195 std::map<NodeId, int> virtual_registers; | 195 std::map<NodeId, int> virtual_registers; |
| 196 for (size_t n = 0; n < virtual_registers_.size(); ++n) { | 196 for (size_t n = 0; n < virtual_registers_.size(); ++n) { |
| 197 if (virtual_registers_[n] != UnallocatedOperand::kInvalidVirtualRegister) { | 197 if (virtual_registers_[n] != UnallocatedOperand::kInvalidVirtualRegister) { |
| 198 NodeId const id = static_cast<NodeId>(n); | 198 NodeId const id = static_cast<NodeId>(n); |
| 199 virtual_registers.insert(std::make_pair(id, virtual_registers_[n])); | 199 virtual_registers.insert(std::make_pair(id, virtual_registers_[n])); |
| 200 } | 200 } |
| 201 } | 201 } |
| 202 return virtual_registers; | 202 return virtual_registers; |
| 203 } | 203 } |
| 204 | 204 |
| 205 | 205 |
| 206 bool InstructionSelector::IsDefined(Node* node) const { | 206 bool InstructionSelector::IsDefined(Node* node) const { |
| 207 DCHECK_NOT_NULL(node); | 207 DCHECK(node); |
| 208 size_t const id = node->id(); | 208 size_t const id = node->id(); |
| 209 DCHECK_LT(id, defined_.size()); | 209 DCHECK_LT(id, defined_.size()); |
| 210 return defined_[id]; | 210 return defined_[id]; |
| 211 } | 211 } |
| 212 | 212 |
| 213 | 213 |
| 214 void InstructionSelector::MarkAsDefined(Node* node) { | 214 void InstructionSelector::MarkAsDefined(Node* node) { |
| 215 DCHECK_NOT_NULL(node); | 215 DCHECK(node); |
| 216 size_t const id = node->id(); | 216 size_t const id = node->id(); |
| 217 DCHECK_LT(id, defined_.size()); | 217 DCHECK_LT(id, defined_.size()); |
| 218 defined_[id] = true; | 218 defined_[id] = true; |
| 219 } | 219 } |
| 220 | 220 |
| 221 | 221 |
| 222 bool InstructionSelector::IsUsed(Node* node) const { | 222 bool InstructionSelector::IsUsed(Node* node) const { |
| 223 DCHECK_NOT_NULL(node); | 223 DCHECK(node); |
| 224 if (!node->op()->HasProperty(Operator::kEliminatable)) return true; | 224 if (!node->op()->HasProperty(Operator::kEliminatable)) return true; |
| 225 size_t const id = node->id(); | 225 size_t const id = node->id(); |
| 226 DCHECK_LT(id, used_.size()); | 226 DCHECK_LT(id, used_.size()); |
| 227 return used_[id]; | 227 return used_[id]; |
| 228 } | 228 } |
| 229 | 229 |
| 230 | 230 |
| 231 void InstructionSelector::MarkAsUsed(Node* node) { | 231 void InstructionSelector::MarkAsUsed(Node* node) { |
| 232 DCHECK_NOT_NULL(node); | 232 DCHECK(node); |
| 233 size_t const id = node->id(); | 233 size_t const id = node->id(); |
| 234 DCHECK_LT(id, used_.size()); | 234 DCHECK_LT(id, used_.size()); |
| 235 used_[id] = true; | 235 used_[id] = true; |
| 236 } | 236 } |
| 237 | 237 |
| 238 | 238 |
| 239 bool InstructionSelector::IsDouble(const Node* node) const { | 239 bool InstructionSelector::IsDouble(const Node* node) const { |
| 240 DCHECK_NOT_NULL(node); | 240 DCHECK(node); |
| 241 int const virtual_register = virtual_registers_[node->id()]; | 241 int const virtual_register = virtual_registers_[node->id()]; |
| 242 if (virtual_register == UnallocatedOperand::kInvalidVirtualRegister) { | 242 if (virtual_register == UnallocatedOperand::kInvalidVirtualRegister) { |
| 243 return false; | 243 return false; |
| 244 } | 244 } |
| 245 return sequence()->IsDouble(virtual_register); | 245 return sequence()->IsDouble(virtual_register); |
| 246 } | 246 } |
| 247 | 247 |
| 248 | 248 |
| 249 void InstructionSelector::MarkAsDouble(Node* node) { | 249 void InstructionSelector::MarkAsDouble(Node* node) { |
| 250 DCHECK_NOT_NULL(node); | 250 DCHECK(node); |
| 251 DCHECK(!IsReference(node)); | 251 DCHECK(!IsReference(node)); |
| 252 sequence()->MarkAsDouble(GetVirtualRegister(node)); | 252 sequence()->MarkAsDouble(GetVirtualRegister(node)); |
| 253 } | 253 } |
| 254 | 254 |
| 255 | 255 |
| 256 bool InstructionSelector::IsReference(const Node* node) const { | 256 bool InstructionSelector::IsReference(const Node* node) const { |
| 257 DCHECK_NOT_NULL(node); | 257 DCHECK(node); |
| 258 int const virtual_register = virtual_registers_[node->id()]; | 258 int const virtual_register = virtual_registers_[node->id()]; |
| 259 if (virtual_register == UnallocatedOperand::kInvalidVirtualRegister) { | 259 if (virtual_register == UnallocatedOperand::kInvalidVirtualRegister) { |
| 260 return false; | 260 return false; |
| 261 } | 261 } |
| 262 return sequence()->IsReference(virtual_register); | 262 return sequence()->IsReference(virtual_register); |
| 263 } | 263 } |
| 264 | 264 |
| 265 | 265 |
| 266 void InstructionSelector::MarkAsReference(Node* node) { | 266 void InstructionSelector::MarkAsReference(Node* node) { |
| 267 DCHECK_NOT_NULL(node); | 267 DCHECK(node); |
| 268 DCHECK(!IsDouble(node)); | 268 DCHECK(!IsDouble(node)); |
| 269 sequence()->MarkAsReference(GetVirtualRegister(node)); | 269 sequence()->MarkAsReference(GetVirtualRegister(node)); |
| 270 } | 270 } |
| 271 | 271 |
| 272 | 272 |
| 273 void InstructionSelector::MarkAsRepresentation(MachineType rep, | 273 void InstructionSelector::MarkAsRepresentation(MachineType rep, |
| 274 InstructionOperand* op) { | 274 InstructionOperand* op) { |
| 275 UnallocatedOperand* unalloc = UnallocatedOperand::cast(op); | 275 UnallocatedOperand* unalloc = UnallocatedOperand::cast(op); |
| 276 switch (RepresentationOf(rep)) { | 276 switch (RepresentationOf(rep)) { |
| 277 case kRepFloat32: | 277 case kRepFloat32: |
| 278 case kRepFloat64: | 278 case kRepFloat64: |
| 279 sequence()->MarkAsDouble(unalloc->virtual_register()); | 279 sequence()->MarkAsDouble(unalloc->virtual_register()); |
| 280 break; | 280 break; |
| 281 case kRepTagged: | 281 case kRepTagged: |
| 282 sequence()->MarkAsReference(unalloc->virtual_register()); | 282 sequence()->MarkAsReference(unalloc->virtual_register()); |
| 283 break; | 283 break; |
| 284 default: | 284 default: |
| 285 break; | 285 break; |
| 286 } | 286 } |
| 287 } | 287 } |
| 288 | 288 |
| 289 | 289 |
| 290 void InstructionSelector::MarkAsRepresentation(MachineType rep, Node* node) { | 290 void InstructionSelector::MarkAsRepresentation(MachineType rep, Node* node) { |
| 291 DCHECK_NOT_NULL(node); | 291 DCHECK(node); |
| 292 switch (RepresentationOf(rep)) { | 292 switch (RepresentationOf(rep)) { |
| 293 case kRepFloat32: | 293 case kRepFloat32: |
| 294 case kRepFloat64: | 294 case kRepFloat64: |
| 295 MarkAsDouble(node); | 295 MarkAsDouble(node); |
| 296 break; | 296 break; |
| 297 case kRepTagged: | 297 case kRepTagged: |
| 298 MarkAsReference(node); | 298 MarkAsReference(node); |
| 299 break; | 299 break; |
| 300 default: | 300 default: |
| 301 break; | 301 break; |
| (...skipping 33 matching lines...) |
| 335 if (buffer->descriptor->ReturnCount() > 0) { | 335 if (buffer->descriptor->ReturnCount() > 0) { |
| 336 // Collect the projections that represent multiple outputs from this call. | 336 // Collect the projections that represent multiple outputs from this call. |
| 337 if (buffer->descriptor->ReturnCount() == 1) { | 337 if (buffer->descriptor->ReturnCount() == 1) { |
| 338 buffer->output_nodes.push_back(call); | 338 buffer->output_nodes.push_back(call); |
| 339 } else { | 339 } else { |
| 340 buffer->output_nodes.resize(buffer->descriptor->ReturnCount(), nullptr); | 340 buffer->output_nodes.resize(buffer->descriptor->ReturnCount(), nullptr); |
| 341 for (auto use : call->uses()) { | 341 for (auto use : call->uses()) { |
| 342 if (use->opcode() != IrOpcode::kProjection) continue; | 342 if (use->opcode() != IrOpcode::kProjection) continue; |
| 343 size_t const index = ProjectionIndexOf(use->op()); | 343 size_t const index = ProjectionIndexOf(use->op()); |
| 344 DCHECK_LT(index, buffer->output_nodes.size()); | 344 DCHECK_LT(index, buffer->output_nodes.size()); |
| 345 DCHECK_EQ(nullptr, buffer->output_nodes[index]); | 345 DCHECK(!buffer->output_nodes[index]); |
| 346 buffer->output_nodes[index] = use; | 346 buffer->output_nodes[index] = use; |
| 347 } | 347 } |
| 348 } | 348 } |
| 349 | 349 |
| 350 // Filter out the outputs that aren't live because no projection uses them. | 350 // Filter out the outputs that aren't live because no projection uses them. |
| 351 size_t outputs_needed_by_framestate = | 351 size_t outputs_needed_by_framestate = |
| 352 buffer->frame_state_descriptor == NULL | 352 buffer->frame_state_descriptor == NULL |
| 353 ? 0 | 353 ? 0 |
| 354 : buffer->frame_state_descriptor->state_combine() | 354 : buffer->frame_state_descriptor->state_combine() |
| 355 .ConsumedOutputCount(); | 355 .ConsumedOutputCount(); |
| (...skipping 72 matching lines...) |
| 428 DCHECK((*iter)->op()->opcode() != IrOpcode::kFrameState); | 428 DCHECK((*iter)->op()->opcode() != IrOpcode::kFrameState); |
| 429 if (index == 0) continue; // The first argument (callee) is already done. | 429 if (index == 0) continue; // The first argument (callee) is already done. |
| 430 InstructionOperand* op = | 430 InstructionOperand* op = |
| 431 g.UseLocation(*iter, buffer->descriptor->GetInputLocation(index), | 431 g.UseLocation(*iter, buffer->descriptor->GetInputLocation(index), |
| 432 buffer->descriptor->GetInputType(index)); | 432 buffer->descriptor->GetInputType(index)); |
| 433 if (UnallocatedOperand::cast(op)->HasFixedSlotPolicy()) { | 433 if (UnallocatedOperand::cast(op)->HasFixedSlotPolicy()) { |
| 434 int stack_index = -UnallocatedOperand::cast(op)->fixed_slot_index() - 1; | 434 int stack_index = -UnallocatedOperand::cast(op)->fixed_slot_index() - 1; |
| 435 if (static_cast<size_t>(stack_index) >= buffer->pushed_nodes.size()) { | 435 if (static_cast<size_t>(stack_index) >= buffer->pushed_nodes.size()) { |
| 436 buffer->pushed_nodes.resize(stack_index + 1, NULL); | 436 buffer->pushed_nodes.resize(stack_index + 1, NULL); |
| 437 } | 437 } |
| 438 DCHECK_EQ(NULL, buffer->pushed_nodes[stack_index]); | 438 DCHECK(!buffer->pushed_nodes[stack_index]); |
| 439 buffer->pushed_nodes[stack_index] = *iter; | 439 buffer->pushed_nodes[stack_index] = *iter; |
| 440 pushed_count++; | 440 pushed_count++; |
| 441 } else { | 441 } else { |
| 442 buffer->instruction_args.push_back(op); | 442 buffer->instruction_args.push_back(op); |
| 443 } | 443 } |
| 444 } | 444 } |
| 445 CHECK_EQ(pushed_count, static_cast<int>(buffer->pushed_nodes.size())); | 445 CHECK_EQ(pushed_count, static_cast<int>(buffer->pushed_nodes.size())); |
| 446 DCHECK(static_cast<size_t>(input_count) == | 446 DCHECK(static_cast<size_t>(input_count) == |
| 447 (buffer->instruction_args.size() + buffer->pushed_nodes.size() - | 447 (buffer->instruction_args.size() + buffer->pushed_nodes.size() - |
| 448 buffer->frame_state_value_count())); | 448 buffer->frame_state_value_count())); |
| 449 } | 449 } |
| 450 | 450 |
| 451 | 451 |
| 452 void InstructionSelector::VisitBlock(BasicBlock* block) { | 452 void InstructionSelector::VisitBlock(BasicBlock* block) { |
| 453 DCHECK_EQ(NULL, current_block_); | 453 DCHECK(!current_block_); |
| 454 current_block_ = block; | 454 current_block_ = block; |
| 455 int current_block_end = static_cast<int>(instructions_.size()); | 455 int current_block_end = static_cast<int>(instructions_.size()); |
| 456 | 456 |
| 457 // Generate code for the block control "top down", but schedule the code | 457 // Generate code for the block control "top down", but schedule the code |
| 458 // "bottom up". | 458 // "bottom up". |
| 459 VisitControl(block); | 459 VisitControl(block); |
| 460 std::reverse(instructions_.begin() + current_block_end, instructions_.end()); | 460 std::reverse(instructions_.begin() + current_block_end, instructions_.end()); |
| 461 | 461 |
| 462 // Visit code in reverse control flow order, because architecture-specific | 462 // Visit code in reverse control flow order, because architecture-specific |
| 463 // matching may cover more than one node at a time. | 463 // matching may cover more than one node at a time. |
| (...skipping 64 matching lines...) |
| 528 break; | 528 break; |
| 529 } | 529 } |
| 530 default: | 530 default: |
| 531 UNREACHABLE(); | 531 UNREACHABLE(); |
| 532 break; | 532 break; |
| 533 } | 533 } |
| 534 } | 534 } |
| 535 | 535 |
| 536 | 536 |
| 537 MachineType InstructionSelector::GetMachineType(Node* node) { | 537 MachineType InstructionSelector::GetMachineType(Node* node) { |
| 538 DCHECK_NOT_NULL(schedule()->block(node)); // should only use scheduled nodes. | 538 DCHECK(schedule()->block(node)); // should only use scheduled nodes. |
| 539 switch (node->opcode()) { | 539 switch (node->opcode()) { |
| 540 case IrOpcode::kStart: | 540 case IrOpcode::kStart: |
| 541 case IrOpcode::kLoop: | 541 case IrOpcode::kLoop: |
| 542 case IrOpcode::kEnd: | 542 case IrOpcode::kEnd: |
| 543 case IrOpcode::kBranch: | 543 case IrOpcode::kBranch: |
| 544 case IrOpcode::kIfTrue: | 544 case IrOpcode::kIfTrue: |
| 545 case IrOpcode::kIfFalse: | 545 case IrOpcode::kIfFalse: |
| 546 case IrOpcode::kEffectPhi: | 546 case IrOpcode::kEffectPhi: |
| 547 case IrOpcode::kEffectSet: | 547 case IrOpcode::kEffectSet: |
| 548 case IrOpcode::kMerge: | 548 case IrOpcode::kMerge: |
| (...skipping 110 matching lines...) |
| 659 return kMachBool; | 659 return kMachBool; |
| 660 default: | 660 default: |
| 661 V8_Fatal(__FILE__, __LINE__, "Unexpected operator #%d:%s @ node #%d", | 661 V8_Fatal(__FILE__, __LINE__, "Unexpected operator #%d:%s @ node #%d", |
| 662 node->opcode(), node->op()->mnemonic(), node->id()); | 662 node->opcode(), node->op()->mnemonic(), node->id()); |
| 663 } | 663 } |
| 664 return kMachNone; | 664 return kMachNone; |
| 665 } | 665 } |
| 666 | 666 |
| 667 | 667 |
| 668 void InstructionSelector::VisitNode(Node* node) { | 668 void InstructionSelector::VisitNode(Node* node) { |
| 669 DCHECK_NOT_NULL(schedule()->block(node)); // should only use scheduled nodes. | 669 DCHECK(schedule()->block(node)); // should only use scheduled nodes. |
| 670 SourcePosition source_position = source_positions_->GetSourcePosition(node); | 670 SourcePosition source_position = source_positions_->GetSourcePosition(node); |
| 671 if (!source_position.IsUnknown()) { | 671 if (!source_position.IsUnknown()) { |
| 672 DCHECK(!source_position.IsInvalid()); | 672 DCHECK(!source_position.IsInvalid()); |
| 673 if (FLAG_turbo_source_positions || node->opcode() == IrOpcode::kCall) { | 673 if (FLAG_turbo_source_positions || node->opcode() == IrOpcode::kCall) { |
| 674 Emit(SourcePositionInstruction::New(instruction_zone(), source_position)); | 674 Emit(SourcePositionInstruction::New(instruction_zone(), source_position)); |
| 675 } | 675 } |
| 676 } | 676 } |
| 677 switch (node->opcode()) { | 677 switch (node->opcode()) { |
| 678 case IrOpcode::kStart: | 678 case IrOpcode::kStart: |
| 679 case IrOpcode::kLoop: | 679 case IrOpcode::kLoop: |
| (...skipping 493 matching lines...) |
| 1173 MachineOperatorBuilder::Flags | 1173 MachineOperatorBuilder::Flags |
| 1174 InstructionSelector::SupportedMachineOperatorFlags() { | 1174 InstructionSelector::SupportedMachineOperatorFlags() { |
| 1175 return MachineOperatorBuilder::Flag::kNoFlags; | 1175 return MachineOperatorBuilder::Flag::kNoFlags; |
| 1176 } | 1176 } |
| 1177 | 1177 |
| 1178 #endif // !V8_TURBOFAN_BACKEND | 1178 #endif // !V8_TURBOFAN_BACKEND |
| 1179 | 1179 |
| 1180 } // namespace compiler | 1180 } // namespace compiler |
| 1181 } // namespace internal | 1181 } // namespace internal |
| 1182 } // namespace v8 | 1182 } // namespace v8 |
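A recurring pattern in this patch replaces NULL-comparison assertion macros (DCHECK_NOT_NULL(x), DCHECK_EQ(NULL, x), DCHECK_EQ(nullptr, x)) with boolean DCHECK(x) or DCHECK(!x), and adds a u suffix to integer literals compared against unsigned counts (DCHECK_LE(2u, block->PredecessorCount())). The sketch below is a minimal, self-contained illustration of why a type-checked comparison macro tends to push code in that direction; the MY_-prefixed macros and helper are hypothetical stand-ins, not V8's actual implementations, and the motivation described in the comments is a plausible reading rather than a statement of this CL's intent.

```cpp
// Illustrative stand-ins only; V8's real DCHECK/CHECK machinery differs.
#include <cstdio>
#include <cstdlib>
#include <vector>

// Boolean form: works uniformly for conditions and for pointer null checks.
#define MY_DCHECK(condition)                                   \
  do {                                                         \
    if (!(condition)) {                                        \
      std::fprintf(stderr, "Check failed: %s\n", #condition);  \
      std::abort();                                            \
    }                                                          \
  } while (false)

// Comparison form: a templated helper compares typed operands, so mixing a
// signed literal with an unsigned count (or NULL with a pointer type) runs
// into -Wsign-compare style warnings when builds treat warnings as errors.
template <typename A, typename B>
void MyCheckLEImpl(const A& lhs, const B& rhs, const char* expr) {
  if (!(lhs <= rhs)) {
    std::fprintf(stderr, "Check failed: %s\n", expr);
    std::abort();
  }
}
#define MY_DCHECK_LE(a, b) MyCheckLEImpl((a), (b), #a " <= " #b)

int main() {
  std::vector<int> predecessors = {1, 2, 3};
  // The literal carries the u suffix so both operands of <= are unsigned.
  MY_DCHECK_LE(2u, predecessors.size());

  int value = 42;
  int* node = &value;
  int* slot = nullptr;
  // Pointer checks stated as boolean conditions rather than NULL comparisons.
  MY_DCHECK(node);
  MY_DCHECK(!slot);
  return 0;
}
```

Compiling the MY_DCHECK_LE call with a plain 2 instead of 2u under -Wsign-compare -Werror reproduces the kind of signed/unsigned warning the unsigned literal avoids.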