Index: src/compiler/x64/instruction-selector-x64.cc |
diff --git a/src/compiler/x64/instruction-selector-x64.cc b/src/compiler/x64/instruction-selector-x64.cc |
index 7f9e40afe2c4a2209b929b7d71440968b84576e1..b72afa202d181ba76a008cf34bd78fc2d6e19105 100644 |
--- a/src/compiler/x64/instruction-selector-x64.cc |
+++ b/src/compiler/x64/instruction-selector-x64.cc |
@@ -705,15 +705,50 @@ void InstructionSelector::VisitFloat64Sqrt(Node* node) { |
} |
-void InstructionSelector::VisitInt32AddWithOverflow(Node* node, |
- FlagsContinuation* cont) { |
- VisitBinop(this, node, kX64Add32, cont); |
-} |
+void InstructionSelector::VisitCall(Node* node) { |
+ X64OperandGenerator g(this); |
+ CallDescriptor* descriptor = OpParameter<CallDescriptor*>(node); |
+ FrameStateDescriptor* frame_state_descriptor = NULL; |
+ if (descriptor->NeedsFrameState()) { |
+ frame_state_descriptor = GetFrameStateDescriptor( |
+ node->InputAt(static_cast<int>(descriptor->InputCount()))); |
+ } |
+ |
+ CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
-void InstructionSelector::VisitInt32SubWithOverflow(Node* node, |
- FlagsContinuation* cont) { |
- VisitBinop(this, node, kX64Sub32, cont); |
+ // Compute InstructionOperands for inputs and outputs. |
+ InitializeCallBuffer(node, &buffer, true, true); |
+ |
+ // Push any stack arguments. |
+ for (NodeVectorRIter input = buffer.pushed_nodes.rbegin(); |
+ input != buffer.pushed_nodes.rend(); input++) { |
+ // TODO(titzer): handle pushing double parameters. |
+ Emit(kX64Push, NULL, |
+ g.CanBeImmediate(*input) ? g.UseImmediate(*input) : g.Use(*input)); |
+ } |
+ |
+ // Select the appropriate opcode based on the call type. |
+ InstructionCode opcode; |
+ switch (descriptor->kind()) { |
+ case CallDescriptor::kCallCodeObject: { |
+ opcode = kArchCallCodeObject; |
+ break; |
+ } |
+ case CallDescriptor::kCallJSFunction: |
+ opcode = kArchCallJSFunction; |
+ break; |
+ default: |
+ UNREACHABLE(); |
+ return; |
+ } |
+ opcode |= MiscField::encode(descriptor->flags()); |
+ |
+ // Emit the call instruction. |
+ Instruction* call_instr = |
+ Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(), |
+ buffer.instruction_args.size(), &buffer.instruction_args.front()); |
+ call_instr->MarkAsCall(); |
} |
@@ -753,107 +788,261 @@ static void VisitWordCompare(InstructionSelector* selector, Node* node, |
} |
-void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) { |
- switch (node->opcode()) { |
- case IrOpcode::kInt32Sub: |
- return VisitWordCompare(this, node, kX64Cmp32, cont, false); |
- case IrOpcode::kWord32And: |
- return VisitWordCompare(this, node, kX64Test32, cont, true); |
- default: |
- break; |
- } |
+static void VisitWordTest(InstructionSelector* selector, Node* node, |
+ InstructionCode opcode, FlagsContinuation* cont) { |
+ X64OperandGenerator g(selector); |
+ VisitCompare(selector, opcode, g.Use(node), g.TempImmediate(-1), cont); |
+} |
- X64OperandGenerator g(this); |
- VisitCompare(this, kX64Test32, g.Use(node), g.TempImmediate(-1), cont); |
+ |
+static void VisitFloat64Compare(InstructionSelector* selector, Node* node, |
+ FlagsContinuation* cont) { |
+ X64OperandGenerator g(selector); |
+ Node* left = node->InputAt(0); |
+ Node* right = node->InputAt(1); |
+ VisitCompare(selector, kSSEFloat64Cmp, g.UseRegister(left), g.Use(right), |
+ cont); |
} |
-void InstructionSelector::VisitWord64Test(Node* node, FlagsContinuation* cont) { |
- switch (node->opcode()) { |
- case IrOpcode::kInt64Sub: |
- return VisitWordCompare(this, node, kX64Cmp, cont, false); |
- case IrOpcode::kWord64And: |
- return VisitWordCompare(this, node, kX64Test, cont, true); |
- default: |
+void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, |
+ BasicBlock* fbranch) { |
+ OperandGenerator g(this); |
+ Node* user = branch; |
+ Node* value = branch->InputAt(0); |
+ |
+ FlagsContinuation cont(kNotEqual, tbranch, fbranch); |
+ |
+ // If we can fall through to the true block, invert the branch. |
+ if (IsNextInAssemblyOrder(tbranch)) { |
+ cont.Negate(); |
+ cont.SwapBlocks(); |
+ } |
+ |
+ // Try to combine with comparisons against 0 by simply inverting the branch. |
+ while (CanCover(user, value)) { |
+ if (value->opcode() == IrOpcode::kWord32Equal) { |
+ Int32BinopMatcher m(value); |
+ if (m.right().Is(0)) { |
+ user = value; |
+ value = m.left().node(); |
+ cont.Negate(); |
+ } else { |
+ break; |
+ } |
+ } else if (value->opcode() == IrOpcode::kWord64Equal) { |
+ Int64BinopMatcher m(value); |
+ if (m.right().Is(0)) { |
+ user = value; |
+ value = m.left().node(); |
+ cont.Negate(); |
+ } else { |
+ break; |
+ } |
+ } else { |
break; |
+ } |
} |
- X64OperandGenerator g(this); |
- VisitCompare(this, kX64Test, g.Use(node), g.TempImmediate(-1), cont); |
+ // Try to combine the branch with a comparison. |
+ if (CanCover(user, value)) { |
+ switch (value->opcode()) { |
+ case IrOpcode::kWord32Equal: |
+ cont.OverwriteAndNegateIfEqual(kEqual); |
+ return VisitWordCompare(this, value, kX64Cmp32, &cont, false); |
+ case IrOpcode::kInt32LessThan: |
+ cont.OverwriteAndNegateIfEqual(kSignedLessThan); |
+ return VisitWordCompare(this, value, kX64Cmp32, &cont, false); |
+ case IrOpcode::kInt32LessThanOrEqual: |
+ cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual); |
+ return VisitWordCompare(this, value, kX64Cmp32, &cont, false); |
+ case IrOpcode::kUint32LessThan: |
+ cont.OverwriteAndNegateIfEqual(kUnsignedLessThan); |
+ return VisitWordCompare(this, value, kX64Cmp32, &cont, false); |
+ case IrOpcode::kUint32LessThanOrEqual: |
+ cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual); |
+ return VisitWordCompare(this, value, kX64Cmp32, &cont, false); |
+ case IrOpcode::kWord64Equal: |
+ cont.OverwriteAndNegateIfEqual(kEqual); |
+ return VisitWordCompare(this, value, kX64Cmp, &cont, false); |
+ case IrOpcode::kInt64LessThan: |
+ cont.OverwriteAndNegateIfEqual(kSignedLessThan); |
+ return VisitWordCompare(this, value, kX64Cmp, &cont, false); |
+ case IrOpcode::kInt64LessThanOrEqual: |
+ cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual); |
+ return VisitWordCompare(this, value, kX64Cmp, &cont, false); |
+ case IrOpcode::kUint64LessThan: |
+ cont.OverwriteAndNegateIfEqual(kUnsignedLessThan); |
+ return VisitWordCompare(this, value, kX64Cmp, &cont, false); |
+ case IrOpcode::kFloat64Equal: |
+ cont.OverwriteAndNegateIfEqual(kUnorderedEqual); |
+ return VisitFloat64Compare(this, value, &cont); |
+ case IrOpcode::kFloat64LessThan: |
+ cont.OverwriteAndNegateIfEqual(kUnorderedLessThan); |
+ return VisitFloat64Compare(this, value, &cont); |
+ case IrOpcode::kFloat64LessThanOrEqual: |
+ cont.OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual); |
+ return VisitFloat64Compare(this, value, &cont); |
+ case IrOpcode::kProjection: |
+ // Check if this is the overflow output projection of an |
+ // <Operation>WithOverflow node. |
+ if (OpParameter<size_t>(value) == 1u) { |
+ // We cannot combine the <Operation>WithOverflow with this branch |
+ // unless the 0th projection (the use of the actual value of the |
+          // <Operation>) is either NULL, which means there's no use of the |
+ // actual value, or was already defined, which means it is scheduled |
+ // *AFTER* this branch). |
+ Node* node = value->InputAt(0); |
+ Node* result = node->FindProjection(0); |
+ if (result == NULL || IsDefined(result)) { |
+ switch (node->opcode()) { |
+ case IrOpcode::kInt32AddWithOverflow: |
+ cont.OverwriteAndNegateIfEqual(kOverflow); |
+ return VisitBinop(this, node, kX64Add32, &cont); |
+ case IrOpcode::kInt32SubWithOverflow: |
+ cont.OverwriteAndNegateIfEqual(kOverflow); |
+ return VisitBinop(this, node, kX64Sub32, &cont); |
+ default: |
+ break; |
+ } |
+ } |
+ } |
+ break; |
+ case IrOpcode::kInt32Sub: |
+ return VisitWordCompare(this, value, kX64Cmp32, &cont, false); |
+ case IrOpcode::kWord32And: |
+ return VisitWordCompare(this, value, kX64Test32, &cont, true); |
+ default: |
+ break; |
+ } |
+ } |
+ |
+ // Branch could not be combined with a compare, emit compare against 0. |
+ VisitWordTest(this, value, kX64Test32, &cont); |
+} |
+ |
+ |
+void InstructionSelector::VisitWord32Equal(Node* const node) { |
+ Node* const user = node; |
+ FlagsContinuation cont(kEqual, node); |
+ Int32BinopMatcher m(user); |
+ if (m.right().Is(0)) { |
+ Node* const value = m.left().node(); |
+ if (CanCover(user, value)) { |
+ switch (value->opcode()) { |
+ case IrOpcode::kInt32Sub: |
+ return VisitWordCompare(this, value, kX64Cmp32, &cont, false); |
+ case IrOpcode::kWord32And: |
+ return VisitWordCompare(this, value, kX64Test32, &cont, true); |
+ default: |
+ break; |
+ } |
+ return VisitWordTest(this, value, kX64Test32, &cont); |
+ } |
+ } |
+ VisitWordCompare(this, node, kX64Cmp32, &cont, false); |
} |
-void InstructionSelector::VisitWord32Compare(Node* node, |
- FlagsContinuation* cont) { |
- VisitWordCompare(this, node, kX64Cmp32, cont, false); |
+void InstructionSelector::VisitInt32LessThan(Node* node) { |
+ FlagsContinuation cont(kSignedLessThan, node); |
+ VisitWordCompare(this, node, kX64Cmp32, &cont, false); |
} |
-void InstructionSelector::VisitWord64Compare(Node* node, |
- FlagsContinuation* cont) { |
- VisitWordCompare(this, node, kX64Cmp, cont, false); |
+void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) { |
+ FlagsContinuation cont(kSignedLessThanOrEqual, node); |
+ VisitWordCompare(this, node, kX64Cmp32, &cont, false); |
} |
-void InstructionSelector::VisitFloat64Compare(Node* node, |
- FlagsContinuation* cont) { |
- X64OperandGenerator g(this); |
- Node* left = node->InputAt(0); |
- Node* right = node->InputAt(1); |
- VisitCompare(this, kSSEFloat64Cmp, g.UseRegister(left), g.Use(right), cont); |
+void InstructionSelector::VisitUint32LessThan(Node* node) { |
+ FlagsContinuation cont(kUnsignedLessThan, node); |
+ VisitWordCompare(this, node, kX64Cmp32, &cont, false); |
} |
-void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, |
- BasicBlock* deoptimization) { |
- X64OperandGenerator g(this); |
- CallDescriptor* descriptor = OpParameter<CallDescriptor*>(call); |
+void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) { |
+ FlagsContinuation cont(kUnsignedLessThanOrEqual, node); |
+ VisitWordCompare(this, node, kX64Cmp32, &cont, false); |
+} |
- FrameStateDescriptor* frame_state_descriptor = NULL; |
- if (descriptor->NeedsFrameState()) { |
- frame_state_descriptor = GetFrameStateDescriptor( |
- call->InputAt(static_cast<int>(descriptor->InputCount()))); |
- } |
- CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
+void InstructionSelector::VisitWord64Equal(Node* const node) { |
+ Node* const user = node; |
+ FlagsContinuation cont(kEqual, node); |
+ Int64BinopMatcher m(user); |
+ if (m.right().Is(0)) { |
+ Node* const value = m.left().node(); |
+ if (CanCover(user, value)) { |
+ switch (value->opcode()) { |
+ case IrOpcode::kInt64Sub: |
+ return VisitWordCompare(this, value, kX64Cmp, &cont, false); |
+ case IrOpcode::kWord64And: |
+ return VisitWordCompare(this, value, kX64Test, &cont, true); |
+ default: |
+ break; |
+ } |
+ return VisitWordTest(this, value, kX64Test, &cont); |
+ } |
+ } |
+ VisitWordCompare(this, node, kX64Cmp, &cont, false); |
+} |
- // Compute InstructionOperands for inputs and outputs. |
- InitializeCallBuffer(call, &buffer, true, true); |
- // Push any stack arguments. |
- for (NodeVectorRIter input = buffer.pushed_nodes.rbegin(); |
- input != buffer.pushed_nodes.rend(); input++) { |
- // TODO(titzer): handle pushing double parameters. |
- Emit(kX64Push, NULL, |
- g.CanBeImmediate(*input) ? g.UseImmediate(*input) : g.Use(*input)); |
+void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { |
+ if (Node* ovf = node->FindProjection(1)) { |
+ FlagsContinuation cont(kOverflow, ovf); |
+    return VisitBinop(this, node, kX64Add32, &cont); |
} |
+ FlagsContinuation cont; |
+ VisitBinop(this, node, kX64Add32, &cont); |
+} |
- // Select the appropriate opcode based on the call type. |
- InstructionCode opcode; |
- switch (descriptor->kind()) { |
- case CallDescriptor::kCallCodeObject: { |
- opcode = kArchCallCodeObject; |
- break; |
- } |
- case CallDescriptor::kCallJSFunction: |
- opcode = kArchCallJSFunction; |
- break; |
- default: |
- UNREACHABLE(); |
- return; |
+ |
+void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { |
+ if (Node* ovf = node->FindProjection(1)) { |
+ FlagsContinuation cont(kOverflow, ovf); |
+ return VisitBinop(this, node, kX64Sub32, &cont); |
} |
- opcode |= MiscField::encode(descriptor->flags()); |
+ FlagsContinuation cont; |
+ VisitBinop(this, node, kX64Sub32, &cont); |
+} |
- // Emit the call instruction. |
- Instruction* call_instr = |
- Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(), |
- buffer.instruction_args.size(), &buffer.instruction_args.front()); |
- call_instr->MarkAsCall(); |
- if (deoptimization != NULL) { |
- DCHECK(continuation != NULL); |
- call_instr->MarkAsControl(); |
- } |
+void InstructionSelector::VisitInt64LessThan(Node* node) { |
+ FlagsContinuation cont(kSignedLessThan, node); |
+ VisitWordCompare(this, node, kX64Cmp, &cont, false); |
+} |
+ |
+ |
+void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) { |
+ FlagsContinuation cont(kSignedLessThanOrEqual, node); |
+ VisitWordCompare(this, node, kX64Cmp, &cont, false); |
+} |
+ |
+ |
+void InstructionSelector::VisitUint64LessThan(Node* node) { |
+ FlagsContinuation cont(kUnsignedLessThan, node); |
+ VisitWordCompare(this, node, kX64Cmp, &cont, false); |
+} |
+ |
+ |
+void InstructionSelector::VisitFloat64Equal(Node* node) { |
+ FlagsContinuation cont(kUnorderedEqual, node); |
+ VisitFloat64Compare(this, node, &cont); |
+} |
+ |
+ |
+void InstructionSelector::VisitFloat64LessThan(Node* node) { |
+ FlagsContinuation cont(kUnorderedLessThan, node); |
+ VisitFloat64Compare(this, node, &cont); |
+} |
+ |
+ |
+void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) { |
+ FlagsContinuation cont(kUnorderedLessThanOrEqual, node); |
+ VisitFloat64Compare(this, node, &cont); |
} |
} // namespace compiler |