| Index: src/compiler/arm64/instruction-selector-arm64.cc
|
| diff --git a/src/compiler/arm64/instruction-selector-arm64.cc b/src/compiler/arm64/instruction-selector-arm64.cc
|
| index bd23dfd17e2a6fc9d7a85181df26f2c8a46a55c3..29f219cec819e95e226e07c91e4b97d00ef2a61f 100644
|
| --- a/src/compiler/arm64/instruction-selector-arm64.cc
|
| +++ b/src/compiler/arm64/instruction-selector-arm64.cc
|
| @@ -106,7 +106,6 @@ static void VisitRRO(InstructionSelector* selector, ArchOpcode opcode,
|
| }
|
|
|
|
|
| -// Shared routine for multiple binary operations.
|
| static void VisitBinop(InstructionSelector* selector, Node* node,
|
| InstructionCode opcode, ImmediateMode operand_mode,
|
| FlagsContinuation* cont) {
|
| @@ -141,7 +140,6 @@ static void VisitBinop(InstructionSelector* selector, Node* node,
|
| }
|
|
|
|
|
| -// Shared routine for multiple binary operations.
|
| static void VisitBinop(InstructionSelector* selector, Node* node,
|
| ArchOpcode opcode, ImmediateMode operand_mode) {
|
| FlagsContinuation cont;
|
| @@ -149,6 +147,18 @@ static void VisitBinop(InstructionSelector* selector, Node* node,
|
| }
|
|
|
|
|
| +static void VisitBinop(InstructionSelector* selector, Node* node,
|
| + InstructionCode opcode,
|
| + FlagsCondition overflow_condition) {
|
| + if (Node* overflow = node->FindProjection(1)) {
|
| + FlagsContinuation cont(overflow_condition, overflow);
|
| + return VisitBinop(selector, node, opcode, kArithimeticImm, &cont);
|
| + }
|
| + FlagsContinuation cont;
|
| + return VisitBinop(selector, node, opcode, kArithimeticImm, &cont);
|
| +}
|
| +
|
| +
|
| void InstructionSelector::VisitLoad(Node* node) {
|
| MachineRepresentation rep = OpParameter<MachineRepresentation>(node);
|
| Arm64OperandGenerator g(this);
|
| @@ -334,6 +344,16 @@ void InstructionSelector::VisitInt32Add(Node* node) {
|
| }
|
|
|
|
|
| +void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
|
| + VisitBinop(this, node, kArm64Add32, kOverflow);
|
| +}
|
| +
|
| +
|
| +void InstructionSelector::VisitUint32AddWithOverflow(Node* node) {
|
| + VisitBinop(this, node, kArm64Add32, kUnsignedGreaterThanOrEqual);
|
| +}
|
| +
|
| +
|
| void InstructionSelector::VisitInt64Add(Node* node) {
|
| VisitBinop(this, node, kArm64Add, kArithimeticImm);
|
| }
|
| @@ -358,6 +378,16 @@ void InstructionSelector::VisitInt32Sub(Node* node) {
|
| }
|
|
|
|
|
| +void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
|
| + VisitBinop(this, node, kArm64Sub32, kOverflow);
|
| +}
|
| +
|
| +
|
| +void InstructionSelector::VisitUint32SubWithOverflow(Node* node) {
|
| + VisitBinop(this, node, kArm64Sub32, kUnsignedLessThan);
|
| +}
|
| +
|
| +
|
| void InstructionSelector::VisitInt64Sub(Node* node) {
|
| VisitSub<int64_t>(this, node, kArm64Sub, kArm64Neg);
|
| }
|
| @@ -479,18 +509,6 @@ void InstructionSelector::VisitFloat64Mod(Node* node) {
|
| }
|
|
|
|
|
| -void InstructionSelector::VisitInt32AddWithOverflow(Node* node,
|
| - FlagsContinuation* cont) {
|
| - VisitBinop(this, node, kArm64Add32, kArithimeticImm, cont);
|
| -}
|
| -
|
| -
|
| -void InstructionSelector::VisitInt32SubWithOverflow(Node* node,
|
| - FlagsContinuation* cont) {
|
| - VisitBinop(this, node, kArm64Sub32, kArithimeticImm, cont);
|
| -}
|
| -
|
| -
|
| // Shared routine for multiple compare operations.
|
| static void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
|
| InstructionOperand* left, InstructionOperand* right,
|
| @@ -579,6 +597,126 @@ void InstructionSelector::VisitFloat64Compare(Node* node,
|
| }
|
|
|
|
|
| +void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
|
| + BasicBlock* fbranch) {
|
| + OperandGenerator g(this);
|
| + Node* user = branch;
|
| + Node* value = branch->InputAt(0);
|
| +
|
| + FlagsContinuation cont(kNotEqual, tbranch, fbranch);
|
| +
|
| + // If we can fall through to the true block, invert the branch.
|
| + if (IsNextInAssemblyOrder(tbranch)) {
|
| + cont.Negate();
|
| + cont.SwapBlocks();
|
| + }
|
| +
|
| + // Try to combine with comparisons against 0 by simply inverting the branch.
|
| + while (CanCover(user, value)) {
|
| + if (value->opcode() == IrOpcode::kWord32Equal) {
|
| + Int32BinopMatcher m(value);
|
| + if (m.right().Is(0)) {
|
| + user = value;
|
| + value = m.left().node();
|
| + cont.Negate();
|
| + } else {
|
| + break;
|
| + }
|
| + } else if (value->opcode() == IrOpcode::kWord64Equal) {
|
| + Int64BinopMatcher m(value);
|
| + if (m.right().Is(0)) {
|
| + user = value;
|
| + value = m.left().node();
|
| + cont.Negate();
|
| + } else {
|
| + break;
|
| + }
|
| + } else {
|
| + break;
|
| + }
|
| + }
|
| +
|
| + // Try to combine the branch with a comparison.
|
| + if (CanCover(user, value)) {
|
| + switch (value->opcode()) {
|
| + case IrOpcode::kWord32Equal:
|
| + cont.OverwriteAndNegateIfEqual(kEqual);
|
| + return VisitWord32Compare(value, &cont);
|
| + case IrOpcode::kInt32LessThan:
|
| + cont.OverwriteAndNegateIfEqual(kSignedLessThan);
|
| + return VisitWord32Compare(value, &cont);
|
| + case IrOpcode::kInt32LessThanOrEqual:
|
| + cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
|
| + return VisitWord32Compare(value, &cont);
|
| + case IrOpcode::kUint32LessThan:
|
| + cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
|
| + return VisitWord32Compare(value, &cont);
|
| + case IrOpcode::kUint32LessThanOrEqual:
|
| + cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
|
| + return VisitWord32Compare(value, &cont);
|
| + case IrOpcode::kWord64Equal:
|
| + cont.OverwriteAndNegateIfEqual(kEqual);
|
| + return VisitWord64Compare(value, &cont);
|
| + case IrOpcode::kInt64LessThan:
|
| + cont.OverwriteAndNegateIfEqual(kSignedLessThan);
|
| + return VisitWord64Compare(value, &cont);
|
| + case IrOpcode::kInt64LessThanOrEqual:
|
| + cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
|
| + return VisitWord64Compare(value, &cont);
|
| + case IrOpcode::kFloat64Equal:
|
| + cont.OverwriteAndNegateIfEqual(kUnorderedEqual);
|
| + return VisitFloat64Compare(value, &cont);
|
| + case IrOpcode::kFloat64LessThan:
|
| + cont.OverwriteAndNegateIfEqual(kUnorderedLessThan);
|
| + return VisitFloat64Compare(value, &cont);
|
| + case IrOpcode::kFloat64LessThanOrEqual:
|
| + cont.OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual);
|
| + return VisitFloat64Compare(value, &cont);
|
| + case IrOpcode::kProjection:
|
| + // Check if this is the overflow output projection of an
|
| + // <Operation>WithOverflow node.
|
| + if (OpParameter<int32_t>(value) == 1) {
|
| + // We cannot combine the <Operation>WithOverflow with this branch
| 
| + // unless the 0th projection (the use of the actual value of the
| 
| + // <Operation>) is either NULL, which means there's no use of the
| 
| + // actual value, or was already defined, which means it is scheduled
| 
| + // *AFTER* this branch.
|
| + Node* node = value->InputAt(0);
|
| + Node* result = node->FindProjection(0);
|
| + if (result == NULL || IsDefined(result)) {
|
| + switch (node->opcode()) {
|
| + case IrOpcode::kInt32AddWithOverflow:
|
| + cont.OverwriteAndNegateIfEqual(kOverflow);
|
| + return VisitBinop(this, node, kArm64Add32, kArithimeticImm,
|
| + &cont);
|
| + case IrOpcode::kUint32AddWithOverflow:
|
| + cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
|
| + return VisitBinop(this, node, kArm64Add32, kArithimeticImm,
|
| + &cont);
|
| + case IrOpcode::kInt32SubWithOverflow:
|
| + cont.OverwriteAndNegateIfEqual(kOverflow);
|
| + return VisitBinop(this, node, kArm64Sub32, kArithimeticImm,
|
| + &cont);
|
| + case IrOpcode::kUint32SubWithOverflow:
|
| + cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
|
| + return VisitBinop(this, node, kArm64Sub32, kArithimeticImm,
|
| + &cont);
|
| + default:
|
| + break;
|
| + }
|
| + }
|
| + }
|
| + break;
|
| + default:
|
| + break;
|
| + }
|
| + }
|
| +
|
| + // Branch could not be combined with a compare, emit compare against 0.
|
| + VisitWord32Test(value, &cont);
|
| +}
|
| +
|
| +
|
| void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
|
| BasicBlock* deoptimization) {
|
| Arm64OperandGenerator g(this);
|
|
|