| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
| 6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| (...skipping 795 matching lines...) |
| 806 } | 806 } |
| 807 | 807 |
| 808 | 808 |
| 809 void InstructionSelector::VisitFloat64Sqrt(Node* node) { | 809 void InstructionSelector::VisitFloat64Sqrt(Node* node) { |
| 810 Arm64OperandGenerator g(this); | 810 Arm64OperandGenerator g(this); |
| 811 Emit(kArm64Float64Sqrt, g.DefineAsRegister(node), | 811 Emit(kArm64Float64Sqrt, g.DefineAsRegister(node), |
| 812 g.UseRegister(node->InputAt(0))); | 812 g.UseRegister(node->InputAt(0))); |
| 813 } | 813 } |
| 814 | 814 |
| 815 | 815 |
| 816 void InstructionSelector::VisitInt32AddWithOverflow(Node* node, | 816 void InstructionSelector::VisitCall(Node* node) { |
| 817 FlagsContinuation* cont) { | 817 Arm64OperandGenerator g(this); |
| 818 VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, kArithmeticImm, cont); | 818 CallDescriptor* descriptor = OpParameter<CallDescriptor*>(node); |
| 819 |
| 820 FrameStateDescriptor* frame_state_descriptor = NULL; |
| 821 if (descriptor->NeedsFrameState()) { |
| 822 frame_state_descriptor = |
| 823 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); |
| 824 } |
| 825 |
| 826 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
| 827 |
| 828 // Compute InstructionOperands for inputs and outputs. |
| 829 // TODO(turbofan): on ARM64 it's probably better to use the code object in a |
| 830 // register if there are multiple uses of it. Improve constant pool and the |
| 831 // heuristics in the register allocator for where to emit constants. |
| 832 InitializeCallBuffer(node, &buffer, true, false); |
| 833 |
| 834 // Push the arguments to the stack. |
| 835 bool pushed_count_uneven = buffer.pushed_nodes.size() & 1; |
| 836 int aligned_push_count = buffer.pushed_nodes.size(); |
| 837 // TODO(dcarney): claim and poke probably take small immediates, |
| 838 // loop here or whatever. |
| 839 // Bump the stack pointer(s). |
| 840 if (aligned_push_count > 0) { |
| 841 // TODO(dcarney): it would be better to bump the csp here only |
| 842 // and emit paired stores with increment for non c frames. |
| 843 Emit(kArm64Claim | MiscField::encode(aligned_push_count), NULL); |
| 844 } |
| 845 // Move arguments to the stack. |
| 846 { |
| 847 int slot = buffer.pushed_nodes.size() - 1; |
| 848 // Emit the uneven pushes. |
| 849 if (pushed_count_uneven) { |
| 850 Node* input = buffer.pushed_nodes[slot]; |
| 851 Emit(kArm64Poke | MiscField::encode(slot), NULL, g.UseRegister(input)); |
| 852 slot--; |
| 853 } |
| 854 // Now all pushes can be done in pairs. |
| 855 for (; slot >= 0; slot -= 2) { |
| 856 Emit(kArm64PokePair | MiscField::encode(slot), NULL, |
| 857 g.UseRegister(buffer.pushed_nodes[slot]), |
| 858 g.UseRegister(buffer.pushed_nodes[slot - 1])); |
| 859 } |
| 860 } |
| 861 |
| 862 // Select the appropriate opcode based on the call type. |
| 863 InstructionCode opcode; |
| 864 switch (descriptor->kind()) { |
| 865 case CallDescriptor::kCallCodeObject: { |
| 866 opcode = kArchCallCodeObject; |
| 867 break; |
| 868 } |
| 869 case CallDescriptor::kCallJSFunction: |
| 870 opcode = kArchCallJSFunction; |
| 871 break; |
| 872 default: |
| 873 UNREACHABLE(); |
| 874 return; |
| 875 } |
| 876 opcode |= MiscField::encode(descriptor->flags()); |
| 877 |
| 878 // Emit the call instruction. |
| 879 Instruction* call_instr = |
| 880 Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(), |
| 881 buffer.instruction_args.size(), &buffer.instruction_args.front()); |
| 882 call_instr->MarkAsCall(); |
| 819 } | 883 } |
| 820 | 884 |
| 821 | 885 |
| 822 void InstructionSelector::VisitInt32SubWithOverflow(Node* node, | |
| 823 FlagsContinuation* cont) { | |
| 824 VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, kArithmeticImm, cont); | |
| 825 } | |
| 826 | |
| 827 | |
| 828 // Shared routine for multiple compare operations. | 886 // Shared routine for multiple compare operations. |
| 829 static void VisitCompare(InstructionSelector* selector, InstructionCode opcode, | 887 static void VisitCompare(InstructionSelector* selector, InstructionCode opcode, |
| 830 InstructionOperand* left, InstructionOperand* right, | 888 InstructionOperand* left, InstructionOperand* right, |
| 831 FlagsContinuation* cont) { | 889 FlagsContinuation* cont) { |
| 832 Arm64OperandGenerator g(selector); | 890 Arm64OperandGenerator g(selector); |
| 833 opcode = cont->Encode(opcode); | 891 opcode = cont->Encode(opcode); |
| 834 if (cont->IsBranch()) { | 892 if (cont->IsBranch()) { |
| 835 selector->Emit(opcode, NULL, left, right, g.Label(cont->true_block()), | 893 selector->Emit(opcode, NULL, left, right, g.Label(cont->true_block()), |
| 836 g.Label(cont->false_block()))->MarkAsControl(); | 894 g.Label(cont->false_block()))->MarkAsControl(); |
| 837 } else { | 895 } else { |
| (...skipping 19 matching lines...) |
| 857 if (!commutative) cont->Commute(); | 915 if (!commutative) cont->Commute(); |
| 858 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left), | 916 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left), |
| 859 cont); | 917 cont); |
| 860 } else { | 918 } else { |
| 861 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right), | 919 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right), |
| 862 cont); | 920 cont); |
| 863 } | 921 } |
| 864 } | 922 } |
| 865 | 923 |
| 866 | 924 |
| 867 void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) { | 925 static void VisitWord32Compare(InstructionSelector* selector, Node* node, |
| 868 switch (node->opcode()) { | 926 FlagsContinuation* cont) { |
| 869 case IrOpcode::kInt32Add: | 927 VisitWordCompare(selector, node, kArm64Cmp32, cont, false); |
| 870 return VisitWordCompare(this, node, kArm64Cmn32, cont, true); | 928 } |
| 871 case IrOpcode::kInt32Sub: | 929 |
| 872 return VisitWordCompare(this, node, kArm64Cmp32, cont, false); | 930 |
| 873 case IrOpcode::kWord32And: | 931 static void VisitWordTest(InstructionSelector* selector, Node* node, |
| 874 return VisitWordCompare(this, node, kArm64Tst32, cont, true); | 932 InstructionCode opcode, FlagsContinuation* cont) { |
| 875 default: | 933 Arm64OperandGenerator g(selector); |
| 876 break; | 934 VisitCompare(selector, opcode, g.UseRegister(node), g.UseRegister(node), |
| 877 } | |
| 878 | |
| 879 Arm64OperandGenerator g(this); | |
| 880 VisitCompare(this, kArm64Tst32, g.UseRegister(node), g.UseRegister(node), | |
| 881 cont); | 935 cont); |
| 882 } | 936 } |
| 883 | 937 |
| 884 | 938 |
| 885 void InstructionSelector::VisitWord64Test(Node* node, FlagsContinuation* cont) { | 939 static void VisitWord32Test(InstructionSelector* selector, Node* node, |
| 886 switch (node->opcode()) { | 940 FlagsContinuation* cont) { |
| 887 case IrOpcode::kWord64And: | 941 VisitWordTest(selector, node, kArm64Tst32, cont); |
| 888 return VisitWordCompare(this, node, kArm64Tst, cont, true); | 942 } |
| 889 default: | 943 |
| 890 break; | 944 |
| 891 } | 945 static void VisitWord64Test(InstructionSelector* selector, Node* node, |
| 892 | 946 FlagsContinuation* cont) { |
| 893 Arm64OperandGenerator g(this); | 947 VisitWordTest(selector, node, kArm64Tst, cont); |
| 894 VisitCompare(this, kArm64Tst, g.UseRegister(node), g.UseRegister(node), cont); | 948 } |
| 895 } | 949 |
| 896 | 950 |
| 897 | 951 // Shared routine for multiple float compare operations. |
| 898 void InstructionSelector::VisitWord32Compare(Node* node, | 952 static void VisitFloat64Compare(InstructionSelector* selector, Node* node, |
| 899 FlagsContinuation* cont) { | 953 FlagsContinuation* cont) { |
| 900 VisitWordCompare(this, node, kArm64Cmp32, cont, false); | 954 Arm64OperandGenerator g(selector); |
| 901 } | |
| 902 | |
| 903 | |
| 904 void InstructionSelector::VisitWord64Compare(Node* node, | |
| 905 FlagsContinuation* cont) { | |
| 906 VisitWordCompare(this, node, kArm64Cmp, cont, false); | |
| 907 } | |
| 908 | |
| 909 | |
| 910 void InstructionSelector::VisitFloat64Compare(Node* node, | |
| 911 FlagsContinuation* cont) { | |
| 912 Arm64OperandGenerator g(this); | |
| 913 Node* left = node->InputAt(0); | 955 Node* left = node->InputAt(0); |
| 914 Node* right = node->InputAt(1); | 956 Node* right = node->InputAt(1); |
| 915 VisitCompare(this, kArm64Float64Cmp, g.UseRegister(left), | 957 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(left), |
| 916 g.UseRegister(right), cont); | 958 g.UseRegister(right), cont); |
| 917 } | 959 } |
| 918 | 960 |
| 919 | 961 |
| 920 void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, | 962 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, |
| 921 BasicBlock* deoptimization) { | 963 BasicBlock* fbranch) { |
| 922 Arm64OperandGenerator g(this); | 964 OperandGenerator g(this); |
| 923 CallDescriptor* descriptor = OpParameter<CallDescriptor*>(call); | 965 Node* user = branch; |
| 924 | 966 Node* value = branch->InputAt(0); |
| 925 FrameStateDescriptor* frame_state_descriptor = NULL; | 967 |
| 926 if (descriptor->NeedsFrameState()) { | 968 FlagsContinuation cont(kNotEqual, tbranch, fbranch); |
| 927 frame_state_descriptor = | 969 |
| 928 GetFrameStateDescriptor(call->InputAt(descriptor->InputCount())); | 970 // If we can fall through to the true block, invert the branch. |
| 929 } | 971 if (IsNextInAssemblyOrder(tbranch)) { |
| 930 | 972 cont.Negate(); |
| 931 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); | 973 cont.SwapBlocks(); |
| 932 | 974 } |
| 933 // Compute InstructionOperands for inputs and outputs. | 975 |
| 934 // TODO(turbofan): on ARM64 it's probably better to use the code object in a | 976 // Try to combine with comparisons against 0 by simply inverting the branch. |
| 935 // register if there are multiple uses of it. Improve constant pool and the | 977 while (CanCover(user, value)) { |
| 936 // heuristics in the register allocator for where to emit constants. | 978 if (value->opcode() == IrOpcode::kWord32Equal) { |
| 937 InitializeCallBuffer(call, &buffer, true, false); | 979 Int32BinopMatcher m(value); |
| 938 | 980 if (m.right().Is(0)) { |
| 939 // Push the arguments to the stack. | 981 user = value; |
| 940 bool pushed_count_uneven = buffer.pushed_nodes.size() & 1; | 982 value = m.left().node(); |
| 941 int aligned_push_count = buffer.pushed_nodes.size(); | 983 cont.Negate(); |
| 942 // TODO(dcarney): claim and poke probably take small immediates, | 984 } else { |
| 943 // loop here or whatever. | 985 break; |
| 944 // Bump the stack pointer(s). | 986 } |
| 945 if (aligned_push_count > 0) { | 987 } else if (value->opcode() == IrOpcode::kWord64Equal) { |
| 946 // TODO(dcarney): it would be better to bump the csp here only | 988 Int64BinopMatcher m(value); |
| 947 // and emit paired stores with increment for non c frames. | 989 if (m.right().Is(0)) { |
| 948 Emit(kArm64Claim | MiscField::encode(aligned_push_count), NULL); | 990 user = value; |
| 949 } | 991 value = m.left().node(); |
| 950 // Move arguments to the stack. | 992 cont.Negate(); |
| 951 { | 993 } else { |
| 952 int slot = buffer.pushed_nodes.size() - 1; | 994 break; |
| 953 // Emit the uneven pushes. | 995 } |
| 954 if (pushed_count_uneven) { | 996 } else { |
| 955 Node* input = buffer.pushed_nodes[slot]; | |
| 956 Emit(kArm64Poke | MiscField::encode(slot), NULL, g.UseRegister(input)); | |
| 957 slot--; | |
| 958 } | |
| 959 // Now all pushes can be done in pairs. | |
| 960 for (; slot >= 0; slot -= 2) { | |
| 961 Emit(kArm64PokePair | MiscField::encode(slot), NULL, | |
| 962 g.UseRegister(buffer.pushed_nodes[slot]), | |
| 963 g.UseRegister(buffer.pushed_nodes[slot - 1])); | |
| 964 } | |
| 965 } | |
| 966 | |
| 967 // Select the appropriate opcode based on the call type. | |
| 968 InstructionCode opcode; | |
| 969 switch (descriptor->kind()) { | |
| 970 case CallDescriptor::kCallCodeObject: { | |
| 971 opcode = kArchCallCodeObject; | |
| 972 break; | 997 break; |
| 973 } | 998 } |
| 974 case CallDescriptor::kCallJSFunction: | 999 } |
| 975 opcode = kArchCallJSFunction; | 1000 |
| 976 break; | 1001 // Try to combine the branch with a comparison. |
| 977 default: | 1002 if (CanCover(user, value)) { |
| 978 UNREACHABLE(); | 1003 switch (value->opcode()) { |
| 979 return; | 1004 case IrOpcode::kWord32Equal: |
| 980 } | 1005 cont.OverwriteAndNegateIfEqual(kEqual); |
| 981 opcode |= MiscField::encode(descriptor->flags()); | 1006 return VisitWord32Compare(this, value, &cont); |
| 982 | 1007 case IrOpcode::kInt32LessThan: |
| 983 // Emit the call instruction. | 1008 cont.OverwriteAndNegateIfEqual(kSignedLessThan); |
| 984 Instruction* call_instr = | 1009 return VisitWord32Compare(this, value, &cont); |
| 985 Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(), | 1010 case IrOpcode::kInt32LessThanOrEqual: |
| 986 buffer.instruction_args.size(), &buffer.instruction_args.front()); | 1011 cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual); |
| 987 | 1012 return VisitWord32Compare(this, value, &cont); |
| 988 call_instr->MarkAsCall(); | 1013 case IrOpcode::kUint32LessThan: |
| 989 if (deoptimization != NULL) { | 1014 cont.OverwriteAndNegateIfEqual(kUnsignedLessThan); |
| 990 DCHECK(continuation != NULL); | 1015 return VisitWord32Compare(this, value, &cont); |
| 991 call_instr->MarkAsControl(); | 1016 case IrOpcode::kUint32LessThanOrEqual: |
| 992 } | 1017 cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual); |
| 1018 return VisitWord32Compare(this, value, &cont); |
| 1019 case IrOpcode::kWord64Equal: |
| 1020 cont.OverwriteAndNegateIfEqual(kEqual); |
| 1021 return VisitWordCompare(this, value, kArm64Cmp, &cont, false); |
| 1022 case IrOpcode::kInt64LessThan: |
| 1023 cont.OverwriteAndNegateIfEqual(kSignedLessThan); |
| 1024 return VisitWordCompare(this, value, kArm64Cmp, &cont, false); |
| 1025 case IrOpcode::kInt64LessThanOrEqual: |
| 1026 cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual); |
| 1027 return VisitWordCompare(this, value, kArm64Cmp, &cont, false); |
| 1028 case IrOpcode::kUint64LessThan: |
| 1029 cont.OverwriteAndNegateIfEqual(kUnsignedLessThan); |
| 1030 return VisitWordCompare(this, value, kArm64Cmp, &cont, false); |
| 1031 case IrOpcode::kFloat64Equal: |
| 1032 cont.OverwriteAndNegateIfEqual(kUnorderedEqual); |
| 1033 return VisitFloat64Compare(this, value, &cont); |
| 1034 case IrOpcode::kFloat64LessThan: |
| 1035 cont.OverwriteAndNegateIfEqual(kUnorderedLessThan); |
| 1036 return VisitFloat64Compare(this, value, &cont); |
| 1037 case IrOpcode::kFloat64LessThanOrEqual: |
| 1038 cont.OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual); |
| 1039 return VisitFloat64Compare(this, value, &cont); |
| 1040 case IrOpcode::kProjection: |
| 1041 // Check if this is the overflow output projection of an |
| 1042 // <Operation>WithOverflow node. |
| 1043 if (OpParameter<size_t>(value) == 1u) { |
| 1044 // We cannot combine the <Operation>WithOverflow with this branch |
| 1045 // unless the 0th projection (the use of the actual value of the |
| 1046 // <Operation> is either NULL, which means there's no use of the |
| 1047 // actual value, or was already defined, which means it is scheduled |
| 1048 // *AFTER* this branch). |
| 1049 Node* node = value->InputAt(0); |
| 1050 Node* result = node->FindProjection(0); |
| 1051 if (result == NULL || IsDefined(result)) { |
| 1052 switch (node->opcode()) { |
| 1053 case IrOpcode::kInt32AddWithOverflow: |
| 1054 cont.OverwriteAndNegateIfEqual(kOverflow); |
| 1055 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, |
| 1056 kArithmeticImm, &cont); |
| 1057 case IrOpcode::kInt32SubWithOverflow: |
| 1058 cont.OverwriteAndNegateIfEqual(kOverflow); |
| 1059 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, |
| 1060 kArithmeticImm, &cont); |
| 1061 default: |
| 1062 break; |
| 1063 } |
| 1064 } |
| 1065 } |
| 1066 break; |
| 1067 case IrOpcode::kInt32Add: |
| 1068 return VisitWordCompare(this, value, kArm64Cmn32, &cont, true); |
| 1069 case IrOpcode::kInt32Sub: |
| 1070 return VisitWordCompare(this, value, kArm64Cmp32, &cont, false); |
| 1071 case IrOpcode::kWord32And: |
| 1072 return VisitWordCompare(this, value, kArm64Tst32, &cont, true); |
| 1073 default: |
| 1074 break; |
| 1075 } |
| 1076 } |
| 1077 |
| 1078 // Branch could not be combined with a compare, emit compare against 0. |
| 1079 VisitWord32Test(this, value, &cont); |
| 1080 } |
| 1081 |
| 1082 |
| 1083 void InstructionSelector::VisitWord32Equal(Node* const node) { |
| 1084 Node* const user = node; |
| 1085 FlagsContinuation cont(kEqual, node); |
| 1086 Int32BinopMatcher m(user); |
| 1087 if (m.right().Is(0)) { |
| 1088 Node* const value = m.left().node(); |
| 1089 if (CanCover(user, value)) { |
| 1090 switch (value->opcode()) { |
| 1091 case IrOpcode::kInt32Add: |
| 1092 return VisitWordCompare(this, value, kArm64Cmn32, &cont, true); |
| 1093 case IrOpcode::kInt32Sub: |
| 1094 return VisitWordCompare(this, value, kArm64Cmp32, &cont, false); |
| 1095 case IrOpcode::kWord32And: |
| 1096 return VisitWordCompare(this, value, kArm64Tst32, &cont, true); |
| 1097 default: |
| 1098 break; |
| 1099 } |
| 1100 return VisitWord32Test(this, value, &cont); |
| 1101 } |
| 1102 } |
| 1103 VisitWord32Compare(this, node, &cont); |
| 1104 } |
| 1105 |
| 1106 |
| 1107 void InstructionSelector::VisitInt32LessThan(Node* node) { |
| 1108 FlagsContinuation cont(kSignedLessThan, node); |
| 1109 VisitWord32Compare(this, node, &cont); |
| 1110 } |
| 1111 |
| 1112 |
| 1113 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) { |
| 1114 FlagsContinuation cont(kSignedLessThanOrEqual, node); |
| 1115 VisitWord32Compare(this, node, &cont); |
| 1116 } |
| 1117 |
| 1118 |
| 1119 void InstructionSelector::VisitUint32LessThan(Node* node) { |
| 1120 FlagsContinuation cont(kUnsignedLessThan, node); |
| 1121 VisitWord32Compare(this, node, &cont); |
| 1122 } |
| 1123 |
| 1124 |
| 1125 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) { |
| 1126 FlagsContinuation cont(kUnsignedLessThanOrEqual, node); |
| 1127 VisitWord32Compare(this, node, &cont); |
| 1128 } |
| 1129 |
| 1130 |
| 1131 void InstructionSelector::VisitWord64Equal(Node* const node) { |
| 1132 Node* const user = node; |
| 1133 FlagsContinuation cont(kEqual, node); |
| 1134 Int64BinopMatcher m(user); |
| 1135 if (m.right().Is(0)) { |
| 1136 Node* const value = m.left().node(); |
| 1137 if (CanCover(user, value)) { |
| 1138 switch (value->opcode()) { |
| 1139 case IrOpcode::kWord64And: |
| 1140 return VisitWordCompare(this, value, kArm64Tst, &cont, true); |
| 1141 default: |
| 1142 break; |
| 1143 } |
| 1144 return VisitWord64Test(this, value, &cont); |
| 1145 } |
| 1146 } |
| 1147 VisitWordCompare(this, node, kArm64Cmp, &cont, false); |
| 1148 } |
| 1149 |
| 1150 |
| 1151 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { |
| 1152 if (Node* ovf = node->FindProjection(1)) { |
| 1153 FlagsContinuation cont(kOverflow, ovf); |
| 1154 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, |
| 1155 kArithmeticImm, &cont); |
| 1156 } |
| 1157 FlagsContinuation cont; |
| 1158 VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, kArithmeticImm, &cont); |
| 1159 } |
| 1160 |
| 1161 |
| 1162 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { |
| 1163 if (Node* ovf = node->FindProjection(1)) { |
| 1164 FlagsContinuation cont(kOverflow, ovf); |
| 1165 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, |
| 1166 kArithmeticImm, &cont); |
| 1167 } |
| 1168 FlagsContinuation cont; |
| 1169 VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, kArithmeticImm, &cont); |
| 1170 } |
| 1171 |
| 1172 |
| 1173 void InstructionSelector::VisitInt64LessThan(Node* node) { |
| 1174 FlagsContinuation cont(kSignedLessThan, node); |
| 1175 VisitWordCompare(this, node, kArm64Cmp, &cont, false); |
| 1176 } |
| 1177 |
| 1178 |
| 1179 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) { |
| 1180 FlagsContinuation cont(kSignedLessThanOrEqual, node); |
| 1181 VisitWordCompare(this, node, kArm64Cmp, &cont, false); |
| 1182 } |
| 1183 |
| 1184 |
| 1185 void InstructionSelector::VisitUint64LessThan(Node* node) { |
| 1186 FlagsContinuation cont(kUnsignedLessThan, node); |
| 1187 VisitWordCompare(this, node, kArm64Cmp, &cont, false); |
| 1188 } |
| 1189 |
| 1190 |
| 1191 void InstructionSelector::VisitFloat64Equal(Node* node) { |
| 1192 FlagsContinuation cont(kUnorderedEqual, node); |
| 1193 VisitFloat64Compare(this, node, &cont); |
| 1194 } |
| 1195 |
| 1196 |
| 1197 void InstructionSelector::VisitFloat64LessThan(Node* node) { |
| 1198 FlagsContinuation cont(kUnorderedLessThan, node); |
| 1199 VisitFloat64Compare(this, node, &cont); |
| 1200 } |
| 1201 |
| 1202 |
| 1203 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) { |
| 1204 FlagsContinuation cont(kUnorderedLessThanOrEqual, node); |
| 1205 VisitFloat64Compare(this, node, &cont); |
| 993 } | 1206 } |
| 994 | 1207 |
| 995 } // namespace compiler | 1208 } // namespace compiler |
| 996 } // namespace internal | 1209 } // namespace internal |
| 997 } // namespace v8 | 1210 } // namespace v8 |
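Two patterns in the new code above are worth spelling out (these notes and sketches are not part of the original diff).

First, the argument-pushing logic in the new `VisitCall` (new lines 834-860) pokes a single argument into its stack slot when the count is odd, then emits the remainder strictly in pairs so ARM64 can use paired stores. A minimal standalone sketch, using plain ints in place of V8 nodes and `printf` in place of `Emit` (hypothetical names, not V8's API):

```cpp
#include <cstdio>
#include <vector>

// Mirrors the slot walk in VisitCall: one single-slot poke if the count
// is odd, then strictly paired pokes down to slot 0.
void EmitPokes(const std::vector<int>& pushed_nodes) {
  int slot = static_cast<int>(pushed_nodes.size()) - 1;
  if (pushed_nodes.size() & 1) {   // pushed_count_uneven in the diff
    std::printf("Poke     slot %d <- node %d\n", slot, pushed_nodes[slot]);
    slot--;
  }
  for (; slot >= 0; slot -= 2) {   // pairs map to ARM64 paired stores (stp)
    std::printf("PokePair slots %d,%d <- nodes %d,%d\n", slot, slot - 1,
                pushed_nodes[slot], pushed_nodes[slot - 1]);
  }
}

int main() {
  EmitPokes({10, 11, 12, 13, 14});  // odd count: one Poke, then two PokePairs
  return 0;
}
```

Second, the loop in the new `VisitBranch` (new lines 976-999) peels `Word32Equal(x, 0)` and `Word64Equal(x, 0)` wrappers off the branch condition, negating the flags continuation once per wrapper, so `branch(x == 0)` becomes an inverted branch on `x` itself. A sketch under the same caveat (toy `Node` type; the real code also requires `CanCover(user, value)` to hold before each peel):

```cpp
struct Node {
  bool is_equal_zero;  // stands in for kWord32Equal/kWord64Equal with rhs 0
  Node* input;         // stands in for m.left().node()
};

// Each peeled wrapper flips the branch condition (cont.Negate() in the diff).
Node* FoldZeroCompares(Node* value, bool* negate) {
  while (value->is_equal_zero) {
    *negate = !*negate;
    value = value->input;
  }
  return value;
}
```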