Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
| 6 #include "src/base/bits.h" | 6 #include "src/base/bits.h" |
| 7 #include "src/compiler/instruction-selector-impl.h" | 7 #include "src/compiler/instruction-selector-impl.h" |
| 8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
| 9 #include "src/compiler/node-properties.h" | 9 #include "src/compiler/node-properties.h" |
| 10 | 10 |
| (...skipping 1062 matching lines...) | |
| 1073 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { | 1073 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { |
| 1074 VisitRR(this, kArmVrintzF64, node); | 1074 VisitRR(this, kArmVrintzF64, node); |
| 1075 } | 1075 } |
| 1076 | 1076 |
| 1077 | 1077 |
| 1078 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { | 1078 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { |
| 1079 VisitRR(this, kArmVrintaF64, node); | 1079 VisitRR(this, kArmVrintaF64, node); |
| 1080 } | 1080 } |
| 1081 | 1081 |
| 1082 | 1082 |
| 1083 void InstructionSelector::VisitCall(Node* node, BasicBlock* handler, | 1083 void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) { |
| 1084 CallMode call_mode) { | |
| 1085 ArmOperandGenerator g(this); | 1084 ArmOperandGenerator g(this); |
| 1086 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); | 1085 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); |
| 1087 | 1086 |
| 1088 FrameStateDescriptor* frame_state_descriptor = NULL; | 1087 FrameStateDescriptor* frame_state_descriptor = nullptr; |
| 1089 if (descriptor->NeedsFrameState()) { | 1088 if (descriptor->NeedsFrameState()) { |
| 1090 frame_state_descriptor = | 1089 frame_state_descriptor = |
| 1091 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); | 1090 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); |
| 1092 } | 1091 } |
| 1093 | 1092 |
| 1094 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); | 1093 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
| 1095 | 1094 |
| 1096 // Compute InstructionOperands for inputs and outputs. | 1095 // Compute InstructionOperands for inputs and outputs. |
| 1097 // TODO(turbofan): on ARM64 it's probably better to use the code object in a | 1096 // TODO(turbofan): on ARM it's probably better to use the code object in a |
| 1098 // register if there are multiple uses of it. Improve constant pool and the | 1097 // register if there are multiple uses of it. Improve constant pool and the |
| 1099 // heuristics in the register allocator for where to emit constants. | 1098 // heuristics in the register allocator for where to emit constants. |
| 1100 InitializeCallBuffer(node, &buffer, true, false); | 1099 InitializeCallBuffer(node, &buffer, true, false); |
| 1101 | 1100 |
| 1102 // TODO(dcarney): might be possible to use claim/poke instead | |
| 1103 // Push any stack arguments. | 1101 // Push any stack arguments. |
| 1104 for (Node* node : base::Reversed(buffer.pushed_nodes)) { | 1102 for (Node* node : base::Reversed(buffer.pushed_nodes)) { |
| 1105 Emit(kArmPush, g.NoOutput(), g.UseRegister(node)); | 1103 Emit(kArmPush, g.NoOutput(), g.UseRegister(node)); |
| 1106 } | 1104 } |
| 1107 | 1105 |
| 1108 // Pass label of exception handler block. | 1106 // Pass label of exception handler block. |
| 1109 CallDescriptor::Flags flags = descriptor->flags(); | 1107 CallDescriptor::Flags flags = descriptor->flags(); |
| 1110 if (handler != nullptr) { | 1108 if (handler) { |
| 1111 flags |= CallDescriptor::kHasExceptionHandler; | 1109 flags |= CallDescriptor::kHasExceptionHandler; |
| 1112 buffer.instruction_args.push_back(g.Label(handler)); | 1110 buffer.instruction_args.push_back(g.Label(handler)); |
| 1113 } | 1111 } |
| 1114 | 1112 |
| 1115 // Select the appropriate opcode based on the call type. | 1113 // Select the appropriate opcode based on the call type. |
| 1116 bool is_tail_call = call_mode == TAIL_CALL; | |
| 1117 InstructionCode opcode; | 1114 InstructionCode opcode; |
| 1118 switch (descriptor->kind()) { | 1115 switch (descriptor->kind()) { |
| 1119 case CallDescriptor::kCallCodeObject: { | 1116 case CallDescriptor::kCallCodeObject: { |
| 1120 opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject; | 1117 opcode = kArchCallCodeObject; |
| 1121 break; | 1118 break; |
| 1122 } | 1119 } |
| 1123 case CallDescriptor::kCallJSFunction: | 1120 case CallDescriptor::kCallJSFunction: |
| 1124 opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction; | 1121 opcode = kArchCallJSFunction; |
| 1125 break; | 1122 break; |
| 1126 default: | 1123 default: |
| 1127 UNREACHABLE(); | 1124 UNREACHABLE(); |
| 1128 return; | 1125 return; |
| 1129 } | 1126 } |
| 1130 opcode |= MiscField::encode(flags); | 1127 opcode |= MiscField::encode(flags); |
| 1131 | 1128 |
| 1132 // Emit the call instruction. | 1129 // Emit the call instruction. |
| 1133 size_t size = is_tail_call ? 0 : buffer.outputs.size(); | 1130 size_t const output_count = buffer.outputs.size(); |
| 1134 InstructionOperand* first_output = | 1131 auto* outputs = output_count ? &buffer.outputs.front() : nullptr; |
| 1135 size > 0 ? &buffer.outputs.front() : nullptr; | 1132 Emit(opcode, output_count, outputs, buffer.instruction_args.size(), |
| 1136 Instruction* call_instr = | 1133 &buffer.instruction_args.front())->MarkAsCall(); |
| 1137 Emit(opcode, size, first_output, buffer.instruction_args.size(), | |
| 1138 &buffer.instruction_args.front()); | |
| 1139 call_instr->MarkAsCall(); | |
| 1140 } | 1134 } |
| 1141 | 1135 |
| 1142 | 1136 |
| 1137 void InstructionSelector::VisitTailCall(Node* node) { | |
| 1138 ArmOperandGenerator g(this); | |
| 1139 CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node); | |
| 1140 DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls); | |
| 1141 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite); | |
| 1142 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall); | |
| 1143 | |
| 1144 // TODO(turbofan): Relax restriction for stack parameters. | |
| 1145 if (descriptor->UsesOnlyRegisters() && | |
| 1146 descriptor->HasSameReturnLocationsAs( | |
| 1147 linkage()->GetIncomingDescriptor())) { | |
| 1148 CallBuffer buffer(zone(), descriptor, nullptr); | |
| 1149 | |
| 1150 // Compute InstructionOperands for inputs and outputs. | |
| 1151 // TODO(turbofan): on ARM it's probably better to use the code object in a | |
| 1152 // register if there are multiple uses of it. Improve constant pool and the | |
| 1153 // heuristics in the register allocator for where to emit constants. | |
| 1154 InitializeCallBuffer(node, &buffer, true, false); | |
| 1155 | |
| 1156 DCHECK_EQ(0u, buffer.pushed_nodes.size()); | |
| 1157 | |
| 1158 // Select the appropriate opcode based on the call type. | |
| 1159 InstructionCode opcode; | |
| 1160 switch (descriptor->kind()) { | |
| 1161 case CallDescriptor::kCallCodeObject: | |
| 1162 opcode = kArchTailCallCodeObject; | |
| 1163 break; | |
| 1164 case CallDescriptor::kCallJSFunction: | |
| 1165 opcode = kArchTailCallJSFunction; | |
| 1166 break; | |
| 1167 default: | |
| 1168 UNREACHABLE(); | |
| 1169 return; | |
| 1170 } | |
| 1171 opcode |= MiscField::encode(descriptor->flags()); | |
| 1172 | |
| 1173 // Emit the tailcall instruction. | |
| 1174 Emit(opcode, 0, nullptr, buffer.instruction_args.size(), | |
| 1175 &buffer.instruction_args.front()); | |
| 1176 } else { | |
| 1177 FrameStateDescriptor* frame_state_descriptor = | |

**Jarin**, 2015/05/05 09:15:24: There seems to be some serious overlap with the VisitCall code.

**Benedikt Meurer**, 2015/05/05 09:40:21: As said, we will unify VisitCall and VisitTailCall.

| 1178 descriptor->NeedsFrameState() | |
| 1179 ? GetFrameStateDescriptor( | |
| 1180 node->InputAt(static_cast<int>(descriptor->InputCount()))) | |
| 1181 : nullptr; | |
| 1182 | |
| 1183 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); | |
| 1184 | |
| 1185 // Compute InstructionOperands for inputs and outputs. | |
| 1186 // TODO(turbofan): on ARM it's probably better to use the code object in a | |
| 1187 // register if there are multiple uses of it. Improve constant pool and the | |
| 1188 // heuristics in the register allocator for where to emit constants. | |
| 1189 InitializeCallBuffer(node, &buffer, true, false); | |
| 1190 | |
| 1191 // Push any stack arguments. | |
| 1192 for (Node* node : base::Reversed(buffer.pushed_nodes)) { | |
| 1193 Emit(kArmPush, g.NoOutput(), g.UseRegister(node)); | |
| 1194 } | |
| 1195 | |
| 1196 // Select the appropriate opcode based on the call type. | |
| 1197 InstructionCode opcode; | |
| 1198 switch (descriptor->kind()) { | |
| 1199 case CallDescriptor::kCallCodeObject: { | |
| 1200 opcode = kArchCallCodeObject; | |
| 1201 break; | |
| 1202 } | |
| 1203 case CallDescriptor::kCallJSFunction: | |
| 1204 opcode = kArchCallJSFunction; | |
| 1205 break; | |
| 1206 default: | |
| 1207 UNREACHABLE(); | |
| 1208 return; | |
| 1209 } | |
| 1210 opcode |= MiscField::encode(descriptor->flags()); | |
| 1211 | |
| 1212 // Emit the call instruction. | |
| 1213 size_t const output_count = buffer.outputs.size(); | |
| 1214 auto* outputs = output_count ? &buffer.outputs.front() : nullptr; | |
| 1215 Emit(opcode, output_count, outputs, buffer.instruction_args.size(), | |
| 1216 &buffer.instruction_args.front())->MarkAsCall(); | |
| 1217 Emit(kArchRet, 0, nullptr, output_count, outputs); | |
| 1218 } | |
| 1219 } | |
| 1220 | |
| 1221 | |
| 1143 namespace { | 1222 namespace { |
| 1144 | 1223 |
| 1145 // Shared routine for multiple float32 compare operations. | 1224 // Shared routine for multiple float32 compare operations. |
| 1146 void VisitFloat32Compare(InstructionSelector* selector, Node* node, | 1225 void VisitFloat32Compare(InstructionSelector* selector, Node* node, |
| 1147 FlagsContinuation* cont) { | 1226 FlagsContinuation* cont) { |
| 1148 ArmOperandGenerator g(selector); | 1227 ArmOperandGenerator g(selector); |
| 1149 Float32BinopMatcher m(node); | 1228 Float32BinopMatcher m(node); |
| 1150 InstructionOperand rhs = m.right().Is(0.0) ? g.UseImmediate(m.right().node()) | 1229 InstructionOperand rhs = m.right().Is(0.0) ? g.UseImmediate(m.right().node()) |
| 1151 : g.UseRegister(m.right().node()); | 1230 : g.UseRegister(m.right().node()); |
| 1152 if (cont->IsBranch()) { | 1231 if (cont->IsBranch()) { |
| (...skipping 371 matching lines...) | |
| 1524 flags |= MachineOperatorBuilder::kFloat64RoundDown | | 1603 flags |= MachineOperatorBuilder::kFloat64RoundDown | |
| 1525 MachineOperatorBuilder::kFloat64RoundTruncate | | 1604 MachineOperatorBuilder::kFloat64RoundTruncate | |
| 1526 MachineOperatorBuilder::kFloat64RoundTiesAway; | 1605 MachineOperatorBuilder::kFloat64RoundTiesAway; |
| 1527 } | 1606 } |
| 1528 return flags; | 1607 return flags; |
| 1529 } | 1608 } |
| 1530 | 1609 |
| 1531 } // namespace compiler | 1610 } // namespace compiler |
| 1532 } // namespace internal | 1611 } // namespace internal |
| 1533 } // namespace v8 | 1612 } // namespace v8 |
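
The overlap the reviewers discuss is mostly the call-buffer setup and the opcode-selection switch, which the new VisitCall and VisitTailCall both repeat. A minimal sketch of how that switch could be hoisted into one shared helper, assuming a hypothetical `SelectCallOpcode` function and simplified stand-in enums rather than the real `CallDescriptor` and `InstructionCode` types from `src/compiler`:

```cpp
// Hypothetical illustration only: stand-in enums instead of the real
// V8 CallDescriptor::Kind and ArchOpcode definitions.
#include <cstdint>

enum class CallKind { kCallCodeObject, kCallJSFunction };

enum ArchOpcode : uint8_t {
  kArchCallCodeObject,
  kArchCallJSFunction,
  kArchTailCallCodeObject,
  kArchTailCallJSFunction
};

// Maps the call kind to an opcode for both regular and tail calls, so the
// two visitors would not each carry their own switch over descriptor->kind().
ArchOpcode SelectCallOpcode(CallKind kind, bool is_tail_call) {
  switch (kind) {
    case CallKind::kCallCodeObject:
      return is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
    case CallKind::kCallJSFunction:
      return is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
  }
  return kArchCallCodeObject;  // unreachable for valid kinds
}

// Usage sketch: VisitCall would call SelectCallOpcode(kind, false),
// VisitTailCall would pass true on its register-only fast path.
```

In the real selector the helper would presumably take the `CallDescriptor` directly; the point is only that the tail/non-tail distinction can become a parameter instead of a second copy of the switch, which is the kind of unification the reply refers to.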