OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
7 #include "src/compiler/node-properties.h" | 7 #include "src/compiler/node-properties.h" |
8 | 8 |
9 namespace v8 { | 9 namespace v8 { |
10 namespace internal { | 10 namespace internal { |
(...skipping 1186 matching lines...) |
1197 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { | 1197 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) { |
1198 VisitRR(this, kArm64Float64RoundTruncate, node); | 1198 VisitRR(this, kArm64Float64RoundTruncate, node); |
1199 } | 1199 } |
1200 | 1200 |
1201 | 1201 |
1202 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { | 1202 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) { |
1203 VisitRR(this, kArm64Float64RoundTiesAway, node); | 1203 VisitRR(this, kArm64Float64RoundTiesAway, node); |
1204 } | 1204 } |
1205 | 1205 |
1206 | 1206 |
1207 void InstructionSelector::VisitCall(Node* node, BasicBlock* handler, | 1207 void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) { |
1208 CallMode call_mode) { | |
1209 Arm64OperandGenerator g(this); | 1208 Arm64OperandGenerator g(this); |
1210 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); | 1209 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); |
1211 | 1210 |
1212 FrameStateDescriptor* frame_state_descriptor = NULL; | 1211 FrameStateDescriptor* frame_state_descriptor = nullptr; |
1213 if (descriptor->NeedsFrameState()) { | 1212 if (descriptor->NeedsFrameState()) { |
1214 frame_state_descriptor = | 1213 frame_state_descriptor = |
1215 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); | 1214 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); |
1216 } | 1215 } |
1217 | 1216 |
1218 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); | 1217 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
1219 | 1218 |
1220 // Compute InstructionOperands for inputs and outputs. | 1219 // Compute InstructionOperands for inputs and outputs. |
1221 // TODO(turbofan): on ARM64 it's probably better to use the code object in a | 1220 // TODO(turbofan): on ARM64 it's probably better to use the code object in a |
1222 // register if there are multiple uses of it. Improve constant pool and the | 1221 // register if there are multiple uses of it. Improve constant pool and the |
(...skipping 31 matching lines...) |
1254 } | 1253 } |
1255 | 1254 |
1256 // Pass label of exception handler block. | 1255 // Pass label of exception handler block. |
1257 CallDescriptor::Flags flags = descriptor->flags(); | 1256 CallDescriptor::Flags flags = descriptor->flags(); |
1258 if (handler != nullptr) { | 1257 if (handler != nullptr) { |
1259 flags |= CallDescriptor::kHasExceptionHandler; | 1258 flags |= CallDescriptor::kHasExceptionHandler; |
1260 buffer.instruction_args.push_back(g.Label(handler)); | 1259 buffer.instruction_args.push_back(g.Label(handler)); |
1261 } | 1260 } |
1262 | 1261 |
1263 // Select the appropriate opcode based on the call type. | 1262 // Select the appropriate opcode based on the call type. |
1264 bool is_tail_call = call_mode == TAIL_CALL; | |
1265 InstructionCode opcode; | 1263 InstructionCode opcode; |
1266 switch (descriptor->kind()) { | 1264 switch (descriptor->kind()) { |
1267 case CallDescriptor::kCallCodeObject: { | 1265 case CallDescriptor::kCallCodeObject: { |
1268 opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject; | 1266 opcode = kArchCallCodeObject; |
1269 break; | 1267 break; |
1270 } | 1268 } |
1271 case CallDescriptor::kCallJSFunction: | 1269 case CallDescriptor::kCallJSFunction: |
1272 opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction; | 1270 opcode = kArchCallJSFunction; |
1273 break; | 1271 break; |
1274 default: | 1272 default: |
1275 UNREACHABLE(); | 1273 UNREACHABLE(); |
1276 return; | 1274 return; |
1277 } | 1275 } |
1278 opcode |= MiscField::encode(flags); | 1276 opcode |= MiscField::encode(flags); |
1279 | 1277 |
1280 // Emit the call instruction. | 1278 // Emit the call instruction. |
1281 size_t size = is_tail_call ? 0 : buffer.outputs.size(); | 1279 size_t const output_count = buffer.outputs.size(); |
1282 InstructionOperand* first_output = | 1280 auto* outputs = output_count ? &buffer.outputs.front() : nullptr; |
1283 size > 0 ? &buffer.outputs.front() : nullptr; | 1281 Emit(opcode, output_count, outputs, buffer.instruction_args.size(), |
1284 Instruction* call_instr = | 1282 &buffer.instruction_args.front())->MarkAsCall(); |
1285 Emit(opcode, size, first_output, buffer.instruction_args.size(), | |
1286 &buffer.instruction_args.front()); | |
1287 call_instr->MarkAsCall(); | |
1288 } | 1283 } |
1289 | 1284 |
1290 | 1285 |
| 1286 void InstructionSelector::VisitTailCall(Node* node) { |
| 1287 Arm64OperandGenerator g(this); |
| 1288 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node); |
| 1289 DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls); |
| 1290 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite); |
| 1291 DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall); |
| 1292 |
| 1293 // TODO(turbofan): Relax restriction for stack parameters. |
| 1294 if (descriptor->UsesOnlyRegisters() && |
| 1295 descriptor->HasSameReturnLocationsAs( |
| 1296 linkage()->GetIncomingDescriptor())) { |
| 1297 CallBuffer buffer(zone(), descriptor, nullptr); |
| 1298 |
| 1299 // Compute InstructionOperands for inputs and outputs. |
| 1300 // TODO(turbofan): on ARM64 it's probably better to use the code object in a |
| 1301 // register if there are multiple uses of it. Improve constant pool and the |
| 1302 // heuristics in the register allocator for where to emit constants. |
| 1303 InitializeCallBuffer(node, &buffer, true, false); |
| 1304 |
| 1305 DCHECK_EQ(0u, buffer.pushed_nodes.size()); |
| 1306 |
| 1307 // Select the appropriate opcode based on the call type. |
| 1308 InstructionCode opcode; |
| 1309 switch (descriptor->kind()) { |
| 1310 case CallDescriptor::kCallCodeObject: |
| 1311 opcode = kArchTailCallCodeObject; |
| 1312 break; |
| 1313 case CallDescriptor::kCallJSFunction: |
| 1314 opcode = kArchTailCallJSFunction; |
| 1315 break; |
| 1316 default: |
| 1317 UNREACHABLE(); |
| 1318 return; |
| 1319 } |
| 1320 opcode |= MiscField::encode(descriptor->flags()); |
| 1321 |
| 1322 // Emit the tailcall instruction. |
| 1323 Emit(opcode, 0, nullptr, buffer.instruction_args.size(), |
| 1324 &buffer.instruction_args.front()); |
| 1325 } else { |
| 1326 FrameStateDescriptor* frame_state_descriptor = nullptr; |
| 1327 if (descriptor->NeedsFrameState()) { |
| 1328 frame_state_descriptor = |
| 1329 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount())); |
| 1330 } |
| 1331 |
| 1332 CallBuffer buffer(zone(), descriptor, frame_state_descriptor); |
| 1333 |
| 1334 // Compute InstructionOperands for inputs and outputs. |
| 1335 // TODO(turbofan): on ARM64 it's probably better to use the code object in a |
| 1336 // register if there are multiple uses of it. Improve constant pool and the |
| 1337 // heuristics in the register allocator for where to emit constants. |
| 1338 InitializeCallBuffer(node, &buffer, true, false); |
| 1339 |
| 1340 // Push the arguments to the stack. |
| 1341 bool pushed_count_uneven = buffer.pushed_nodes.size() & 1; |
| 1342 int aligned_push_count = buffer.pushed_nodes.size(); |
| 1343 // TODO(dcarney): claim and poke probably take small immediates, |
| 1344 // loop here or whatever. |
| 1345 // Bump the stack pointer(s). |
| 1346 if (aligned_push_count > 0) { |
| 1347 // TODO(dcarney): it would be better to bump the csp here only |
| 1348 // and emit paired stores with increment for non c frames. |
| 1349 Emit(kArm64Claim, g.NoOutput(), g.TempImmediate(aligned_push_count)); |
| 1350 } |
| 1351 // Move arguments to the stack. |
| 1352 { |
| 1353 int slot = buffer.pushed_nodes.size() - 1; |
| 1354 // Emit the uneven pushes. |
| 1355 if (pushed_count_uneven) { |
| 1356 Node* input = buffer.pushed_nodes[slot]; |
| 1357 Emit(kArm64Poke, g.NoOutput(), g.UseRegister(input), |
| 1358 g.TempImmediate(slot)); |
| 1359 slot--; |
| 1360 } |
| 1361 // Now all pushes can be done in pairs. |
| 1362 for (; slot >= 0; slot -= 2) { |
| 1363 Emit(kArm64PokePair, g.NoOutput(), |
| 1364 g.UseRegister(buffer.pushed_nodes[slot]), |
| 1365 g.UseRegister(buffer.pushed_nodes[slot - 1]), |
| 1366 g.TempImmediate(slot)); |
| 1367 } |
| 1368 } |
| 1369 |
| 1370 // Select the appropriate opcode based on the call type. |
| 1371 InstructionCode opcode; |
| 1372 switch (descriptor->kind()) { |
| 1373 case CallDescriptor::kCallCodeObject: { |
| 1374 opcode = kArchCallCodeObject; |
| 1375 break; |
| 1376 } |
| 1377 case CallDescriptor::kCallJSFunction: |
| 1378 opcode = kArchCallJSFunction; |
| 1379 break; |
| 1380 default: |
| 1381 UNREACHABLE(); |
| 1382 return; |
| 1383 } |
| 1384 opcode |= MiscField::encode(descriptor->flags()); |
| 1385 |
| 1386 // Emit the call instruction. |
| 1387 size_t const output_count = buffer.outputs.size(); |
| 1388 auto* outputs = output_count ? &buffer.outputs.front() : nullptr; |
| 1389 Emit(opcode, output_count, outputs, buffer.instruction_args.size(), |
| 1390 &buffer.instruction_args.front())->MarkAsCall(); |
| 1391 Emit(kArchRet, 0, nullptr, output_count, outputs); |
| 1392 } |
| 1393 } |
| 1394 |
| 1395 |
1291 namespace { | 1396 namespace { |
1292 | 1397 |
1293 // Shared routine for multiple compare operations. | 1398 // Shared routine for multiple compare operations. |
1294 void VisitCompare(InstructionSelector* selector, InstructionCode opcode, | 1399 void VisitCompare(InstructionSelector* selector, InstructionCode opcode, |
1295 InstructionOperand left, InstructionOperand right, | 1400 InstructionOperand left, InstructionOperand right, |
1296 FlagsContinuation* cont) { | 1401 FlagsContinuation* cont) { |
1297 Arm64OperandGenerator g(selector); | 1402 Arm64OperandGenerator g(selector); |
1298 opcode = cont->Encode(opcode); | 1403 opcode = cont->Encode(opcode); |
1299 if (cont->IsBranch()) { | 1404 if (cont->IsBranch()) { |
1300 selector->Emit(opcode, g.NoOutput(), left, right, | 1405 selector->Emit(opcode, g.NoOutput(), left, right, |
(...skipping 475 matching lines...) |
1776 MachineOperatorBuilder::kFloat64RoundTruncate | | 1881 MachineOperatorBuilder::kFloat64RoundTruncate | |
1777 MachineOperatorBuilder::kFloat64RoundTiesAway | | 1882 MachineOperatorBuilder::kFloat64RoundTiesAway | |
1778 MachineOperatorBuilder::kWord32ShiftIsSafe | | 1883 MachineOperatorBuilder::kWord32ShiftIsSafe | |
1779 MachineOperatorBuilder::kInt32DivIsSafe | | 1884 MachineOperatorBuilder::kInt32DivIsSafe | |
1780 MachineOperatorBuilder::kUint32DivIsSafe; | 1885 MachineOperatorBuilder::kUint32DivIsSafe; |
1781 } | 1886 } |
1782 | 1887 |
1783 } // namespace compiler | 1888 } // namespace compiler |
1784 } // namespace internal | 1889 } // namespace internal |
1785 } // namespace v8 | 1890 } // namespace v8 |
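
The stack-argument loop in the new VisitTailCall above (kArm64Poke for an uneven count, then kArm64PokePair for the rest) is easy to misread, so here is a minimal standalone sketch of the slot assignment it produces. This is not part of the patch, and the PokeOp struct and PlanPokes helper are hypothetical names used only for illustration; they do not exist in V8.

// Standalone sketch (not part of the patch): models the slot assignment that
// the new VisitTailCall code above performs with kArm64Poke/kArm64PokePair.
// PokeOp and PlanPokes are hypothetical names for illustration only.
#include <cstdio>
#include <vector>

struct PokeOp {
  bool paired;  // true -> kArm64PokePair, false -> kArm64Poke
  int slot;     // stack slot passed as the immediate operand
};

// Given the number of pushed nodes, emit one single poke if the count is odd,
// then cover the remaining arguments with paired pokes at decreasing slot
// indices, mirroring the loop in the else-branch of VisitTailCall.
std::vector<PokeOp> PlanPokes(int pushed_count) {
  std::vector<PokeOp> plan;
  int slot = pushed_count - 1;
  if (pushed_count & 1) {          // uneven count: one single poke first
    plan.push_back({false, slot});
    slot--;
  }
  for (; slot >= 0; slot -= 2) {   // the rest goes out in pairs
    plan.push_back({true, slot});  // pair covers slots slot and slot - 1
  }
  return plan;
}

int main() {
  for (const PokeOp& op : PlanPokes(5)) {
    std::printf("%s at slot %d\n",
                op.paired ? "kArm64PokePair" : "kArm64Poke", op.slot);
  }
  // Prints: kArm64Poke at slot 4, then kArm64PokePair at slots 3 and 1.
}

Using paired stores halves the number of poke instructions; the single leading poke simply absorbs an odd argument count so that the remaining slots split evenly into pairs.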