Index: src/compiler/arm64/instruction-selector-arm64.cc
diff --git a/src/compiler/arm64/instruction-selector-arm64.cc b/src/compiler/arm64/instruction-selector-arm64.cc
index 7bac111f7d1341d9efad84d620e5c4401e2742eb..4d2927232e5b3b99148f5331676c0227f9940e52 100644
--- a/src/compiler/arm64/instruction-selector-arm64.cc
+++ b/src/compiler/arm64/instruction-selector-arm64.cc
@@ -1451,110 +1451,7 @@ void InstructionSelector::EmitPrepareArguments(NodeVector* arguments,
 }


-void InstructionSelector::VisitTailCall(Node* node) {
-  Arm64OperandGenerator g(this);
-  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
-  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
-  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
-  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
-
-  // TODO(turbofan): Relax restriction for stack parameters.
-  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
-    CallBuffer buffer(zone(), descriptor, nullptr);
-
-    // Compute InstructionOperands for inputs and outputs.
-    // TODO(turbofan): on ARM64 it's probably better to use the code object in a
-    // register if there are multiple uses of it. Improve constant pool and the
-    // heuristics in the register allocator for where to emit constants.
-    InitializeCallBuffer(node, &buffer, true, false);
-
-    // Select the appropriate opcode based on the call type.
-    InstructionCode opcode;
-    switch (descriptor->kind()) {
-      case CallDescriptor::kCallCodeObject:
-        opcode = kArchTailCallCodeObject;
-        break;
-      case CallDescriptor::kCallJSFunction:
-        opcode = kArchTailCallJSFunction;
-        break;
-      default:
-        UNREACHABLE();
-        return;
-    }
-    opcode |= MiscField::encode(descriptor->flags());
-
-    // Emit the tailcall instruction.
-    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
-         &buffer.instruction_args.front());
-  } else {
-    FrameStateDescriptor* frame_state_descriptor = nullptr;
-    if (descriptor->NeedsFrameState()) {
-      frame_state_descriptor = GetFrameStateDescriptor(
-          node->InputAt(static_cast<int>(descriptor->InputCount())));
-    }
-
-    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
-
-    // Compute InstructionOperands for inputs and outputs.
-    // TODO(turbofan): on ARM64 it's probably better to use the code object in a
-    // register if there are multiple uses of it. Improve constant pool and the
-    // heuristics in the register allocator for where to emit constants.
-    InitializeCallBuffer(node, &buffer, true, false);
-
-    // Push the arguments to the stack.
-    int aligned_push_count = static_cast<int>(buffer.pushed_nodes.size());
-    bool pushed_count_uneven = aligned_push_count & 1;
-    // TODO(dcarney): claim and poke probably take small immediates,
-    // loop here or whatever.
-    // Bump the stack pointer(s).
-    if (aligned_push_count > 0) {
-      // TODO(dcarney): it would be better to bump the csp here only
-      // and emit paired stores with increment for non c frames.
-      Emit(kArm64Claim, g.NoOutput(), g.TempImmediate(aligned_push_count));
-    }
-    // Move arguments to the stack.
-    {
-      int slot = aligned_push_count - 1;
-      // Emit the uneven pushes.
-      if (pushed_count_uneven) {
-        Node* input = buffer.pushed_nodes[slot];
-        Emit(kArm64Poke, g.NoOutput(), g.UseRegister(input),
-             g.TempImmediate(slot));
-        slot--;
-      }
-      // Now all pushes can be done in pairs.
-      for (; slot >= 0; slot -= 2) {
-        Emit(kArm64PokePair, g.NoOutput(),
-             g.UseRegister(buffer.pushed_nodes[slot]),
-             g.UseRegister(buffer.pushed_nodes[slot - 1]),
-             g.TempImmediate(slot));
-      }
-    }
-
-    // Select the appropriate opcode based on the call type.
-    InstructionCode opcode;
-    switch (descriptor->kind()) {
-      case CallDescriptor::kCallCodeObject: {
-        opcode = kArchCallCodeObject;
-        break;
-      }
-      case CallDescriptor::kCallJSFunction:
-        opcode = kArchCallJSFunction;
-        break;
-      default:
-        UNREACHABLE();
-        return;
-    }
-    opcode |= MiscField::encode(descriptor->flags());
-
-    // Emit the call instruction.
-    size_t const output_count = buffer.outputs.size();
-    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
-    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
-         &buffer.instruction_args.front())->MarkAsCall();
-    Emit(kArchRet, 0, nullptr, output_count, outputs);
-  }
-}
+bool InstructionSelector::IsTailCallAddressImmediate() { return false; }


 namespace {
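Note on the shape of this change: the architecture-specific VisitTailCall body is deleted, and the backend now only answers whether a tail-call target address can be encoded as an immediate. Returning false here matches the `false` last argument the removed code passed to InitializeCallBuffer, which appears to be the same "address can be an immediate" bit that the shared selector is expected to query through this hook. The sketch below is illustrative only, not V8 code; all class and function names in it are invented for the example. It shows the general pattern: shared lowering logic asks a per-architecture predicate once and picks an immediate or register operand for the call target accordingly, instead of each backend duplicating the whole tail-call path.

```cpp
// Illustrative sketch of the hook pattern -- not V8's actual classes or API.
#include <cstdint>
#include <iostream>
#include <string>

struct Operand {
  enum class Kind { kImmediate, kRegister } kind;
  uint64_t value;  // immediate value, or register number
};

class SelectorBase {
 public:
  virtual ~SelectorBase() = default;

  // Per-architecture hook, analogous to IsTailCallAddressImmediate():
  // may a code address be folded directly into the call instruction?
  virtual bool IsTailCallAddressImmediate() const = 0;

  // Shared lowering: choose the operand form for the tail-call target here,
  // based on the hook, rather than in per-architecture code.
  Operand TailCallTarget(uint64_t code_address, uint64_t scratch_reg) const {
    if (IsTailCallAddressImmediate()) {
      return {Operand::Kind::kImmediate, code_address};
    }
    // Otherwise the target must be materialized into a register first
    // (an arbitrary 64-bit address does not fit in an ARM64 branch immediate).
    return {Operand::Kind::kRegister, scratch_reg};
  }
};

class Arm64LikeSelector : public SelectorBase {
 public:
  bool IsTailCallAddressImmediate() const override { return false; }
};

class X64LikeSelector : public SelectorBase {
 public:
  bool IsTailCallAddressImmediate() const override { return true; }
};

int main() {
  Arm64LikeSelector arm64;
  X64LikeSelector x64;
  auto describe = [](const Operand& op) {
    return op.kind == Operand::Kind::kImmediate ? std::string("immediate")
                                                : std::string("register");
  };
  std::cout << "arm64-like target: " << describe(arm64.TailCallTarget(0x1234, 16)) << "\n";
  std::cout << "x64-like target:   " << describe(x64.TailCallTarget(0x1234, 16)) << "\n";
  return 0;
}
```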