OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
8 #include "src/arm64/macro-assembler-arm64.h" | 8 #include "src/arm64/macro-assembler-arm64.h" |
9 #include "src/compiler/code-generator-impl.h" | 9 #include "src/compiler/code-generator-impl.h" |
10 #include "src/compiler/gap-resolver.h" | 10 #include "src/compiler/gap-resolver.h" |
(...skipping 440 matching lines...)
451 } else { \ | 451 } else { \ |
452 uint32_t imm = \ | 452 uint32_t imm = \ |
453 static_cast<uint32_t>(i.InputOperand##width(1).ImmediateValue()); \ | 453 static_cast<uint32_t>(i.InputOperand##width(1).ImmediateValue()); \ |
454 __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0), \ | 454 __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0), \ |
455 imm % (width)); \ | 455 imm % (width)); \ |
456 } \ | 456 } \ |
457 } while (0) | 457 } while (0) |
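The macro tail above (its opening lines fall in the skipped region) emits the immediate form of a shift-style instruction and reduces the constant with imm % (width). Below is a minimal sketch of why that reduction is sound, with an illustrative (non-V8) function name: arm64 immediate shifts only encode amounts from 0 to width - 1, and JavaScript shift semantics already mask the count to the operand width, so taking the constant modulo the width preserves the result.

// Illustrative only, not V8 code: reducing the shift amount modulo the
// operand width matches both the arm64 encoding range (0..width-1) and
// JavaScript semantics, where shift counts are masked to the width.
#include <cstdint>

uint32_t ShiftRightLogical32(uint32_t value, uint32_t shift_amount) {
  // The same reduction as imm % (width) in the macro above, with width == 32.
  return value >> (shift_amount % 32);
}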
458 | 458 |
459 | 459 |
460 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 460 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
| 461 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 462 if (sp_slot_delta > 0) { |
| 463 __ Add(jssp, jssp, Operand(sp_slot_delta * kPointerSize)); |
| 464 } |
461 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 465 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
462 int stack_slots = frame()->GetSpillSlotCount(); | 466 int spill_slots = frame()->GetSpillSlotCount(); |
463 if (descriptor->IsJSFunctionCall() || stack_slots > 0) { | 467 bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0; |
464 __ Mov(jssp, fp); | 468 if (has_frame) { |
465 __ Pop(fp, lr); | 469 __ Pop(fp, lr); |
466 } | 470 } |
467 if (stack_param_delta < 0) { | |
468 int offset = -stack_param_delta * kPointerSize; | |
469 __ Add(jssp, jssp, Operand(offset)); | |
470 } | |
471 } | 471 } |
472 | 472 |
| 473 |
| 474 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
| 475 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 476 if (sp_slot_delta < 0) { |
| 477 __ Sub(jssp, jssp, Operand(-sp_slot_delta * kPointerSize)); |
| 478 frame()->AllocateOutgoingParameterSlots(-sp_slot_delta); |
| 479 } |
| 480 } |
| 481 |
473 | 482 |
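The two new helpers above split the tail-call stack adjustment by the sign of the slot delta: AssemblePrepareTailCall claims extra outgoing parameter slots when the delta is negative, while AssembleDeconstructActivationRecord releases slots that are no longer needed when the delta is positive and then pops fp/lr if the function built a frame. Below is a minimal sketch of that split, not V8 code: it assumes sp_slot_delta is whatever TailCallFrameStackSlotDelta() returns (its body is outside this diff), models jssp as a plain byte address, and takes kPointerSize to be 8 on arm64.

// Minimal model (not V8 code) of the jssp adjustments made by the two
// helpers above. StackModel, PrepareTailCall and DeconstructActivationRecord
// are illustrative names only.
#include <cstdint>

constexpr uint64_t kPointerSize = 8;  // assumption: 64-bit slots on arm64

struct StackModel {
  uint64_t jssp;  // JS stack pointer; the stack grows towards lower addresses
};

// Mirrors AssemblePrepareTailCall: claim extra outgoing parameter slots
// before the tail call when the callee needs more stack arguments
// (negative delta).
void PrepareTailCall(StackModel* s, int sp_slot_delta) {
  if (sp_slot_delta < 0) {
    s->jssp -= static_cast<uint64_t>(-sp_slot_delta) * kPointerSize;
  }
}

// Mirrors AssembleDeconstructActivationRecord: drop the slots the callee
// will not reuse when it needs fewer stack arguments (positive delta).
// The subsequent Pop(fp, lr) is not modeled.
void DeconstructActivationRecord(StackModel* s, int sp_slot_delta) {
  if (sp_slot_delta > 0) {
    s->jssp += static_cast<uint64_t>(sp_slot_delta) * kPointerSize;
  }
}

The frame pop itself (Pop(fp, lr)) and the bookkeeping call to frame()->AllocateOutgoingParameterSlots() stay in the architecture code and are not modeled here.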
474 // Assembles an instruction after register allocation, producing machine code. | 483 // Assembles an instruction after register allocation, producing machine code. |
475 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 484 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
476 Arm64OperandConverter i(this, instr); | 485 Arm64OperandConverter i(this, instr); |
477 InstructionCode opcode = instr->opcode(); | 486 InstructionCode opcode = instr->opcode(); |
478 switch (ArchOpcodeField::decode(opcode)) { | 487 switch (ArchOpcodeField::decode(opcode)) { |
479 case kArchCallCodeObject: { | 488 case kArchCallCodeObject: { |
480 EnsureSpaceForLazyDeopt(); | 489 EnsureSpaceForLazyDeopt(); |
481 if (instr->InputAt(0)->IsImmediate()) { | 490 if (instr->InputAt(0)->IsImmediate()) { |
482 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 491 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
(...skipping 59 matching lines...)
542 EnsureSpaceForLazyDeopt(); | 551 EnsureSpaceForLazyDeopt(); |
543 RecordCallPosition(instr); | 552 RecordCallPosition(instr); |
544 break; | 553 break; |
545 } | 554 } |
546 case kArchPrepareCallCFunction: | 555 case kArchPrepareCallCFunction: |
547 // We don't need kArchPrepareCallCFunction on arm64 as the instruction | 556 // We don't need kArchPrepareCallCFunction on arm64 as the instruction |
548 // selector already performs a Claim to reserve space on the stack and | 557 // selector already performs a Claim to reserve space on the stack and |
549 // guarantees correct alignment of the stack pointer. | 558 // guarantees correct alignment of the stack pointer. |
550 UNREACHABLE(); | 559 UNREACHABLE(); |
551 break; | 560 break; |
| 561 case kArchPrepareTailCall: |
| 562 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); |
| 563 break; |
552 case kArchCallCFunction: { | 564 case kArchCallCFunction: { |
553 int const num_parameters = MiscField::decode(instr->opcode()); | 565 int const num_parameters = MiscField::decode(instr->opcode()); |
554 if (instr->InputAt(0)->IsImmediate()) { | 566 if (instr->InputAt(0)->IsImmediate()) { |
555 ExternalReference ref = i.InputExternalReference(0); | 567 ExternalReference ref = i.InputExternalReference(0); |
556 __ CallCFunction(ref, num_parameters, 0); | 568 __ CallCFunction(ref, num_parameters, 0); |
557 } else { | 569 } else { |
558 Register func = i.InputRegister(0); | 570 Register func = i.InputRegister(0); |
559 __ CallCFunction(func, num_parameters, 0); | 571 __ CallCFunction(func, num_parameters, 0); |
560 } | 572 } |
561 // CallCFunction only supports register arguments so we never need to call | 573 // CallCFunction only supports register arguments so we never need to call |
(...skipping 967 matching lines...)
1529 padding_size -= kInstructionSize; | 1541 padding_size -= kInstructionSize; |
1530 } | 1542 } |
1531 } | 1543 } |
1532 } | 1544 } |
1533 | 1545 |
1534 #undef __ | 1546 #undef __ |
1535 | 1547 |
1536 } // namespace compiler | 1548 } // namespace compiler |
1537 } // namespace internal | 1549 } // namespace internal |
1538 } // namespace v8 | 1550 } // namespace v8 |
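One convention the new kArchPrepareTailCall case relies on: the stack parameter delta is recovered from the instruction's last input via i.InputInt32(instr->InputCount() - 1), i.e. the delta is assumed to be appended as the final immediate operand by the instruction selector (that selector change is not part of this file). A hypothetical model of that convention, with made-up types:

// Hypothetical model only; ModelInstruction and StackParamDelta are not V8
// types, they just illustrate "the delta always rides as the last input".
#include <cstdint>
#include <vector>

struct ModelInstruction {
  std::vector<int32_t> inputs;  // stands in for InputAt(0..InputCount()-1)
};

int32_t StackParamDelta(const ModelInstruction& instr) {
  // Equivalent of i.InputInt32(instr->InputCount() - 1): the delta sits after
  // however many call inputs precede it.
  return instr.inputs.back();
}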