| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/arm64/assembler-arm64-inl.h" | 7 #include "src/arm64/assembler-arm64-inl.h" |
| 8 #include "src/arm64/frames-arm64.h" | 8 #include "src/arm64/frames-arm64.h" |
| 9 #include "src/arm64/macro-assembler-arm64-inl.h" | 9 #include "src/arm64/macro-assembler-arm64-inl.h" |
| 10 #include "src/compilation-info.h" | 10 #include "src/compilation-info.h" |
| (...skipping 640 matching lines...) |
| 651 } | 651 } |
| 652 | 652 |
| 653 // Assembles an instruction after register allocation, producing machine code. | 653 // Assembles an instruction after register allocation, producing machine code. |
| 654 CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( | 654 CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( |
| 655 Instruction* instr) { | 655 Instruction* instr) { |
| 656 Arm64OperandConverter i(this, instr); | 656 Arm64OperandConverter i(this, instr); |
| 657 InstructionCode opcode = instr->opcode(); | 657 InstructionCode opcode = instr->opcode(); |
| 658 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode); | 658 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode); |
| 659 switch (arch_opcode) { | 659 switch (arch_opcode) { |
| 660 case kArchCallCodeObject: { | 660 case kArchCallCodeObject: { |
| | 661 // We must not share code targets for calls to builtins for wasm code, as |
| | 662 // they might need to be patched individually. |
| | 663 internal::Assembler::BlockCodeTargetSharingScope scope; |
| | 664 if (info()->IsWasm()) scope.Open(masm()); |
| | 665 |
| 661 EnsureSpaceForLazyDeopt(); | 666 EnsureSpaceForLazyDeopt(); |
| 662 if (instr->InputAt(0)->IsImmediate()) { | 667 if (instr->InputAt(0)->IsImmediate()) { |
| 663 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 668 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
| 664 RelocInfo::CODE_TARGET); | 669 RelocInfo::CODE_TARGET); |
| 665 } else { | 670 } else { |
| 666 Register target = i.InputRegister(0); | 671 Register target = i.InputRegister(0); |
| 667 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 672 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); |
| 668 __ Call(target); | 673 __ Call(target); |
| 669 } | 674 } |
| 670 RecordCallPosition(instr); | 675 RecordCallPosition(instr); |
| 671 // TODO(titzer): this is ugly. JSSP should be a caller-save register | 676 // TODO(titzer): this is ugly. JSSP should be a caller-save register |
| 672 // in this case, but it is not possible to express in the register | 677 // in this case, but it is not possible to express in the register |
| 673 // allocator. | 678 // allocator. |
| 674 CallDescriptor::Flags flags(MiscField::decode(opcode)); | 679 CallDescriptor::Flags flags(MiscField::decode(opcode)); |
| 675 if (flags & CallDescriptor::kRestoreJSSP) { | 680 if (flags & CallDescriptor::kRestoreJSSP) { |
| 676 __ Ldr(jssp, MemOperand(csp)); | 681 __ Ldr(jssp, MemOperand(csp)); |
| 677 __ Mov(csp, jssp); | 682 __ Mov(csp, jssp); |
| 678 } | 683 } |
| 679 if (flags & CallDescriptor::kRestoreCSP) { | 684 if (flags & CallDescriptor::kRestoreCSP) { |
| 680 __ Mov(csp, jssp); | 685 __ Mov(csp, jssp); |
| 681 __ AssertCspAligned(); | 686 __ AssertCspAligned(); |
| 682 } | 687 } |
| 683 frame_access_state()->ClearSPDelta(); | 688 frame_access_state()->ClearSPDelta(); |
| 684 break; | 689 break; |
| 685 } | 690 } |
| 686 case kArchTailCallCodeObjectFromJSFunction: | 691 case kArchTailCallCodeObjectFromJSFunction: |
| 687 case kArchTailCallCodeObject: { | 692 case kArchTailCallCodeObject: { |
| | 693 // We must not share code targets for calls to builtins for wasm code, as |
| | 694 // they might need to be patched individually. |
| | 695 internal::Assembler::BlockCodeTargetSharingScope scope; |
| | 696 if (info()->IsWasm()) scope.Open(masm()); |
| | 697 |
| 688 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) { | 698 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) { |
| 689 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, | 699 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, |
| 690 i.TempRegister(0), i.TempRegister(1), | 700 i.TempRegister(0), i.TempRegister(1), |
| 691 i.TempRegister(2)); | 701 i.TempRegister(2)); |
| 692 } | 702 } |
| 693 if (instr->InputAt(0)->IsImmediate()) { | 703 if (instr->InputAt(0)->IsImmediate()) { |
| 694 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), | 704 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), |
| 695 RelocInfo::CODE_TARGET); | 705 RelocInfo::CODE_TARGET); |
| 696 } else { | 706 } else { |
| 697 Register target = i.InputRegister(0); | 707 Register target = i.InputRegister(0); |
| (...skipping 1661 matching lines...) |
| 2359 padding_size -= kInstructionSize; | 2369 padding_size -= kInstructionSize; |
| 2360 } | 2370 } |
| 2361 } | 2371 } |
| 2362 } | 2372 } |
| 2363 | 2373 |
| 2364 #undef __ | 2374 #undef __ |
| 2365 | 2375 |
| 2366 } // namespace compiler | 2376 } // namespace compiler |
| 2367 } // namespace internal | 2377 } // namespace internal |
| 2368 } // namespace v8 | 2378 } // namespace v8 |
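Note on the pattern introduced in both call cases above: the patch constructs a BlockCodeTargetSharingScope unconditionally and only opens it when the code being generated is wasm, so code-target sharing is suppressed exactly for the call sites that may later need to be patched individually. Below is a minimal, self-contained C++ sketch of that conditionally-opened-scope idiom. FakeAssembler and BlockSharingScope are illustrative stand-ins, not V8's actual Assembler or BlockCodeTargetSharingScope; the bookkeeping they perform is assumed purely for the example.

#include <cassert>
#include <iostream>

// Stand-in for the assembler: tracks how many scopes currently block
// code target sharing (hypothetical, for illustration only).
class FakeAssembler {
 public:
  void StartBlockSharing() { ++block_depth_; }
  void EndBlockSharing() {
    assert(block_depth_ > 0);
    --block_depth_;
  }
  bool sharing_blocked() const { return block_depth_ > 0; }

 private:
  int block_depth_ = 0;
};

// Stand-in for the scope: constructed unconditionally, but it only takes
// effect once Open() is called; the destructor undoes the effect only if
// the scope was actually opened.
class BlockSharingScope {
 public:
  BlockSharingScope() = default;
  ~BlockSharingScope() {
    if (assembler_ != nullptr) assembler_->EndBlockSharing();
  }
  void Open(FakeAssembler* assembler) {
    assert(assembler_ == nullptr);  // Open at most once.
    assembler_ = assembler;
    assembler_->StartBlockSharing();
  }

 private:
  FakeAssembler* assembler_ = nullptr;
};

void EmitCall(FakeAssembler* masm, bool is_wasm) {
  // Mirrors the patch: the scope exists for the whole case block, but is
  // only opened when compiling wasm, where call targets may be patched
  // individually and therefore must not be shared.
  BlockSharingScope scope;
  if (is_wasm) scope.Open(masm);

  std::cout << (masm->sharing_blocked() ? "sharing blocked\n"
                                        : "sharing allowed\n");
  // ... emit the actual call here ...
}

int main() {
  FakeAssembler masm;
  EmitCall(&masm, /*is_wasm=*/false);  // prints "sharing allowed"
  EmitCall(&masm, /*is_wasm=*/true);   // prints "sharing blocked"
  return 0;
}

The point of the idiom is that the scope's destructor runs on every path out of the case block, so the "blocked" state cannot leak past the call emission, and non-wasm code pays nothing because the scope is never opened.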