| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/compiler/code-generator-impl.h" | 7 #include "src/compiler/code-generator-impl.h" |
| 8 #include "src/compiler/gap-resolver.h" | 8 #include "src/compiler/gap-resolver.h" |
| 9 #include "src/compiler/node-matchers.h" | 9 #include "src/compiler/node-matchers.h" |
| 10 #include "src/compiler/osr.h" | 10 #include "src/compiler/osr.h" |
| (...skipping 603 matching lines...) |
| 614 } | 614 } |
| 615 | 615 |
| 616 | 616 |
| 617 // Assembles an instruction after register allocation, producing machine code. | 617 // Assembles an instruction after register allocation, producing machine code. |
| 618 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 618 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
| 619 PPCOperandConverter i(this, instr); | 619 PPCOperandConverter i(this, instr); |
| 620 ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode()); | 620 ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode()); |
| 621 | 621 |
| 622 switch (opcode) { | 622 switch (opcode) { |
| 623 case kArchCallCodeObject: { | 623 case kArchCallCodeObject: { |
| 624 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( |
| 625 masm()); |
| 624 EnsureSpaceForLazyDeopt(); | 626 EnsureSpaceForLazyDeopt(); |
| 625 if (HasRegisterInput(instr, 0)) { | 627 if (HasRegisterInput(instr, 0)) { |
| 626 __ addi(ip, i.InputRegister(0), | 628 __ addi(ip, i.InputRegister(0), |
| 627 Operand(Code::kHeaderSize - kHeapObjectTag)); | 629 Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 628 __ Call(ip); | 630 __ Call(ip); |
| 629 } else { | 631 } else { |
| 630 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 632 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
| 631 RelocInfo::CODE_TARGET); | 633 RelocInfo::CODE_TARGET); |
| 632 } | 634 } |
| 633 RecordCallPosition(instr); | 635 RecordCallPosition(instr); |
| (...skipping 10 matching lines...) |
| 644 // We cannot use the constant pool to load the target since | 646 // We cannot use the constant pool to load the target since |
| 645 // we've already restored the caller's frame. | 647 // we've already restored the caller's frame. |
| 646 ConstantPoolUnavailableScope constant_pool_unavailable(masm()); | 648 ConstantPoolUnavailableScope constant_pool_unavailable(masm()); |
| 647 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), | 649 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), |
| 648 RelocInfo::CODE_TARGET); | 650 RelocInfo::CODE_TARGET); |
| 649 } | 651 } |
| 650 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 652 DCHECK_EQ(LeaveRC, i.OutputRCBit()); |
| 651 break; | 653 break; |
| 652 } | 654 } |
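The kArchCallCodeObject case above, and the kArchCallJSFunction and kArchLazyBailout cases below, all gain the same guard: a BlockTrampolinePoolScope keeps the PPC assembler from emitting its branch-trampoline pool between the lazy-deopt padding and the call sequence, which would otherwise shift the return-address offset that RecordCallPosition records for the deoptimizer. As a rough illustration of the RAII idiom involved (a minimal sketch with assumed names and a counter-based implementation, not V8's actual Assembler):

  class Assembler {
   public:
    class BlockTrampolinePoolScope {
     public:
      explicit BlockTrampolinePoolScope(Assembler* assem) : assem_(assem) {
        assem_->trampoline_pool_blocked_nesting_++;  // block emission on entry
      }
      ~BlockTrampolinePoolScope() {
        assem_->trampoline_pool_blocked_nesting_--;  // re-allow on scope exit
      }
     private:
      Assembler* assem_;
    };
    bool is_trampoline_pool_blocked() const {
      return trampoline_pool_blocked_nesting_ > 0;
    }
   private:
    int trampoline_pool_blocked_nesting_ = 0;
  };

Because the scope object lives until the end of the enclosing case block, the pool stays blocked across EnsureSpaceForLazyDeopt() and the emitted call, with no explicit unblock call to forget.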
| 653 case kArchCallJSFunction: { | 655 case kArchCallJSFunction: { |
| 656 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( |
| 657 masm()); |
| 654 EnsureSpaceForLazyDeopt(); | 658 EnsureSpaceForLazyDeopt(); |
| 655 Register func = i.InputRegister(0); | 659 Register func = i.InputRegister(0); |
| 656 if (FLAG_debug_code) { | 660 if (FLAG_debug_code) { |
| 657 // Check the function's context matches the context argument. | 661 // Check the function's context matches the context argument. |
| 658 __ LoadP(kScratchReg, | 662 __ LoadP(kScratchReg, |
| 659 FieldMemOperand(func, JSFunction::kContextOffset)); | 663 FieldMemOperand(func, JSFunction::kContextOffset)); |
| 660 __ cmp(cp, kScratchReg); | 664 __ cmp(cp, kScratchReg); |
| 661 __ Assert(eq, kWrongFunctionContext); | 665 __ Assert(eq, kWrongFunctionContext); |
| 662 } | 666 } |
| 663 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 667 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
| (...skipping 11 matching lines...) |
| 675 __ cmp(cp, kScratchReg); | 679 __ cmp(cp, kScratchReg); |
| 676 __ Assert(eq, kWrongFunctionContext); | 680 __ Assert(eq, kWrongFunctionContext); |
| 677 } | 681 } |
| 678 AssembleDeconstructActivationRecord(); | 682 AssembleDeconstructActivationRecord(); |
| 679 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 683 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
| 680 __ Jump(ip); | 684 __ Jump(ip); |
| 681 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 685 DCHECK_EQ(LeaveRC, i.OutputRCBit()); |
| 682 break; | 686 break; |
| 683 } | 687 } |
| 684 case kArchLazyBailout: { | 688 case kArchLazyBailout: { |
| 689 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( |
| 690 masm()); |
| 685 EnsureSpaceForLazyDeopt(); | 691 EnsureSpaceForLazyDeopt(); |
| 686 RecordCallPosition(instr); | 692 RecordCallPosition(instr); |
| 687 break; | 693 break; |
| 688 } | 694 } |
| 689 case kArchPrepareCallCFunction: { | 695 case kArchPrepareCallCFunction: { |
| 690 int const num_parameters = MiscField::decode(instr->opcode()); | 696 int const num_parameters = MiscField::decode(instr->opcode()); |
| 691 __ PrepareCallCFunction(num_parameters, kScratchReg); | 697 __ PrepareCallCFunction(num_parameters, kScratchReg); |
| 692 break; | 698 break; |
| 693 } | 699 } |
| 694 case kArchCallCFunction: { | 700 case kArchCallCFunction: { |
| (...skipping 1000 matching lines...) |
| 1695 void CodeGenerator::EnsureSpaceForLazyDeopt() { | 1701 void CodeGenerator::EnsureSpaceForLazyDeopt() { |
| 1696 if (!info()->ShouldEnsureSpaceForLazyDeopt()) { | 1702 if (!info()->ShouldEnsureSpaceForLazyDeopt()) { |
| 1697 return; | 1703 return; |
| 1698 } | 1704 } |
| 1699 | 1705 |
| 1700 int space_needed = Deoptimizer::patch_size(); | 1706 int space_needed = Deoptimizer::patch_size(); |
| 1701 // Ensure that we have enough space after the previous lazy-bailout | 1707 // Ensure that we have enough space after the previous lazy-bailout |
| 1702 // instruction for patching the code here. | 1708 // instruction for patching the code here. |
| 1703 int current_pc = masm()->pc_offset(); | 1709 int current_pc = masm()->pc_offset(); |
| 1704 if (current_pc < last_lazy_deopt_pc_ + space_needed) { | 1710 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
| 1711 // Block trampoline pool emission for the duration of the padding. |
| 1712 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( |
| 1713 masm()); |
| 1705 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 1714 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 1706 DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize); | 1715 DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize); |
| 1707 while (padding_size > 0) { | 1716 while (padding_size > 0) { |
| 1708 __ nop(); | 1717 __ nop(); |
| 1709 padding_size -= v8::internal::Assembler::kInstrSize; | 1718 padding_size -= v8::internal::Assembler::kInstrSize; |
| 1710 } | 1719 } |
| 1711 } | 1720 } |
| 1712 } | 1721 } |
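The padding loop above is plain fixed-width arithmetic: every PPC instruction is kInstrSize (4) bytes, so the gap between the last lazy-bailout site and the patch region is filled with nops. A self-contained sketch of that computation with illustrative numbers (the 20-byte space_needed is a hypothetical stand-in for Deoptimizer::patch_size(); the real value is not asserted here):

  #include <cassert>

  // Mirrors the padding computation in EnsureSpaceForLazyDeopt().
  int ComputePaddingSize(int last_lazy_deopt_pc, int space_needed,
                         int current_pc) {
    if (current_pc >= last_lazy_deopt_pc + space_needed) return 0;
    return last_lazy_deopt_pc + space_needed - current_pc;
  }

  int main() {
    const int kInstrSize = 4;  // PPC instructions are fixed-width
    int padding = ComputePaddingSize(/*last_lazy_deopt_pc=*/100,
                                     /*space_needed=*/20,
                                     /*current_pc=*/108);
    assert(padding == 12);              // filled with 12 / 4 = 3 nops
    assert(padding % kInstrSize == 0);  // matches the DCHECK above
    return 0;
  }

The new BlockTrampolinePoolScope around the nop loop matters for the same reason as at the call sites: a trampoline pool emitted mid-padding would make the region longer than padding_size and leave non-nop instructions where the deoptimizer expects to patch.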
| 1713 | 1722 |
| 1714 #undef __ | 1723 #undef __ |
| 1715 | 1724 |
| 1716 } // namespace compiler | 1725 } // namespace compiler |
| 1717 } // namespace internal | 1726 } // namespace internal |
| 1718 } // namespace v8 | 1727 } // namespace v8 |