Chromium Code Reviews

Issue 2542403002: MIPS: Use JIC/JIALC offset when possible (Closed)

  | OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 
| 6 | 6 | 
| 7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS | 
| 8 | 8 | 
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" | 
| 10 #include "src/base/division-by-constant.h" | 10 #include "src/base/division-by-constant.h" | 
| (...skipping 3582 matching lines...) | |
| 3593 DCHECK(offset == 0); | 3593 DCHECK(offset == 0); | 
| 3594 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 3594 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 
| 3595 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt); | 3595 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt); | 
| 3596 } else { | 3596 } else { | 
| 3597 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot); | 3597 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot); | 
| 3598 } | 3598 } | 
| 3599 } | 3599 } | 
| 3600 return false; | 3600 return false; | 
| 3601 } | 3601 } | 
| 3602 | 3602 | 
| 3603 void MacroAssembler::Jump(Register target, int16_t offset, Condition cond, | |
| 3604 Register rs, const Operand& rt, BranchDelaySlot bd) { | |
| 3605 BlockTrampolinePoolScope block_trampoline_pool(this); | |
| 3606 DCHECK(is_int16(offset)); | |
| dusan.simicic 2017/04/25 11:24:10: 'offset' always fits in int16 in this method, DCHE… | |
| 3607 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | |
| 3608 if (cond == cc_always) { | |
| 3609 jic(target, offset); | |
| 3610 } else { | |
| 3611 BRANCH_ARGS_CHECK(cond, rs, rt); | |
| 3612 Branch(2, NegateCondition(cond), rs, rt); | |
| 3613 jic(target, offset); | |
| 3614 } | |
| 3615 } else { | |
| 3616 if (offset != 0) { | |
| 3617 Addu(target, target, offset); | |
| 3618 } | |
| 3619 if (cond == cc_always) { | |
| 3620 jr(target); | |
| 3621 } else { | |
| 3622 BRANCH_ARGS_CHECK(cond, rs, rt); | |
| 3623 Branch(2, NegateCondition(cond), rs, rt); | |
| 3624 jr(target); | |
| 3625 } | |
| 3626 // Emit a nop in the branch delay slot if required. | |
| 3627 if (bd == PROTECT) nop(); | |
| 3628 } | |
| 3629 } | |
| 3603 | 3630 | 
| 3604 void MacroAssembler::Jump(Register target, | 3631 void MacroAssembler::Jump(Register target, Register base, int16_t offset, | 
| 3605 Condition cond, | 3632 Condition cond, Register rs, const Operand& rt, | 
| 3606 Register rs, | |
| 3607 const Operand& rt, | |
| 3608 BranchDelaySlot bd) { | 3633 BranchDelaySlot bd) { | 
| 3634 DCHECK(is_int16(offset)); | |
| dusan.simicic 2017/04/25 11:24:10: Same comment as above. | |
| 3609 BlockTrampolinePoolScope block_trampoline_pool(this); | 3635 BlockTrampolinePoolScope block_trampoline_pool(this); | 
| 3610 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | 3636 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | 
| 3611 if (cond == cc_always) { | 3637 if (cond == cc_always) { | 
| 3612 jic(target, 0); | 3638 jic(base, offset); | 
| 3613 } else { | 3639 } else { | 
| 3614 BRANCH_ARGS_CHECK(cond, rs, rt); | 3640 BRANCH_ARGS_CHECK(cond, rs, rt); | 
| 3615 Branch(2, NegateCondition(cond), rs, rt); | 3641 Branch(2, NegateCondition(cond), rs, rt); | 
| 3616 jic(target, 0); | 3642 jic(base, offset); | 
| 3617 } | 3643 } | 
| 3618 } else { | 3644 } else { | 
| 3645 if (offset != 0) { | |
| 3646 Addu(target, base, offset); | |
| 3647 } else { // Call through target | |
| 3648 if (!target.is(base)) mov(target, base); | |
| 3649 } | |
| 3619 if (cond == cc_always) { | 3650 if (cond == cc_always) { | 
| 3620 jr(target); | 3651 jr(target); | 
| 3621 } else { | 3652 } else { | 
| 3653 BRANCH_ARGS_CHECK(cond, rs, rt); | |
| 3654 Branch(2, NegateCondition(cond), rs, rt); | |
| 3655 jr(target); | |
| 3656 } | |
| 3657 // Emit a nop in the branch delay slot if required. | |
| 3658 if (bd == PROTECT) nop(); | |
| 3659 } | |
| 3660 } | |
| 3661 | |
| 3662 void MacroAssembler::Jump(Register target, const Operand& offset, | |
| 3663 Condition cond, Register rs, const Operand& rt, | |
| 3664 BranchDelaySlot bd) { | |
| 3665 BlockTrampolinePoolScope block_trampoline_pool(this); | |
| 3666 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && | |
| 3667 !is_int16(offset.immediate())) { | |
| 3668 uint32_t aui_offset, jic_offset; | |
| 3669 Assembler::UnpackTargetAddressUnsigned(offset.immediate(), aui_offset, | |
| 3670 jic_offset); | |
| 3671 RecordRelocInfo(RelocInfo::EXTERNAL_REFERENCE, offset.immediate()); | |
| 3672 aui(target, target, aui_offset); | |
| 3673 if (cond == cc_always) { | |
| 3674 jic(target, jic_offset); | |
| 3675 } else { | |
| 3676 BRANCH_ARGS_CHECK(cond, rs, rt); | |
| 3677 Branch(2, NegateCondition(cond), rs, rt); | |
| 3678 jic(target, jic_offset); | |
| 3679 } | |
| 3680 } else { | |
| 3681 if (offset.immediate() != 0) { | |
| 3682 Addu(target, target, offset); | |
| 3683 } | |
| 3684 if (cond == cc_always) { | |
| 3685 jr(target); | |
| 3686 } else { | |
| 3622 BRANCH_ARGS_CHECK(cond, rs, rt); | 3687 BRANCH_ARGS_CHECK(cond, rs, rt); | 
| 3623 Branch(2, NegateCondition(cond), rs, rt); | 3688 Branch(2, NegateCondition(cond), rs, rt); | 
| 3624 jr(target); | 3689 jr(target); | 
| 3625 } | 3690 } | 
| 3626 // Emit a nop in the branch delay slot if required. | 3691 // Emit a nop in the branch delay slot if required. | 
| 3627 if (bd == PROTECT) nop(); | 3692 if (bd == PROTECT) nop(); | 
| 3628 } | 3693 } | 
| 3629 } | 3694 } | 
| 3630 | 3695 | 
| 3631 | 3696 | 
| 3632 void MacroAssembler::Jump(intptr_t target, | 3697 void MacroAssembler::Jump(intptr_t target, | 
| 3633 RelocInfo::Mode rmode, | 3698 RelocInfo::Mode rmode, | 
| 3634 Condition cond, | 3699 Condition cond, | 
| 3635 Register rs, | 3700 Register rs, | 
| 3636 const Operand& rt, | 3701 const Operand& rt, | 
| 3637 BranchDelaySlot bd) { | 3702 BranchDelaySlot bd) { | 
| 3703 BlockTrampolinePoolScope block_trampoline_pool(this); | |
| 3638 Label skip; | 3704 Label skip; | 
| 3639 if (cond != cc_always) { | 3705 if (cond != cc_always) { | 
| 3640 Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt); | 3706 Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt); | 
| 3641 } | 3707 } | 
| 3642 // The first instruction of 'li' may be placed in the delay slot. | 3708 // The first instruction of 'li' may be placed in the delay slot. | 
| 3643 // This is not an issue, t9 is expected to be clobbered anyway. | 3709 // This is not an issue, t9 is expected to be clobbered anyway. | 
| dusan.simicic 2017/04/25 11:24:10: This comment is related to 'else' block now. Maybe… | |
| 3644 li(t9, Operand(target, rmode)); | 3710 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | 
| 3645 Jump(t9, al, zero_reg, Operand(zero_reg), bd); | 3711 uint32_t lui_offset, jic_offset; | 
| 3712 UnpackTargetAddressUnsigned(target, lui_offset, jic_offset); | |
| 3713 DCHECK(MustUseReg(rmode)); | |
| 3714 RecordRelocInfo(rmode, target); | |
| 3715 lui(t9, lui_offset); | |
| 3716 Jump(t9, jic_offset, al, zero_reg, Operand(zero_reg), bd); | |
| 3717 } else { | |
| 3718 li(t9, Operand(target, rmode)); | |
| 3719 Jump(t9, 0, al, zero_reg, Operand(zero_reg), bd); | |
| 3720 } | |
| 3646 bind(&skip); | 3721 bind(&skip); | 
| 3647 } | 3722 } | 
| 3648 | 3723 | 
| 3649 | 3724 | 
| 3650 void MacroAssembler::Jump(Address target, | 3725 void MacroAssembler::Jump(Address target, | 
| 3651 RelocInfo::Mode rmode, | 3726 RelocInfo::Mode rmode, | 
| 3652 Condition cond, | 3727 Condition cond, | 
| 3653 Register rs, | 3728 Register rs, | 
| 3654 const Operand& rt, | 3729 const Operand& rt, | 
| 3655 BranchDelaySlot bd) { | 3730 BranchDelaySlot bd) { | 
| 3656 DCHECK(!RelocInfo::IsCodeTarget(rmode)); | 3731 DCHECK(!RelocInfo::IsCodeTarget(rmode)); | 
| 3657 Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd); | 3732 Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd); | 
| 3658 } | 3733 } | 
| 3659 | 3734 | 
| 3660 | 3735 | 
| 3661 void MacroAssembler::Jump(Handle<Code> code, | 3736 void MacroAssembler::Jump(Handle<Code> code, | 
| 3662 RelocInfo::Mode rmode, | 3737 RelocInfo::Mode rmode, | 
| 3663 Condition cond, | 3738 Condition cond, | 
| 3664 Register rs, | 3739 Register rs, | 
| 3665 const Operand& rt, | 3740 const Operand& rt, | 
| 3666 BranchDelaySlot bd) { | 3741 BranchDelaySlot bd) { | 
| 3667 DCHECK(RelocInfo::IsCodeTarget(rmode)); | 3742 DCHECK(RelocInfo::IsCodeTarget(rmode)); | 
| 3668 AllowDeferredHandleDereference embedding_raw_address; | 3743 AllowDeferredHandleDereference embedding_raw_address; | 
| 3669 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd); | 3744 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd); | 
| 3670 } | 3745 } | 
| 3671 | 3746 | 
| 3672 | 3747 int MacroAssembler::CallSize(Register target, int16_t offset, Condition cond, | 
| 3673 int MacroAssembler::CallSize(Register target, | 3748 Register rs, const Operand& rt, | 
| 3674 Condition cond, | |
| 3675 Register rs, | |
| 3676 const Operand& rt, | |
| 3677 BranchDelaySlot bd) { | 3749 BranchDelaySlot bd) { | 
| 3678 int size = 0; | 3750 int size = 0; | 
| 3679 | 3751 | 
| 3680 if (cond == cc_always) { | 3752 if (cond == cc_always) { | 
| 3681 size += 1; | 3753 size += 1; | 
| 3682 } else { | 3754 } else { | 
| 3683 size += 3; | 3755 size += 3; | 
| 3684 } | 3756 } | 
| 3685 | 3757 | 
| 3686 if (bd == PROTECT && !IsMipsArchVariant(kMips32r6)) size += 1; | 3758 if (bd == PROTECT && !IsMipsArchVariant(kMips32r6)) size += 1; | 
| 3687 | 3759 | 
| 3760 if (!IsMipsArchVariant(kMips32r6) && offset != 0) { | |
| 3761 size += 1; | |
| 3762 } | |
| 3763 | |
| 3688 return size * kInstrSize; | 3764 return size * kInstrSize; | 
| 3689 } | 3765 } | 
| 3690 | 3766 | 
| 3691 | 3767 | 
| 3692 // Note: To call gcc-compiled C code on mips, you must call thru t9. | 3768 // Note: To call gcc-compiled C code on mips, you must call thru t9. | 
| 3693 void MacroAssembler::Call(Register target, | 3769 void MacroAssembler::Call(Register target, int16_t offset, Condition cond, | 
| 3694 Condition cond, | 3770 Register rs, const Operand& rt, BranchDelaySlot bd) { | 
| 3695 Register rs, | 3771 DCHECK(is_int16(offset)); | 
| 3696 const Operand& rt, | |
| 3697 BranchDelaySlot bd) { | |
| 3698 #ifdef DEBUG | 3772 #ifdef DEBUG | 
| 3699 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0; | 3773 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0; | 
| 3700 #endif | 3774 #endif | 
| 3775 | |
| 3776 BlockTrampolinePoolScope block_trampoline_pool(this); | |
| 3777 Label start; | |
| 3778 bind(&start); | |
| 3779 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | |
| 3780 if (cond == cc_always) { | |
| 3781 jialc(target, offset); | |
| 3782 } else { | |
| 3783 BRANCH_ARGS_CHECK(cond, rs, rt); | |
| 3784 Branch(2, NegateCondition(cond), rs, rt); | |
| 3785 jialc(target, offset); | |
| 3786 } | |
| 3787 } else { | |
| 3788 if (offset != 0) { | |
| 3789 Addu(target, target, offset); | |
| 3790 } | |
| 3791 if (cond == cc_always) { | |
| 3792 jalr(target); | |
| 3793 } else { | |
| 3794 BRANCH_ARGS_CHECK(cond, rs, rt); | |
| 3795 Branch(2, NegateCondition(cond), rs, rt); | |
| 3796 jalr(target); | |
| 3797 } | |
| 3798 // Emit a nop in the branch delay slot if required. | |
| 3799 if (bd == PROTECT) nop(); | |
| 3800 } | |
| 3801 | |
| 3802 #ifdef DEBUG | |
| 3803 CHECK_EQ(size + CallSize(target, offset, cond, rs, rt, bd), | |
| 3804 SizeOfCodeGeneratedSince(&start)); | |
| 3805 #endif | |
| 3806 } | |
| 3807 | |
| 3808 // Note: To call gcc-compiled C code on mips, you must call thru t9. | |
| 3809 void MacroAssembler::Call(Register target, Register base, int16_t offset, | |
| 3810 Condition cond, Register rs, const Operand& rt, | |
| 3811 BranchDelaySlot bd) { | |
| 3812 DCHECK(is_uint16(offset)); | |
| 3813 #ifdef DEBUG | |
| 3814 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0; | |
| 3815 #endif | |
| 3701 | 3816 | 
| 3702 BlockTrampolinePoolScope block_trampoline_pool(this); | 3817 BlockTrampolinePoolScope block_trampoline_pool(this); | 
| 3703 Label start; | 3818 Label start; | 
| 3704 bind(&start); | 3819 bind(&start); | 
| 3705 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | 3820 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | 
| 3706 if (cond == cc_always) { | 3821 if (cond == cc_always) { | 
| 3707 jialc(target, 0); | 3822 jialc(base, offset); | 
| 3708 } else { | 3823 } else { | 
| 3709 BRANCH_ARGS_CHECK(cond, rs, rt); | 3824 BRANCH_ARGS_CHECK(cond, rs, rt); | 
| 3710 Branch(2, NegateCondition(cond), rs, rt); | 3825 Branch(2, NegateCondition(cond), rs, rt); | 
| 3711 jialc(target, 0); | 3826 jialc(base, offset); | 
| 3712 } | 3827 } | 
| 3713 } else { | 3828 } else { | 
| 3829 if (offset != 0) { | |
| 3830 Addu(target, base, offset); | |
| 3831 } else { // Call through target | |
| 3832 if (!target.is(base)) mov(target, base); | |
| 3833 } | |
| 3714 if (cond == cc_always) { | 3834 if (cond == cc_always) { | 
| 3715 jalr(target); | 3835 jalr(target); | 
| 3716 } else { | 3836 } else { | 
| 3717 BRANCH_ARGS_CHECK(cond, rs, rt); | 3837 BRANCH_ARGS_CHECK(cond, rs, rt); | 
| 3718 Branch(2, NegateCondition(cond), rs, rt); | 3838 Branch(2, NegateCondition(cond), rs, rt); | 
| 3719 jalr(target); | 3839 jalr(target); | 
| 3720 } | 3840 } | 
| 3721 // Emit a nop in the branch delay slot if required. | 3841 // Emit a nop in the branch delay slot if required. | 
| 3722 if (bd == PROTECT) nop(); | 3842 if (bd == PROTECT) nop(); | 
| 3723 } | 3843 } | 
| 3724 | 3844 | 
| 3725 #ifdef DEBUG | 3845 #ifdef DEBUG | 
| 3726 CHECK_EQ(size + CallSize(target, cond, rs, rt, bd), | 3846 CHECK_EQ(size + CallSize(target, offset, cond, rs, rt, bd), | 
| 3727 SizeOfCodeGeneratedSince(&start)); | 3847 SizeOfCodeGeneratedSince(&start)); | 
| 3728 #endif | 3848 #endif | 
| 3729 } | 3849 } | 
| 3730 | 3850 | 
| 3731 | 3851 | 
| 3732 int MacroAssembler::CallSize(Address target, | 3852 int MacroAssembler::CallSize(Address target, | 
| 3733 RelocInfo::Mode rmode, | 3853 RelocInfo::Mode rmode, | 
| 3734 Condition cond, | 3854 Condition cond, | 
| 3735 Register rs, | 3855 Register rs, | 
| 3736 const Operand& rt, | 3856 const Operand& rt, | 
| 3737 BranchDelaySlot bd) { | 3857 BranchDelaySlot bd) { | 
| 3738 int size = CallSize(t9, cond, rs, rt, bd); | 3858 int size = CallSize(t9, 0, cond, rs, rt, bd); | 
| 3739 return size + 2 * kInstrSize; | 3859 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && cond == cc_always) | 
| 3860 return size + 1 * kInstrSize; | |
| 3861 else | |
| 3862 return size + 2 * kInstrSize; | |
| 3740 } | 3863 } | 
| 3741 | 3864 | 
| 3742 | 3865 | 
| 3743 void MacroAssembler::Call(Address target, | 3866 void MacroAssembler::Call(Address target, | 
| 3744 RelocInfo::Mode rmode, | 3867 RelocInfo::Mode rmode, | 
| 3745 Condition cond, | 3868 Condition cond, | 
| 3746 Register rs, | 3869 Register rs, | 
| 3747 const Operand& rt, | 3870 const Operand& rt, | 
| 3748 BranchDelaySlot bd) { | 3871 BranchDelaySlot bd) { | 
| 3872 CheckBuffer(); | |
| 3749 BlockTrampolinePoolScope block_trampoline_pool(this); | 3873 BlockTrampolinePoolScope block_trampoline_pool(this); | 
| 3750 Label start; | 3874 Label start; | 
| 3751 bind(&start); | 3875 bind(&start); | 
| 3752 int32_t target_int = reinterpret_cast<int32_t>(target); | 3876 int32_t target_int = reinterpret_cast<int32_t>(target); | 
| 3753 li(t9, Operand(target_int, rmode), CONSTANT_SIZE); | 3877 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && cond == cc_always) { | 
| 3754 Call(t9, cond, rs, rt, bd); | 3878 uint32_t lui_offset, jialc_offset; | 
| 3879 UnpackTargetAddressUnsigned(target_int, lui_offset, jialc_offset); | |
| 3880 if (MustUseReg(rmode)) { | |
| 3881 RecordRelocInfo(rmode, target_int); | |
| 3882 } | |
| 3883 lui(t9, lui_offset); | |
| 3884 Call(t9, jialc_offset, cond, rs, rt, bd); | |
| 3885 } else { | |
| 3886 li(t9, Operand(target_int, rmode), CONSTANT_SIZE); | |
| 3887 Call(t9, 0, cond, rs, rt, bd); | |
| 3888 } | |
| 3755 DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd), | 3889 DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd), | 
| 3756 SizeOfCodeGeneratedSince(&start)); | 3890 SizeOfCodeGeneratedSince(&start)); | 
| 3757 } | 3891 } | 
| 3758 | 3892 | 
| 3759 | 3893 | 
| 3760 int MacroAssembler::CallSize(Handle<Code> code, | 3894 int MacroAssembler::CallSize(Handle<Code> code, | 
| 3761 RelocInfo::Mode rmode, | 3895 RelocInfo::Mode rmode, | 
| 3762 TypeFeedbackId ast_id, | 3896 TypeFeedbackId ast_id, | 
| 3763 Condition cond, | 3897 Condition cond, | 
| 3764 Register rs, | 3898 Register rs, | 
| (...skipping 24 matching lines...) | |
| 3789 Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd); | 3923 Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd); | 
| 3790 DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd), | 3924 DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd), | 
| 3791 SizeOfCodeGeneratedSince(&start)); | 3925 SizeOfCodeGeneratedSince(&start)); | 
| 3792 } | 3926 } | 
| 3793 | 3927 | 
| 3794 | 3928 | 
| 3795 void MacroAssembler::Ret(Condition cond, | 3929 void MacroAssembler::Ret(Condition cond, | 
| 3796 Register rs, | 3930 Register rs, | 
| 3797 const Operand& rt, | 3931 const Operand& rt, | 
| 3798 BranchDelaySlot bd) { | 3932 BranchDelaySlot bd) { | 
| 3799 Jump(ra, cond, rs, rt, bd); | 3933 Jump(ra, 0, cond, rs, rt, bd); | 
| 3800 } | 3934 } | 
| 3801 | 3935 | 
| 3802 | 3936 | 
| 3803 void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) { | 3937 void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) { | 
| 3804 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && | 3938 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && | 
| 3805 (!L->is_bound() || is_near_r6(L))) { | 3939 (!L->is_bound() || is_near_r6(L))) { | 
| 3806 BranchShortHelperR6(0, L); | 3940 BranchShortHelperR6(0, L); | 
| 3807 } else { | 3941 } else { | 
| 3808 BlockTrampolinePoolScope block_trampoline_pool(this); | 3942 BlockTrampolinePoolScope block_trampoline_pool(this); | 
| 3809 uint32_t imm32; | 3943 uint32_t imm32; | 
| 3810 imm32 = jump_address(L); | 3944 imm32 = jump_address(L); | 
| 3811 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 3945 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 
| 3812 uint32_t lui_offset, jic_offset; | 3946 uint32_t lui_offset, jic_offset; | 
| 3813 UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset); | 3947 UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset); | 
| 3814 { | 3948 { | 
| 3815 BlockGrowBufferScope block_buf_growth(this); | 3949 BlockGrowBufferScope block_buf_growth(this); | 
| 3816 // Buffer growth (and relocation) must be blocked for internal | 3950 // Buffer growth (and relocation) must be blocked for internal | 
| 3817 // references until associated instructions are emitted and | 3951 // references until associated instructions are emitted and | 
| 3818 // available to be patched. | 3952 // available to be patched. | 
| 3819 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 3953 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 
| 3820 lui(at, lui_offset); | 3954 lui(at, lui_offset); | 
| 3821 jic(at, jic_offset); | 3955 jic(at, jic_offset); | 
| 3822 } | 3956 } | 
| 3823 CheckBuffer(); | 3957 CheckBuffer(); | 
| 3824 } else { | 3958 } else { | 
| 3825 { | 3959 { | 
| 3826 BlockGrowBufferScope block_buf_growth(this); | 3960 BlockGrowBufferScope block_buf_growth(this); | 
| 3827 // Buffer growth (and relocation) must be blocked for internal | 3961 // Buffer growth (and relocation) must be blocked for internal | 
| 3828 // references | 3962 // references until associated instructions are emitted and | 
| 3829 // until associated instructions are emitted and available to be | 3963 // available to be patched. | 
| 3830 // patched. | |
| 3831 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 3964 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 
| 3832 lui(at, (imm32 & kHiMask) >> kLuiShift); | 3965 lui(at, (imm32 & kHiMask) >> kLuiShift); | 
| 3833 ori(at, at, (imm32 & kImm16Mask)); | 3966 ori(at, at, (imm32 & kImm16Mask)); | 
| 3834 } | 3967 } | 
| 3835 CheckBuffer(); | 3968 CheckBuffer(); | 
| 3836 jr(at); | 3969 jr(at); | 
| 3837 // Emit a nop in the branch delay slot if required. | 3970 // Emit a nop in the branch delay slot if required. | 
| 3838 if (bdslot == PROTECT) nop(); | 3971 if (bdslot == PROTECT) nop(); | 
| 3839 } | 3972 } | 
| 3840 } | 3973 } | 
| 3841 } | 3974 } | 
| 3842 | 3975 | 
| 3843 | 3976 | 
| 3844 void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) { | 3977 void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) { | 
| 3845 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && | 3978 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && | 
| 3846 (!L->is_bound() || is_near_r6(L))) { | 3979 (!L->is_bound() || is_near_r6(L))) { | 
| 3847 BranchAndLinkShortHelperR6(0, L); | 3980 BranchAndLinkShortHelperR6(0, L); | 
| 3848 } else { | 3981 } else { | 
| 3849 BlockTrampolinePoolScope block_trampoline_pool(this); | 3982 BlockTrampolinePoolScope block_trampoline_pool(this); | 
| 3850 uint32_t imm32; | 3983 uint32_t imm32; | 
| 3851 imm32 = jump_address(L); | 3984 imm32 = jump_address(L); | 
| 3852 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 3985 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 
| 3853 uint32_t lui_offset, jic_offset; | 3986 uint32_t lui_offset, jialc_offset; | 
| 3854 UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset); | 3987 UnpackTargetAddressUnsigned(imm32, lui_offset, jialc_offset); | 
| 3855 { | 3988 { | 
| 3856 BlockGrowBufferScope block_buf_growth(this); | 3989 BlockGrowBufferScope block_buf_growth(this); | 
| 3857 // Buffer growth (and relocation) must be blocked for internal | 3990 // Buffer growth (and relocation) must be blocked for internal | 
| 3858 // references until associated instructions are emitted and | 3991 // references until associated instructions are emitted and | 
| 3859 // available to be patched. | 3992 // available to be patched. | 
| 3860 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 3993 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 
| 3861 lui(at, lui_offset); | 3994 lui(at, lui_offset); | 
| 3862 jialc(at, jic_offset); | 3995 jialc(at, jialc_offset); | 
| 3863 } | 3996 } | 
| 3864 CheckBuffer(); | 3997 CheckBuffer(); | 
| 3865 } else { | 3998 } else { | 
| 3866 { | 3999 { | 
| 3867 BlockGrowBufferScope block_buf_growth(this); | 4000 BlockGrowBufferScope block_buf_growth(this); | 
| 3868 // Buffer growth (and relocation) must be blocked for internal | 4001 // Buffer growth (and relocation) must be blocked for internal | 
| 3869 // references | 4002 // references until associated instructions are emitted and | 
| 3870 // until associated instructions are emitted and available to be | 4003 // available to be patched. | 
| 3871 // patched. | |
| 3872 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 4004 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 
| 3873 lui(at, (imm32 & kHiMask) >> kLuiShift); | 4005 lui(at, (imm32 & kHiMask) >> kLuiShift); | 
| 3874 ori(at, at, (imm32 & kImm16Mask)); | 4006 ori(at, at, (imm32 & kImm16Mask)); | 
| 3875 } | 4007 } | 
| 3876 CheckBuffer(); | 4008 CheckBuffer(); | 
| 3877 jalr(at); | 4009 jalr(at); | 
| 3878 // Emit a nop in the branch delay slot if required. | 4010 // Emit a nop in the branch delay slot if required. | 
| 3879 if (bdslot == PROTECT) nop(); | 4011 if (bdslot == PROTECT) nop(); | 
| 3880 } | 4012 } | 
| 3881 } | 4013 } | 
| (...skipping 2158 matching lines...) | |
| 6040 | 6172 | 
| 6041 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, | 6173 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, | 
| 6042 Register scratch) { | 6174 Register scratch) { | 
| 6043 PrepareCallCFunction(num_reg_arguments, 0, scratch); | 6175 PrepareCallCFunction(num_reg_arguments, 0, scratch); | 
| 6044 } | 6176 } | 
| 6045 | 6177 | 
| 6046 | 6178 | 
| 6047 void MacroAssembler::CallCFunction(ExternalReference function, | 6179 void MacroAssembler::CallCFunction(ExternalReference function, | 
| 6048 int num_reg_arguments, | 6180 int num_reg_arguments, | 
| 6049 int num_double_arguments) { | 6181 int num_double_arguments) { | 
| 6050 li(t8, Operand(function)); | 6182 if (IsMipsArchVariant(kMips32r6)) { | 
| 6051 CallCFunctionHelper(t8, num_reg_arguments, num_double_arguments); | 6183 uint32_t lui_offset, jialc_offset; | 
| 6184 UnpackTargetAddressUnsigned(Operand(function).immediate(), lui_offset, | |
| 6185 jialc_offset); | |
| 6186 if (MustUseReg(Operand(function).rmode())) { | |
| 6187 RecordRelocInfo(Operand(function).rmode(), Operand(function).immediate()); | |
| 6188 } | |
| 6189 lui(t9, lui_offset); | |
| 6190 CallCFunctionHelper(t9, jialc_offset, num_reg_arguments, | |
| 6191 num_double_arguments); | |
| 6192 } else { | |
| 6193 li(t9, Operand(function)); | |
| 6194 CallCFunctionHelper(t9, 0, num_reg_arguments, num_double_arguments); | |
| 6195 } | |
| 6052 } | 6196 } | 
| 6053 | 6197 | 
| 6054 | 6198 | 
| 6055 void MacroAssembler::CallCFunction(Register function, | 6199 void MacroAssembler::CallCFunction(Register function, | 
| 6056 int num_reg_arguments, | 6200 int num_reg_arguments, | 
| 6057 int num_double_arguments) { | 6201 int num_double_arguments) { | 
| 6058 CallCFunctionHelper(function, num_reg_arguments, num_double_arguments); | 6202 CallCFunctionHelper(function, 0, num_reg_arguments, num_double_arguments); | 
| 6059 } | 6203 } | 
| 6060 | 6204 | 
| 6061 | 6205 | 
| 6062 void MacroAssembler::CallCFunction(ExternalReference function, | 6206 void MacroAssembler::CallCFunction(ExternalReference function, | 
| 6063 int num_arguments) { | 6207 int num_arguments) { | 
| 6064 CallCFunction(function, num_arguments, 0); | 6208 CallCFunction(function, num_arguments, 0); | 
| 6065 } | 6209 } | 
| 6066 | 6210 | 
| 6067 | 6211 | 
| 6068 void MacroAssembler::CallCFunction(Register function, | 6212 void MacroAssembler::CallCFunction(Register function, | 
| 6069 int num_arguments) { | 6213 int num_arguments) { | 
| 6070 CallCFunction(function, num_arguments, 0); | 6214 CallCFunction(function, num_arguments, 0); | 
| 6071 } | 6215 } | 
| 6072 | 6216 | 
| 6073 | 6217 void MacroAssembler::CallCFunctionHelper(Register function_base, | 
| 6074 void MacroAssembler::CallCFunctionHelper(Register function, | 6218 int16_t function_offset, | 
| 6075 int num_reg_arguments, | 6219 int num_reg_arguments, | 
| 6076 int num_double_arguments) { | 6220 int num_double_arguments) { | 
| 6077 DCHECK(has_frame()); | 6221 DCHECK(has_frame()); | 
| 6078 // Make sure that the stack is aligned before calling a C function unless | 6222 // Make sure that the stack is aligned before calling a C function unless | 
| 6079 // running in the simulator. The simulator has its own alignment check which | 6223 // running in the simulator. The simulator has its own alignment check which | 
| 6080 // provides more information. | 6224 // provides more information. | 
| 6081 // The argument slots are presumed to have been set up by | 6225 // The argument slots are presumed to have been set up by | 
| 6082 // PrepareCallCFunction. The C function must be called via t9, for mips ABI. | 6226 // PrepareCallCFunction. The C function must be called via t9, for mips ABI. | 
| 6083 | 6227 | 
| 6084 #if V8_HOST_ARCH_MIPS | 6228 #if V8_HOST_ARCH_MIPS | 
| (...skipping 10 matching lines...) | |
| 6095 stop("Unexpected alignment in CallCFunction"); | 6239 stop("Unexpected alignment in CallCFunction"); | 
| 6096 bind(&alignment_as_expected); | 6240 bind(&alignment_as_expected); | 
| 6097 } | 6241 } | 
| 6098 } | 6242 } | 
| 6099 #endif // V8_HOST_ARCH_MIPS | 6243 #endif // V8_HOST_ARCH_MIPS | 
| 6100 | 6244 | 
| 6101 // Just call directly. The function called cannot cause a GC, or | 6245 // Just call directly. The function called cannot cause a GC, or | 
| 6102 // allow preemption, so the return address in the link register | 6246 // allow preemption, so the return address in the link register | 
| 6103 // stays correct. | 6247 // stays correct. | 
| 6104 | 6248 | 
| 6105 if (!function.is(t9)) { | 6249 if (!function_base.is(t9)) { | 
| 6106 mov(t9, function); | 6250 mov(t9, function_base); | 
| 6107 function = t9; | 6251 function_base = t9; | 
| 6108 } | 6252 } | 
| 6109 | 6253 | 
| 6110 Call(function); | 6254 Call(function_base, function_offset); | 
| 6111 | 6255 | 
| 6112 int stack_passed_arguments = CalculateStackPassedWords( | 6256 int stack_passed_arguments = CalculateStackPassedWords( | 
| 6113 num_reg_arguments, num_double_arguments); | 6257 num_reg_arguments, num_double_arguments); | 
| 6114 | 6258 | 
| 6115 if (base::OS::ActivationFrameAlignment() > kPointerSize) { | 6259 if (base::OS::ActivationFrameAlignment() > kPointerSize) { | 
| 6116 lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 6260 lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 
| 6117 } else { | 6261 } else { | 
| 6118 Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize)); | 6262 Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize)); | 
| 6119 } | 6263 } | 
| 6120 } | 6264 } | 
| (...skipping 314 matching lines...) | |
| 6435 } | 6579 } | 
| 6436 | 6580 | 
| 6437 | 6581 | 
| 6438 CodePatcher::~CodePatcher() { | 6582 CodePatcher::~CodePatcher() { | 
| 6439 // Indicate that code has changed. | 6583 // Indicate that code has changed. | 
| 6440 if (flush_cache_ == FLUSH) { | 6584 if (flush_cache_ == FLUSH) { | 
| 6441 Assembler::FlushICache(masm_.isolate(), address_, size_); | 6585 Assembler::FlushICache(masm_.isolate(), address_, size_); | 
| 6442 } | 6586 } | 
| 6443 | 6587 | 
| 6444 // Check that the code was patched as expected. | 6588 // Check that the code was patched as expected. | 
| 6589 | |
| 6445 DCHECK(masm_.pc_ == address_ + size_); | 6590 DCHECK(masm_.pc_ == address_ + size_); | 
| 6446 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 6591 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 
| 6447 } | 6592 } | 
| 6448 | 6593 | 
| 6449 | 6594 | 
| 6450 void CodePatcher::Emit(Instr instr) { | 6595 void CodePatcher::Emit(Instr instr) { | 
| 6451 masm()->emit(instr); | 6596 masm()->emit(instr); | 
| 6452 } | 6597 } | 
| 6453 | 6598 | 
| 6454 | 6599 | 
| (...skipping 29 matching lines...) | |
| 6484 if (mag.shift > 0) sra(result, result, mag.shift); | 6629 if (mag.shift > 0) sra(result, result, mag.shift); | 
| 6485 srl(at, dividend, 31); | 6630 srl(at, dividend, 31); | 
| 6486 Addu(result, result, Operand(at)); | 6631 Addu(result, result, Operand(at)); | 
| 6487 } | 6632 } | 
| 6488 | 6633 | 
| 6489 | 6634 | 
| 6490 } // namespace internal | 6635 } // namespace internal | 
| 6491 } // namespace v8 | 6636 } // namespace v8 | 
| 6492 | 6637 | 
| 6493 #endif // V8_TARGET_ARCH_MIPS | 6638 #endif // V8_TARGET_ARCH_MIPS | 
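For context on what the new r6 paths in this diff rely on: the code loads only the upper half of the target address with `lui`/`aui` and passes the low 16 bits as the `jic`/`jialc` offset, which the hardware sign-extends and adds to the register. The sketch below is an illustrative assumption only — the helper name `SplitTargetAddress` is hypothetical, and the actual helper used by this CL, `Assembler::UnpackTargetAddressUnsigned`, may be implemented differently.

```cpp
// Illustrative sketch (not the V8 implementation): split a 32-bit target
// address into a 16-bit high part for lui and a 16-bit low part usable as
// the sign-extended offset of jic/jialc.
#include <cstdint>

void SplitTargetAddress(uint32_t target, uint32_t* lui_part,
                        uint32_t* low_offset) {
  *low_offset = target & 0xFFFF;  // becomes the jic/jialc offset
  *lui_part = target >> 16;       // upper 16 bits, loaded with lui
  // jic/jialc sign-extend their 16-bit offset. If bit 15 of the low part is
  // set, the offset acts as a negative value, so bump the high part by one
  // to compensate; the sum is still correct modulo 2^32.
  if (*low_offset & 0x8000) {
    *lui_part = (*lui_part + 1) & 0xFFFF;
  }
}

// After `lui(at, lui_part)` the register holds lui_part << 16, and
// `jic(at, low_offset)` transfers control to
// (lui_part << 16) + sign_extend(low_offset), which equals `target`.
```

This also matches the size accounting in the diff: on r6 with a protected delay slot, a call to an absolute address needs only one extra instruction (the `lui`) before `jialc`, whereas the pre-r6 path still materializes the full address with a `lui`/`ori` pair before `jalr`, which is why `CallSize(Address, ...)` charges one versus two extra instructions.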