| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
| 8 | 8 |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/division-by-constant.h" | 10 #include "src/base/division-by-constant.h" |
| (...skipping 3649 matching lines...) |
| 3660 DCHECK(offset == 0); | 3660 DCHECK(offset == 0); |
| 3661 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 3661 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
| 3662 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt); | 3662 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt); |
| 3663 } else { | 3663 } else { |
| 3664 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot); | 3664 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot); |
| 3665 } | 3665 } |
| 3666 } | 3666 } |
| 3667 return false; | 3667 return false; |
| 3668 } | 3668 } |
| 3669 | 3669 |
| 3670 | 3670 void MacroAssembler::Jump(Register target, int16_t offset, Condition cond, |
| 3671 void MacroAssembler::Jump(Register target, | 3671 Register rs, const Operand& rt, BranchDelaySlot bd) { |
| 3672 Condition cond, | 3672 DCHECK(is_int16(offset)); |
| 3673 Register rs, | |
| 3674 const Operand& rt, | |
| 3675 BranchDelaySlot bd) { | |
| 3676 BlockTrampolinePoolScope block_trampoline_pool(this); | 3673 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 3677 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | 3674 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { |
| 3678 if (cond == cc_always) { | 3675 if (cond == cc_always) { |
| 3679 jic(target, 0); | 3676 jic(target, offset); |
| 3680 } else { | 3677 } else { |
| 3681 BRANCH_ARGS_CHECK(cond, rs, rt); | 3678 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3682 Branch(2, NegateCondition(cond), rs, rt); | 3679 Branch(2, NegateCondition(cond), rs, rt); |
| 3683 jic(target, 0); | 3680 jic(target, offset); |
| 3684 } | 3681 } |
| 3685 } else { | 3682 } else { |
| 3683 if (offset != 0) { |
| 3684 Addu(target, target, offset); |
| 3685 } |
| 3686 if (cond == cc_always) { | 3686 if (cond == cc_always) { |
| 3687 jr(target); | 3687 jr(target); |
| 3688 } else { | 3688 } else { |
| 3689 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3690 Branch(2, NegateCondition(cond), rs, rt); |
| 3691 jr(target); |
| 3692 } |
| 3693 // Emit a nop in the branch delay slot if required. |
| 3694 if (bd == PROTECT) nop(); |
| 3695 } |
| 3696 } |
| 3697 |
| 3698 void MacroAssembler::Jump(Register target, Register base, int16_t offset, |
| 3699 Condition cond, Register rs, const Operand& rt, |
| 3700 BranchDelaySlot bd) { |
| 3701 DCHECK(is_int16(offset)); |
| 3702 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 3703 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { |
| 3704 if (cond == cc_always) { |
| 3705 jic(base, offset); |
| 3706 } else { |
| 3707 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3708 Branch(2, NegateCondition(cond), rs, rt); |
| 3709 jic(base, offset); |
| 3710 } |
| 3711 } else { |
| 3712 if (offset != 0) { |
| 3713 Addu(target, base, offset); |
| 3714 } else { // Call through target |
| 3715 if (!target.is(base)) mov(target, base); |
| 3716 } |
| 3717 if (cond == cc_always) { |
| 3718 jr(target); |
| 3719 } else { |
| 3720 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3721 Branch(2, NegateCondition(cond), rs, rt); |
| 3722 jr(target); |
| 3723 } |
| 3724 // Emit a nop in the branch delay slot if required. |
| 3725 if (bd == PROTECT) nop(); |
| 3726 } |
| 3727 } |
| 3728 |
| 3729 void MacroAssembler::Jump(Register target, const Operand& offset, |
| 3730 Condition cond, Register rs, const Operand& rt, |
| 3731 BranchDelaySlot bd) { |
| 3732 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 3733 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && |
| 3734 !is_int16(offset.immediate())) { |
| 3735 uint32_t aui_offset, jic_offset; |
| 3736 Assembler::UnpackTargetAddressUnsigned(offset.immediate(), aui_offset, |
| 3737 jic_offset); |
| 3738 RecordRelocInfo(RelocInfo::EXTERNAL_REFERENCE, offset.immediate()); |
| 3739 aui(target, target, aui_offset); |
| 3740 if (cond == cc_always) { |
| 3741 jic(target, jic_offset); |
| 3742 } else { |
| 3743 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3744 Branch(2, NegateCondition(cond), rs, rt); |
| 3745 jic(target, jic_offset); |
| 3746 } |
| 3747 } else { |
| 3748 if (offset.immediate() != 0) { |
| 3749 Addu(target, target, offset); |
| 3750 } |
| 3751 if (cond == cc_always) { |
| 3752 jr(target); |
| 3753 } else { |
| 3689 BRANCH_ARGS_CHECK(cond, rs, rt); | 3754 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3690 Branch(2, NegateCondition(cond), rs, rt); | 3755 Branch(2, NegateCondition(cond), rs, rt); |
| 3691 jr(target); | 3756 jr(target); |
| 3692 } | 3757 } |
| 3693 // Emit a nop in the branch delay slot if required. | 3758 // Emit a nop in the branch delay slot if required. |
| 3694 if (bd == PROTECT) nop(); | 3759 if (bd == PROTECT) nop(); |
| 3695 } | 3760 } |
| 3696 } | 3761 } |
| 3697 | 3762 |
| 3698 | 3763 |
| 3699 void MacroAssembler::Jump(intptr_t target, | 3764 void MacroAssembler::Jump(intptr_t target, |
| 3700 RelocInfo::Mode rmode, | 3765 RelocInfo::Mode rmode, |
| 3701 Condition cond, | 3766 Condition cond, |
| 3702 Register rs, | 3767 Register rs, |
| 3703 const Operand& rt, | 3768 const Operand& rt, |
| 3704 BranchDelaySlot bd) { | 3769 BranchDelaySlot bd) { |
| 3705 Label skip; | 3770 Label skip; |
| 3706 if (cond != cc_always) { | 3771 if (cond != cc_always) { |
| 3707 Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt); | 3772 Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt); |
| 3708 } | 3773 } |
| 3709 // The first instruction of 'li' may be placed in the delay slot. | 3774 // The first instruction of 'li' may be placed in the delay slot. |
| 3710 // This is not an issue, t9 is expected to be clobbered anyway. | 3775 // This is not an issue, t9 is expected to be clobbered anyway. |
| 3711 li(t9, Operand(target, rmode)); | 3776 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { |
| 3712 Jump(t9, al, zero_reg, Operand(zero_reg), bd); | 3777 uint32_t lui_offset, jic_offset; |
| 3778 UnpackTargetAddressUnsigned(target, lui_offset, jic_offset); |
| 3779 if (MustUseReg(rmode)) { |
| 3780 RecordRelocInfo(rmode, target); |
| 3781 } |
| 3782 lui(t9, lui_offset); |
| 3783 Jump(t9, jic_offset, al, zero_reg, Operand(zero_reg), bd); |
| 3784 } else { |
| 3785 li(t9, Operand(target, rmode)); |
| 3786 Jump(t9, 0, al, zero_reg, Operand(zero_reg), bd); |
| 3787 } |
| 3713 bind(&skip); | 3788 bind(&skip); |
| 3714 } | 3789 } |
| 3715 | 3790 |
| 3716 | 3791 |
| 3717 void MacroAssembler::Jump(Address target, | 3792 void MacroAssembler::Jump(Address target, |
| 3718 RelocInfo::Mode rmode, | 3793 RelocInfo::Mode rmode, |
| 3719 Condition cond, | 3794 Condition cond, |
| 3720 Register rs, | 3795 Register rs, |
| 3721 const Operand& rt, | 3796 const Operand& rt, |
| 3722 BranchDelaySlot bd) { | 3797 BranchDelaySlot bd) { |
| 3723 DCHECK(!RelocInfo::IsCodeTarget(rmode)); | 3798 DCHECK(!RelocInfo::IsCodeTarget(rmode)); |
| 3724 Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd); | 3799 Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd); |
| 3725 } | 3800 } |
| 3726 | 3801 |
| 3727 | 3802 |
| 3728 void MacroAssembler::Jump(Handle<Code> code, | 3803 void MacroAssembler::Jump(Handle<Code> code, |
| 3729 RelocInfo::Mode rmode, | 3804 RelocInfo::Mode rmode, |
| 3730 Condition cond, | 3805 Condition cond, |
| 3731 Register rs, | 3806 Register rs, |
| 3732 const Operand& rt, | 3807 const Operand& rt, |
| 3733 BranchDelaySlot bd) { | 3808 BranchDelaySlot bd) { |
| 3734 DCHECK(RelocInfo::IsCodeTarget(rmode)); | 3809 DCHECK(RelocInfo::IsCodeTarget(rmode)); |
| 3735 AllowDeferredHandleDereference embedding_raw_address; | 3810 AllowDeferredHandleDereference embedding_raw_address; |
| 3736 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd); | 3811 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd); |
| 3737 } | 3812 } |
| 3738 | 3813 |
| 3739 | 3814 int MacroAssembler::CallSize(Register target, int16_t offset, Condition cond, |
| 3740 int MacroAssembler::CallSize(Register target, | 3815 Register rs, const Operand& rt, |
| 3741 Condition cond, | |
| 3742 Register rs, | |
| 3743 const Operand& rt, | |
| 3744 BranchDelaySlot bd) { | 3816 BranchDelaySlot bd) { |
| 3745 int size = 0; | 3817 int size = 0; |
| 3746 | 3818 |
| 3747 if (cond == cc_always) { | 3819 if (cond == cc_always) { |
| 3748 size += 1; | 3820 size += 1; |
| 3749 } else { | 3821 } else { |
| 3750 size += 3; | 3822 size += 3; |
| 3751 } | 3823 } |
| 3752 | 3824 |
| 3753 if (bd == PROTECT && !IsMipsArchVariant(kMips32r6)) size += 1; | 3825 if (bd == PROTECT && !IsMipsArchVariant(kMips32r6)) size += 1; |
| 3754 | 3826 |
| 3827 if (!IsMipsArchVariant(kMips32r6) && offset != 0) { |
| 3828 size += 1; |
| 3829 } |
| 3830 |
| 3755 return size * kInstrSize; | 3831 return size * kInstrSize; |
| 3756 } | 3832 } |
| 3757 | 3833 |
| 3758 | 3834 |
| 3759 // Note: To call gcc-compiled C code on mips, you must call thru t9. | 3835 // Note: To call gcc-compiled C code on mips, you must call thru t9. |
| 3760 void MacroAssembler::Call(Register target, | 3836 void MacroAssembler::Call(Register target, int16_t offset, Condition cond, |
| 3761 Condition cond, | 3837 Register rs, const Operand& rt, BranchDelaySlot bd) { |
| 3762 Register rs, | 3838 DCHECK(is_int16(offset)); |
| 3763 const Operand& rt, | |
| 3764 BranchDelaySlot bd) { | |
| 3765 #ifdef DEBUG | 3839 #ifdef DEBUG |
| 3766 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0; | 3840 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0; |
| 3767 #endif | 3841 #endif |
| 3842 |
| 3843 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 3844 Label start; |
| 3845 bind(&start); |
| 3846 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { |
| 3847 if (cond == cc_always) { |
| 3848 jialc(target, offset); |
| 3849 } else { |
| 3850 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3851 Branch(2, NegateCondition(cond), rs, rt); |
| 3852 jialc(target, offset); |
| 3853 } |
| 3854 } else { |
| 3855 if (offset != 0) { |
| 3856 Addu(target, target, offset); |
| 3857 } |
| 3858 if (cond == cc_always) { |
| 3859 jalr(target); |
| 3860 } else { |
| 3861 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3862 Branch(2, NegateCondition(cond), rs, rt); |
| 3863 jalr(target); |
| 3864 } |
| 3865 // Emit a nop in the branch delay slot if required. |
| 3866 if (bd == PROTECT) nop(); |
| 3867 } |
| 3868 |
| 3869 #ifdef DEBUG |
| 3870 CHECK_EQ(size + CallSize(target, offset, cond, rs, rt, bd), |
| 3871 SizeOfCodeGeneratedSince(&start)); |
| 3872 #endif |
| 3873 } |
| 3874 |
| 3875 // Note: To call gcc-compiled C code on mips, you must call thru t9. |
| 3876 void MacroAssembler::Call(Register target, Register base, int16_t offset, |
| 3877 Condition cond, Register rs, const Operand& rt, |
| 3878 BranchDelaySlot bd) { |
| 3879 DCHECK(is_uint16(offset)); |
| 3880 #ifdef DEBUG |
| 3881 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0; |
| 3882 #endif |
| 3768 | 3883 |
| 3769 BlockTrampolinePoolScope block_trampoline_pool(this); | 3884 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 3770 Label start; | 3885 Label start; |
| 3771 bind(&start); | 3886 bind(&start); |
| 3772 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | 3887 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { |
| 3773 if (cond == cc_always) { | 3888 if (cond == cc_always) { |
| 3774 jialc(target, 0); | 3889 jialc(base, offset); |
| 3775 } else { | 3890 } else { |
| 3776 BRANCH_ARGS_CHECK(cond, rs, rt); | 3891 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3777 Branch(2, NegateCondition(cond), rs, rt); | 3892 Branch(2, NegateCondition(cond), rs, rt); |
| 3778 jialc(target, 0); | 3893 jialc(base, offset); |
| 3779 } | 3894 } |
| 3780 } else { | 3895 } else { |
| 3896 if (offset != 0) { |
| 3897 Addu(target, base, offset); |
| 3898 } else { // Call through target |
| 3899 if (!target.is(base)) mov(target, base); |
| 3900 } |
| 3781 if (cond == cc_always) { | 3901 if (cond == cc_always) { |
| 3782 jalr(target); | 3902 jalr(target); |
| 3783 } else { | 3903 } else { |
| 3784 BRANCH_ARGS_CHECK(cond, rs, rt); | 3904 BRANCH_ARGS_CHECK(cond, rs, rt); |
| 3785 Branch(2, NegateCondition(cond), rs, rt); | 3905 Branch(2, NegateCondition(cond), rs, rt); |
| 3786 jalr(target); | 3906 jalr(target); |
| 3787 } | 3907 } |
| 3788 // Emit a nop in the branch delay slot if required. | 3908 // Emit a nop in the branch delay slot if required. |
| 3789 if (bd == PROTECT) nop(); | 3909 if (bd == PROTECT) nop(); |
| 3790 } | 3910 } |
| 3791 | 3911 |
| 3792 #ifdef DEBUG | 3912 #ifdef DEBUG |
| 3793 CHECK_EQ(size + CallSize(target, cond, rs, rt, bd), | 3913 CHECK_EQ(size + CallSize(target, offset, cond, rs, rt, bd), |
| 3794 SizeOfCodeGeneratedSince(&start)); | 3914 SizeOfCodeGeneratedSince(&start)); |
| 3795 #endif | 3915 #endif |
| 3796 } | 3916 } |
| 3797 | 3917 |
| 3798 | 3918 |
| 3799 int MacroAssembler::CallSize(Address target, | 3919 int MacroAssembler::CallSize(Address target, |
| 3800 RelocInfo::Mode rmode, | 3920 RelocInfo::Mode rmode, |
| 3801 Condition cond, | 3921 Condition cond, |
| 3802 Register rs, | 3922 Register rs, |
| 3803 const Operand& rt, | 3923 const Operand& rt, |
| 3804 BranchDelaySlot bd) { | 3924 BranchDelaySlot bd) { |
| 3805 int size = CallSize(t9, cond, rs, rt, bd); | 3925 int size = CallSize(t9, 0, cond, rs, rt, bd); |
| 3806 return size + 2 * kInstrSize; | 3926 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && cond == cc_always) |
| 3927 return size + 1 * kInstrSize; |
| 3928 else |
| 3929 return size + 2 * kInstrSize; |
| 3807 } | 3930 } |
| 3808 | 3931 |
| 3809 | 3932 |
| 3810 void MacroAssembler::Call(Address target, | 3933 void MacroAssembler::Call(Address target, |
| 3811 RelocInfo::Mode rmode, | 3934 RelocInfo::Mode rmode, |
| 3812 Condition cond, | 3935 Condition cond, |
| 3813 Register rs, | 3936 Register rs, |
| 3814 const Operand& rt, | 3937 const Operand& rt, |
| 3815 BranchDelaySlot bd) { | 3938 BranchDelaySlot bd) { |
| 3816 BlockTrampolinePoolScope block_trampoline_pool(this); | 3939 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 3817 Label start; | 3940 Label start; |
| 3818 bind(&start); | 3941 bind(&start); |
| 3819 int32_t target_int = reinterpret_cast<int32_t>(target); | 3942 int32_t target_int = reinterpret_cast<int32_t>(target); |
| 3820 li(t9, Operand(target_int, rmode), CONSTANT_SIZE); | 3943 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && cond == cc_always) { |
| 3821 Call(t9, cond, rs, rt, bd); | 3944 uint32_t lui_offset, jialc_offset; |
| 3945 UnpackTargetAddressUnsigned(target_int, lui_offset, jialc_offset); |
| 3946 if (MustUseReg(rmode)) { |
| 3947 RecordRelocInfo(rmode, target_int); |
| 3948 } |
| 3949 lui(t9, lui_offset); |
| 3950 Call(t9, jialc_offset, cond, rs, rt, bd); |
| 3951 } else { |
| 3952 li(t9, Operand(target_int, rmode), CONSTANT_SIZE); |
| 3953 Call(t9, 0, cond, rs, rt, bd); |
| 3954 } |
| 3822 DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd), | 3955 DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd), |
| 3823 SizeOfCodeGeneratedSince(&start)); | 3956 SizeOfCodeGeneratedSince(&start)); |
| 3824 } | 3957 } |
| 3825 | 3958 |
| 3826 | 3959 |
| 3827 int MacroAssembler::CallSize(Handle<Code> code, | 3960 int MacroAssembler::CallSize(Handle<Code> code, |
| 3828 RelocInfo::Mode rmode, | 3961 RelocInfo::Mode rmode, |
| 3829 TypeFeedbackId ast_id, | 3962 TypeFeedbackId ast_id, |
| 3830 Condition cond, | 3963 Condition cond, |
| 3831 Register rs, | 3964 Register rs, |
| (...skipping 24 matching lines...) |
| 3856 Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd); | 3989 Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd); |
| 3857 DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd), | 3990 DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd), |
| 3858 SizeOfCodeGeneratedSince(&start)); | 3991 SizeOfCodeGeneratedSince(&start)); |
| 3859 } | 3992 } |
| 3860 | 3993 |
| 3861 | 3994 |
| 3862 void MacroAssembler::Ret(Condition cond, | 3995 void MacroAssembler::Ret(Condition cond, |
| 3863 Register rs, | 3996 Register rs, |
| 3864 const Operand& rt, | 3997 const Operand& rt, |
| 3865 BranchDelaySlot bd) { | 3998 BranchDelaySlot bd) { |
| 3866 Jump(ra, cond, rs, rt, bd); | 3999 Jump(ra, 0, cond, rs, rt, bd); |
| 3867 } | 4000 } |
| 3868 | 4001 |
| 3869 | 4002 |
| 3870 void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) { | 4003 void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) { |
| 3871 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && | 4004 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && |
| 3872 (!L->is_bound() || is_near_r6(L))) { | 4005 (!L->is_bound() || is_near_r6(L))) { |
| 3873 BranchShortHelperR6(0, L); | 4006 BranchShortHelperR6(0, L); |
| 3874 } else { | 4007 } else { |
| 3875 BlockTrampolinePoolScope block_trampoline_pool(this); | 4008 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 3876 uint32_t imm32; | 4009 uint32_t imm32; |
| (...skipping 33 matching lines...) |
| 3910 | 4043 |
| 3911 void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) { | 4044 void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) { |
| 3912 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && | 4045 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && |
| 3913 (!L->is_bound() || is_near_r6(L))) { | 4046 (!L->is_bound() || is_near_r6(L))) { |
| 3914 BranchAndLinkShortHelperR6(0, L); | 4047 BranchAndLinkShortHelperR6(0, L); |
| 3915 } else { | 4048 } else { |
| 3916 BlockTrampolinePoolScope block_trampoline_pool(this); | 4049 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 3917 uint32_t imm32; | 4050 uint32_t imm32; |
| 3918 imm32 = jump_address(L); | 4051 imm32 = jump_address(L); |
| 3919 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 4052 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
| 3920 uint32_t lui_offset, jic_offset; | 4053 uint32_t lui_offset, jialc_offset; |
| 3921 UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset); | 4054 UnpackTargetAddressUnsigned(imm32, lui_offset, jialc_offset); |
| 3922 { | 4055 { |
| 3923 BlockGrowBufferScope block_buf_growth(this); | 4056 BlockGrowBufferScope block_buf_growth(this); |
| 3924 // Buffer growth (and relocation) must be blocked for internal | 4057 // Buffer growth (and relocation) must be blocked for internal |
| 3925 // references until associated instructions are emitted and | 4058 // references until associated instructions are emitted and |
| 3926 // available to be patched. | 4059 // available to be patched. |
| 3927 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 4060 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
| 3928 lui(at, lui_offset); | 4061 lui(at, lui_offset); |
| 3929 jialc(at, jic_offset); | 4062 jialc(at, jialc_offset); |
| 3930 } | 4063 } |
| 3931 CheckBuffer(); | 4064 CheckBuffer(); |
| 3932 } else { | 4065 } else { |
| 3933 { | 4066 { |
| 3934 BlockGrowBufferScope block_buf_growth(this); | 4067 BlockGrowBufferScope block_buf_growth(this); |
| 3935 // Buffer growth (and relocation) must be blocked for internal | 4068 // Buffer growth (and relocation) must be blocked for internal |
| 3936 // references | 4069 // references |
| 3937 // until associated instructions are emitted and available to be | 4070 // until associated instructions are emitted and available to be |
| 3938 // patched. | 4071 // patched. |
| 3939 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 4072 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
| (...skipping 2325 matching lines...) |
| 6265 | 6398 |
| 6266 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, | 6399 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, |
| 6267 Register scratch) { | 6400 Register scratch) { |
| 6268 PrepareCallCFunction(num_reg_arguments, 0, scratch); | 6401 PrepareCallCFunction(num_reg_arguments, 0, scratch); |
| 6269 } | 6402 } |
| 6270 | 6403 |
| 6271 | 6404 |
| 6272 void MacroAssembler::CallCFunction(ExternalReference function, | 6405 void MacroAssembler::CallCFunction(ExternalReference function, |
| 6273 int num_reg_arguments, | 6406 int num_reg_arguments, |
| 6274 int num_double_arguments) { | 6407 int num_double_arguments) { |
| 6275 li(t8, Operand(function)); | 6408 if (IsMipsArchVariant(kMips32r6)) { |
| 6276 CallCFunctionHelper(t8, num_reg_arguments, num_double_arguments); | 6409 uint32_t lui_offset, jialc_offset; |
| 6410 UnpackTargetAddressUnsigned(Operand(function).immediate(), lui_offset, |
| 6411 jialc_offset); |
| 6412 if (MustUseReg(Operand(function).rmode())) { |
| 6413 RecordRelocInfo(Operand(function).rmode(), Operand(function).immediate()); |
| 6414 } |
| 6415 lui(t9, lui_offset); |
| 6416 CallCFunctionHelper(t9, num_reg_arguments, num_double_arguments, |
| 6417 jialc_offset); |
| 6418 } else { |
| 6419 li(t9, Operand(function)); |
| 6420 CallCFunctionHelper(t9, num_reg_arguments, num_double_arguments); |
| 6421 } |
| 6277 } | 6422 } |
| 6278 | 6423 |
| 6279 | 6424 |
| 6280 void MacroAssembler::CallCFunction(Register function, | 6425 void MacroAssembler::CallCFunction(Register function, |
| 6281 int num_reg_arguments, | 6426 int num_reg_arguments, |
| 6282 int num_double_arguments) { | 6427 int num_double_arguments) { |
| 6283 CallCFunctionHelper(function, num_reg_arguments, num_double_arguments); | 6428 CallCFunctionHelper(function, num_reg_arguments, num_double_arguments); |
| 6284 } | 6429 } |
| 6285 | 6430 |
| 6286 | 6431 |
| 6287 void MacroAssembler::CallCFunction(ExternalReference function, | 6432 void MacroAssembler::CallCFunction(ExternalReference function, |
| 6288 int num_arguments) { | 6433 int num_arguments) { |
| 6289 CallCFunction(function, num_arguments, 0); | 6434 CallCFunction(function, num_arguments, 0); |
| 6290 } | 6435 } |
| 6291 | 6436 |
| 6292 | 6437 |
| 6293 void MacroAssembler::CallCFunction(Register function, | 6438 void MacroAssembler::CallCFunction(Register function, |
| 6294 int num_arguments) { | 6439 int num_arguments) { |
| 6295 CallCFunction(function, num_arguments, 0); | 6440 CallCFunction(function, num_arguments, 0); |
| 6296 } | 6441 } |
| 6297 | 6442 |
| 6298 | |
| 6299 void MacroAssembler::CallCFunctionHelper(Register function, | 6443 void MacroAssembler::CallCFunctionHelper(Register function, |
| 6300 int num_reg_arguments, | 6444 int num_reg_arguments, |
| 6301 int num_double_arguments) { | 6445 int num_double_arguments, |
| 6446 uint32_t offset) { |
| 6302 DCHECK(has_frame()); | 6447 DCHECK(has_frame()); |
| 6303 // Make sure that the stack is aligned before calling a C function unless | 6448 // Make sure that the stack is aligned before calling a C function unless |
| 6304 // running in the simulator. The simulator has its own alignment check which | 6449 // running in the simulator. The simulator has its own alignment check which |
| 6305 // provides more information. | 6450 // provides more information. |
| 6306 // The argument stots are presumed to have been set up by | 6451 // The argument stots are presumed to have been set up by |
| 6307 // PrepareCallCFunction. The C function must be called via t9, for mips ABI. | 6452 // PrepareCallCFunction. The C function must be called via t9, for mips ABI. |
| 6308 | 6453 |
| 6309 #if V8_HOST_ARCH_MIPS | 6454 #if V8_HOST_ARCH_MIPS |
| 6310 if (emit_debug_code()) { | 6455 if (emit_debug_code()) { |
| 6311 int frame_alignment = base::OS::ActivationFrameAlignment(); | 6456 int frame_alignment = base::OS::ActivationFrameAlignment(); |
| (...skipping 13 matching lines...) |
| 6325 | 6470 |
| 6326 // Just call directly. The function called cannot cause a GC, or | 6471 // Just call directly. The function called cannot cause a GC, or |
| 6327 // allow preemption, so the return address in the link register | 6472 // allow preemption, so the return address in the link register |
| 6328 // stays correct. | 6473 // stays correct. |
| 6329 | 6474 |
| 6330 if (!function.is(t9)) { | 6475 if (!function.is(t9)) { |
| 6331 mov(t9, function); | 6476 mov(t9, function); |
| 6332 function = t9; | 6477 function = t9; |
| 6333 } | 6478 } |
| 6334 | 6479 |
| 6335 Call(function); | 6480 Call(function, offset); |
| 6336 | 6481 |
| 6337 int stack_passed_arguments = CalculateStackPassedWords( | 6482 int stack_passed_arguments = CalculateStackPassedWords( |
| 6338 num_reg_arguments, num_double_arguments); | 6483 num_reg_arguments, num_double_arguments); |
| 6339 | 6484 |
| 6340 if (base::OS::ActivationFrameAlignment() > kPointerSize) { | 6485 if (base::OS::ActivationFrameAlignment() > kPointerSize) { |
| 6341 lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 6486 lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
| 6342 } else { | 6487 } else { |
| 6343 Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize)); | 6488 Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize)); |
| 6344 } | 6489 } |
| 6345 } | 6490 } |
| (...skipping 397 matching lines...) |
| 6743 if (mag.shift > 0) sra(result, result, mag.shift); | 6888 if (mag.shift > 0) sra(result, result, mag.shift); |
| 6744 srl(at, dividend, 31); | 6889 srl(at, dividend, 31); |
| 6745 Addu(result, result, Operand(at)); | 6890 Addu(result, result, Operand(at)); |
| 6746 } | 6891 } |
| 6747 | 6892 |
| 6748 | 6893 |
| 6749 } // namespace internal | 6894 } // namespace internal |
| 6750 } // namespace v8 | 6895 } // namespace v8 |
| 6751 | 6896 |
| 6752 #endif // V8_TARGET_ARCH_MIPS | 6897 #endif // V8_TARGET_ARCH_MIPS |