OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/division-by-constant.h" | 10 #include "src/base/division-by-constant.h" |
(...skipping 3649 matching lines...) | |
3660 DCHECK(offset == 0); | 3660 DCHECK(offset == 0); |
3661 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 3661 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
3662 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt); | 3662 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt); |
3663 } else { | 3663 } else { |
3664 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot); | 3664 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot); |
3665 } | 3665 } |
3666 } | 3666 } |
3667 return false; | 3667 return false; |
3668 } | 3668 } |
3669 | 3669 |
3670 | 3670 void MacroAssembler::Jump(Register target, uint32_t offset, Condition cond, |
3671 void MacroAssembler::Jump(Register target, | 3671 Register rs, const Operand& rt, BranchDelaySlot bd) { |
3672 Condition cond, | |
3673 Register rs, | |
3674 const Operand& rt, | |
3675 BranchDelaySlot bd) { | |
3676 BlockTrampolinePoolScope block_trampoline_pool(this); | 3672 BlockTrampolinePoolScope block_trampoline_pool(this); |
3677 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | 3673 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { |
3678 if (cond == cc_always) { | 3674 if (cond == cc_always) { |
3679 jic(target, 0); | 3675 jic(target, offset); |
3680 } else { | 3676 } else { |
3681 BRANCH_ARGS_CHECK(cond, rs, rt); | 3677 BRANCH_ARGS_CHECK(cond, rs, rt); |
3682 Branch(2, NegateCondition(cond), rs, rt); | 3678 Branch(2, NegateCondition(cond), rs, rt); |
3683 jic(target, 0); | 3679 jic(target, offset); |
3684 } | 3680 } |
3685 } else { | 3681 } else { |
3686 if (cond == cc_always) { | 3682 if (cond == cc_always) { |
ivica.bogosavljevic
2016/12/05 10:48:48
What happens in this branch if offset != 0. We sho
miran.karic
2016/12/05 12:44:48
Currently offset is used only for r6, so here it i
| |
3687 jr(target); | 3683 jr(target); |
3688 } else { | 3684 } else { |
3689 BRANCH_ARGS_CHECK(cond, rs, rt); | 3685 BRANCH_ARGS_CHECK(cond, rs, rt); |
3690 Branch(2, NegateCondition(cond), rs, rt); | 3686 Branch(2, NegateCondition(cond), rs, rt); |
3691 jr(target); | 3687 jr(target); |
3692 } | 3688 } |
3693 // Emit a nop in the branch delay slot if required. | 3689 // Emit a nop in the branch delay slot if required. |
3694 if (bd == PROTECT) nop(); | 3690 if (bd == PROTECT) nop(); |
3695 } | 3691 } |
3696 } | 3692 } |
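For context on the `offset` parameter discussed in the thread above: on MIPS32r6, `jic`/`jialc` jump to `rs + sign_extend(imm16)`, so `Jump(Register)` can fold a 16-bit low half of the target into the jump instruction itself, while pre-r6 `jr` takes no immediate, which is why the offset is only consumed on the r6 path. A minimal standalone sketch of that effective-address computation (not V8 code, just the arithmetic):

```cpp
#include <cstdint>

// Effective jump target on MIPS32r6: jic/jialc add a sign-extended 16-bit
// immediate to the register, so a low half of the address can ride on the
// jump instruction itself.
uint32_t R6JumpTarget(uint32_t rs, uint16_t offset16) {
  int32_t sign_extended = static_cast<int16_t>(offset16);
  return rs + static_cast<uint32_t>(sign_extended);
}

// Pre-r6 jr has no immediate operand: the register must already hold the
// complete target, which is why a non-zero offset is meaningless on that path.
uint32_t PreR6JumpTarget(uint32_t rs) { return rs; }
```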
3697 | 3693 |
3698 | 3694 |
3699 void MacroAssembler::Jump(intptr_t target, | 3695 void MacroAssembler::Jump(intptr_t target, |
3700 RelocInfo::Mode rmode, | 3696 RelocInfo::Mode rmode, |
3701 Condition cond, | 3697 Condition cond, |
3702 Register rs, | 3698 Register rs, |
3703 const Operand& rt, | 3699 const Operand& rt, |
3704 BranchDelaySlot bd) { | 3700 BranchDelaySlot bd) { |
3705 Label skip; | 3701 Label skip; |
3706 if (cond != cc_always) { | 3702 if (cond != cc_always) { |
3707 Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt); | 3703 Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt); |
3708 } | 3704 } |
3709 // The first instruction of 'li' may be placed in the delay slot. | 3705 // The first instruction of 'li' may be placed in the delay slot. |
3710 // This is not an issue, t9 is expected to be clobbered anyway. | 3706 // This is not an issue, t9 is expected to be clobbered anyway. |
3711 li(t9, Operand(target, rmode)); | 3707 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { |
3712 Jump(t9, al, zero_reg, Operand(zero_reg), bd); | 3708 uint32_t lui_offset, jic_offset; |
3709 UnpackTargetAddressUnsigned(target, lui_offset, jic_offset); | |
3710 if (MustUseReg(rmode)) { | |
3711 RecordRelocInfo(rmode, target); | |
ivica.bogosavljevic
2016/12/05 10:48:48
Why are we recording reloc info for mipsr6 but not
miran.karic
2016/12/05 12:44:48
We do it in li() for lui, ori, and here for lui, j
| |
3712 } | |
3713 lui(t9, lui_offset); | |
3714 Jump(t9, jic_offset, al, zero_reg, Operand(zero_reg), bd); | |
3715 } else { | |
3716 li(t9, Operand(target, rmode)); | |
3717 Jump(t9, 0, al, zero_reg, Operand(zero_reg), bd); | |
3718 } | |
3713 bind(&skip); | 3719 bind(&skip); |
3714 } | 3720 } |
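The `lui`/`jic` pair in the r6 branch above reconstructs the full 32-bit target from two 16-bit halves. Below is a sketch of the split that `UnpackTargetAddressUnsigned` is presumed to perform (the real helper lives in the assembler; this only models the arithmetic): because `jic` sign-extends its offset, a set bit 15 subtracts 0x10000 at jump time, so the upper half carries a +1 to compensate.

```cpp
#include <cassert>
#include <cstdint>

// Split a 32-bit address so that lui(t9, hi) followed by jic(t9, lo) lands
// exactly on `address` (presumed behaviour of UnpackTargetAddressUnsigned).
void SplitForLuiJic(uint32_t address, uint32_t* hi, uint32_t* lo) {
  *lo = address & 0xFFFF;
  // jic sign-extends `lo`; if bit 15 is set that subtracts 0x10000 at jump
  // time, so bump the upper half by one to compensate.
  *hi = ((address >> 16) + ((address >> 15) & 1)) & 0xFFFF;
}

// What the emitted lui + jic pair computes.
uint32_t EffectiveTarget(uint32_t hi, uint32_t lo) {
  uint32_t t9 = hi << 16;                                       // lui t9, hi
  return t9 + static_cast<uint32_t>(static_cast<int16_t>(lo));  // jic t9, lo
}

int main() {
  uint32_t hi, lo;
  SplitForLuiJic(0x12348000u, &hi, &lo);
  assert(EffectiveTarget(hi, lo) == 0x12348000u);
}
```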
3715 | 3721 |
3716 | 3722 |
3717 void MacroAssembler::Jump(Address target, | 3723 void MacroAssembler::Jump(Address target, |
3718 RelocInfo::Mode rmode, | 3724 RelocInfo::Mode rmode, |
3719 Condition cond, | 3725 Condition cond, |
3720 Register rs, | 3726 Register rs, |
3721 const Operand& rt, | 3727 const Operand& rt, |
3722 BranchDelaySlot bd) { | 3728 BranchDelaySlot bd) { |
(...skipping 27 matching lines...) | |
3750 size += 3; | 3756 size += 3; |
3751 } | 3757 } |
3752 | 3758 |
3753 if (bd == PROTECT && !IsMipsArchVariant(kMips32r6)) size += 1; | 3759 if (bd == PROTECT && !IsMipsArchVariant(kMips32r6)) size += 1; |
3754 | 3760 |
3755 return size * kInstrSize; | 3761 return size * kInstrSize; |
3756 } | 3762 } |
3757 | 3763 |
3758 | 3764 |
3759 // Note: To call gcc-compiled C code on mips, you must call thru t9. | 3765 // Note: To call gcc-compiled C code on mips, you must call thru t9. |
3760 void MacroAssembler::Call(Register target, | 3766 void MacroAssembler::Call(Register target, uint32_t offset, Condition cond, |
3761 Condition cond, | 3767 Register rs, const Operand& rt, BranchDelaySlot bd) { |
3762 Register rs, | |
3763 const Operand& rt, | |
3764 BranchDelaySlot bd) { | |
3765 #ifdef DEBUG | 3768 #ifdef DEBUG |
3766 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0; | 3769 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0; |
3767 #endif | 3770 #endif |
3768 | 3771 |
3769 BlockTrampolinePoolScope block_trampoline_pool(this); | 3772 BlockTrampolinePoolScope block_trampoline_pool(this); |
3770 Label start; | 3773 Label start; |
3771 bind(&start); | 3774 bind(&start); |
3772 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { | 3775 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) { |
3773 if (cond == cc_always) { | 3776 if (cond == cc_always) { |
3774 jialc(target, 0); | 3777 jialc(target, offset); |
3775 } else { | 3778 } else { |
3776 BRANCH_ARGS_CHECK(cond, rs, rt); | 3779 BRANCH_ARGS_CHECK(cond, rs, rt); |
3777 Branch(2, NegateCondition(cond), rs, rt); | 3780 Branch(2, NegateCondition(cond), rs, rt); |
3778 jialc(target, 0); | 3781 jialc(target, offset); |
3779 } | 3782 } |
3780 } else { | 3783 } else { |
3781 if (cond == cc_always) { | 3784 if (cond == cc_always) { |
3782 jalr(target); | 3785 jalr(target); |
ivica.bogosavljevic
2016/12/05 10:48:47
same as for Jump
| |
3783 } else { | 3786 } else { |
3784 BRANCH_ARGS_CHECK(cond, rs, rt); | 3787 BRANCH_ARGS_CHECK(cond, rs, rt); |
3785 Branch(2, NegateCondition(cond), rs, rt); | 3788 Branch(2, NegateCondition(cond), rs, rt); |
3786 jalr(target); | 3789 jalr(target); |
3787 } | 3790 } |
3788 // Emit a nop in the branch delay slot if required. | 3791 // Emit a nop in the branch delay slot if required. |
3789 if (bd == PROTECT) nop(); | 3792 if (bd == PROTECT) nop(); |
3790 } | 3793 } |
3791 | 3794 |
3792 #ifdef DEBUG | 3795 #ifdef DEBUG |
3793 CHECK_EQ(size + CallSize(target, cond, rs, rt, bd), | 3796 CHECK_EQ(size + CallSize(target, cond, rs, rt, bd), |
3794 SizeOfCodeGeneratedSince(&start)); | 3797 SizeOfCodeGeneratedSince(&start)); |
3795 #endif | 3798 #endif |
3796 } | 3799 } |
3797 | 3800 |
3798 | 3801 |
3799 int MacroAssembler::CallSize(Address target, | 3802 int MacroAssembler::CallSize(Address target, |
3800 RelocInfo::Mode rmode, | 3803 RelocInfo::Mode rmode, |
3801 Condition cond, | 3804 Condition cond, |
3802 Register rs, | 3805 Register rs, |
3803 const Operand& rt, | 3806 const Operand& rt, |
3804 BranchDelaySlot bd) { | 3807 BranchDelaySlot bd) { |
3805 int size = CallSize(t9, cond, rs, rt, bd); | 3808 int size = CallSize(t9, cond, rs, rt, bd); |
3806 return size + 2 * kInstrSize; | 3809 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && cond == cc_always) |
3810 return size + 1 * kInstrSize; | |
3811 else | |
3812 return size + 2 * kInstrSize; | |
3807 } | 3813 } |
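The size accounting above rests on the assumption that `CallSize(t9, ...)` already covers the `jialc`/`jalr` (plus any delay-slot nop) emitted by `Call(Register)`: on r6 with a protected slot and an unconditional call, only a single `lui` precedes the `jialc`, while every other path materializes the full target with `li` (`lui` + `ori`), i.e. two instructions. A rough standalone model of that bookkeeping (constants assumed here, not taken from V8 headers):

```cpp
#include <cstdio>

// Assumed constant: MIPS instructions are 4 bytes.
constexpr int kInstrSize = 4;

// Extra instructions Call(Address) places in front of Call(Register):
//  - r6, PROTECT, unconditional: a lone lui (jialc carries the low half) -> 1
//  - everything else: li materializes the full target as lui + ori       -> 2
int ExtraCallInstructions(bool is_r6, bool protect_slot, bool unconditional) {
  return (is_r6 && protect_slot && unconditional) ? 1 : 2;
}

int main() {
  std::printf("r6/PROTECT/al: +%d bytes\n",
              ExtraCallInstructions(true, true, true) * kInstrSize);
  std::printf("other paths  : +%d bytes\n",
              ExtraCallInstructions(false, true, true) * kInstrSize);
}
```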
3808 | 3814 |
3809 | 3815 |
3810 void MacroAssembler::Call(Address target, | 3816 void MacroAssembler::Call(Address target, |
3811 RelocInfo::Mode rmode, | 3817 RelocInfo::Mode rmode, |
3812 Condition cond, | 3818 Condition cond, |
3813 Register rs, | 3819 Register rs, |
3814 const Operand& rt, | 3820 const Operand& rt, |
3815 BranchDelaySlot bd) { | 3821 BranchDelaySlot bd) { |
3816 BlockTrampolinePoolScope block_trampoline_pool(this); | 3822 BlockTrampolinePoolScope block_trampoline_pool(this); |
3817 Label start; | 3823 Label start; |
3818 bind(&start); | 3824 bind(&start); |
3819 int32_t target_int = reinterpret_cast<int32_t>(target); | 3825 int32_t target_int = reinterpret_cast<int32_t>(target); |
3820 li(t9, Operand(target_int, rmode), CONSTANT_SIZE); | 3826 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && cond == cc_always) { |
3821 Call(t9, cond, rs, rt, bd); | 3827 uint32_t lui_offset, jialc_offset; |
3828 UnpackTargetAddressUnsigned(target_int, lui_offset, jialc_offset); | |
3829 if (MustUseReg(rmode)) { | |
3830 RecordRelocInfo(rmode, target_int); | |
ivica.bogosavljevic
2016/12/05 10:48:47
same as for :Call from above
| |
3831 } | |
3832 lui(t9, lui_offset); | |
3833 Call(t9, jialc_offset, cond, rs, rt, bd); | |
3834 } else { | |
3835 li(t9, Operand(target_int, rmode), CONSTANT_SIZE); | |
3836 Call(t9, 0, cond, rs, rt, bd); | |
3837 } | |
3822 DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd), | 3838 DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd), |
3823 SizeOfCodeGeneratedSince(&start)); | 3839 SizeOfCodeGeneratedSince(&start)); |
3824 } | 3840 } |
3825 | 3841 |
3826 | 3842 |
3827 int MacroAssembler::CallSize(Handle<Code> code, | 3843 int MacroAssembler::CallSize(Handle<Code> code, |
3828 RelocInfo::Mode rmode, | 3844 RelocInfo::Mode rmode, |
3829 TypeFeedbackId ast_id, | 3845 TypeFeedbackId ast_id, |
3830 Condition cond, | 3846 Condition cond, |
3831 Register rs, | 3847 Register rs, |
(...skipping 24 matching lines...) | |
3856 Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd); | 3872 Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd); |
3857 DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd), | 3873 DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd), |
3858 SizeOfCodeGeneratedSince(&start)); | 3874 SizeOfCodeGeneratedSince(&start)); |
3859 } | 3875 } |
3860 | 3876 |
3861 | 3877 |
3862 void MacroAssembler::Ret(Condition cond, | 3878 void MacroAssembler::Ret(Condition cond, |
3863 Register rs, | 3879 Register rs, |
3864 const Operand& rt, | 3880 const Operand& rt, |
3865 BranchDelaySlot bd) { | 3881 BranchDelaySlot bd) { |
3866 Jump(ra, cond, rs, rt, bd); | 3882 Jump(ra, 0, cond, rs, rt, bd); |
3867 } | 3883 } |
3868 | 3884 |
3869 | 3885 |
3870 void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) { | 3886 void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) { |
3871 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && | 3887 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && |
3872 (!L->is_bound() || is_near_r6(L))) { | 3888 (!L->is_bound() || is_near_r6(L))) { |
3873 BranchShortHelperR6(0, L); | 3889 BranchShortHelperR6(0, L); |
3874 } else { | 3890 } else { |
3875 BlockTrampolinePoolScope block_trampoline_pool(this); | 3891 BlockTrampolinePoolScope block_trampoline_pool(this); |
3876 uint32_t imm32; | 3892 uint32_t imm32; |
(...skipping 33 matching lines...) | |
3910 | 3926 |
3911 void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) { | 3927 void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) { |
3912 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && | 3928 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT && |
3913 (!L->is_bound() || is_near_r6(L))) { | 3929 (!L->is_bound() || is_near_r6(L))) { |
3914 BranchAndLinkShortHelperR6(0, L); | 3930 BranchAndLinkShortHelperR6(0, L); |
3915 } else { | 3931 } else { |
3916 BlockTrampolinePoolScope block_trampoline_pool(this); | 3932 BlockTrampolinePoolScope block_trampoline_pool(this); |
3917 uint32_t imm32; | 3933 uint32_t imm32; |
3918 imm32 = jump_address(L); | 3934 imm32 = jump_address(L); |
3919 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { | 3935 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
3920 uint32_t lui_offset, jic_offset; | 3936 uint32_t lui_offset, jialc_offset; |
3921 UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset); | 3937 UnpackTargetAddressUnsigned(imm32, lui_offset, jialc_offset); |
3922 { | 3938 { |
3923 BlockGrowBufferScope block_buf_growth(this); | 3939 BlockGrowBufferScope block_buf_growth(this); |
3924 // Buffer growth (and relocation) must be blocked for internal | 3940 // Buffer growth (and relocation) must be blocked for internal |
3925 // references until associated instructions are emitted and | 3941 // references until associated instructions are emitted and |
3926 // available to be patched. | 3942 // available to be patched. |
3927 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 3943 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
3928 lui(at, lui_offset); | 3944 lui(at, lui_offset); |
3929 jialc(at, jic_offset); | 3945 jialc(at, jialc_offset); |
3930 } | 3946 } |
3931 CheckBuffer(); | 3947 CheckBuffer(); |
3932 } else { | 3948 } else { |
3933 { | 3949 { |
3934 BlockGrowBufferScope block_buf_growth(this); | 3950 BlockGrowBufferScope block_buf_growth(this); |
3935 // Buffer growth (and relocation) must be blocked for internal | 3951 // Buffer growth (and relocation) must be blocked for internal |
3936 // references | 3952 // references |
3937 // until associated instructions are emitted and available to be | 3953 // until associated instructions are emitted and available to be |
3938 // patched. | 3954 // patched. |
3939 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 3955 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
(...skipping 2325 matching lines...) | |
6265 | 6281 |
6266 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, | 6282 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, |
6267 Register scratch) { | 6283 Register scratch) { |
6268 PrepareCallCFunction(num_reg_arguments, 0, scratch); | 6284 PrepareCallCFunction(num_reg_arguments, 0, scratch); |
6269 } | 6285 } |
6270 | 6286 |
6271 | 6287 |
6272 void MacroAssembler::CallCFunction(ExternalReference function, | 6288 void MacroAssembler::CallCFunction(ExternalReference function, |
6273 int num_reg_arguments, | 6289 int num_reg_arguments, |
6274 int num_double_arguments) { | 6290 int num_double_arguments) { |
6275 li(t8, Operand(function)); | 6291 if (IsMipsArchVariant(kMips32r6)) { |
6276 CallCFunctionHelper(t8, num_reg_arguments, num_double_arguments); | 6292 uint32_t lui_offset, jialc_offset; |
6293 UnpackTargetAddressUnsigned(Operand(function).immediate(), lui_offset, | |
6294 jialc_offset); | |
6295 if (MustUseReg(Operand(function).rmode())) { | |
6296 RecordRelocInfo(Operand(function).rmode(), Operand(function).immediate()); | |
6297 } | |
6298 lui(t9, lui_offset); | |
6299 CallCFunctionHelper(t9, num_reg_arguments, num_double_arguments, | |
6300 jialc_offset); | |
6301 } else { | |
6302 li(t9, Operand(function)); | |
6303 CallCFunctionHelper(t9, num_reg_arguments, num_double_arguments); | |
6304 } | |
6277 } | 6305 } |
6278 | 6306 |
6279 | 6307 |
6280 void MacroAssembler::CallCFunction(Register function, | 6308 void MacroAssembler::CallCFunction(Register function, |
6281 int num_reg_arguments, | 6309 int num_reg_arguments, |
6282 int num_double_arguments) { | 6310 int num_double_arguments) { |
6283 CallCFunctionHelper(function, num_reg_arguments, num_double_arguments); | 6311 CallCFunctionHelper(function, num_reg_arguments, num_double_arguments); |
6284 } | 6312 } |
6285 | 6313 |
6286 | 6314 |
6287 void MacroAssembler::CallCFunction(ExternalReference function, | 6315 void MacroAssembler::CallCFunction(ExternalReference function, |
6288 int num_arguments) { | 6316 int num_arguments) { |
6289 CallCFunction(function, num_arguments, 0); | 6317 CallCFunction(function, num_arguments, 0); |
6290 } | 6318 } |
6291 | 6319 |
6292 | 6320 |
6293 void MacroAssembler::CallCFunction(Register function, | 6321 void MacroAssembler::CallCFunction(Register function, |
6294 int num_arguments) { | 6322 int num_arguments) { |
6295 CallCFunction(function, num_arguments, 0); | 6323 CallCFunction(function, num_arguments, 0); |
6296 } | 6324 } |
6297 | 6325 |
6298 | |
6299 void MacroAssembler::CallCFunctionHelper(Register function, | 6326 void MacroAssembler::CallCFunctionHelper(Register function, |
6300 int num_reg_arguments, | 6327 int num_reg_arguments, |
6301 int num_double_arguments) { | 6328 int num_double_arguments, |
6329 uint32_t offset) { | |
6302 DCHECK(has_frame()); | 6330 DCHECK(has_frame()); |
6303 // Make sure that the stack is aligned before calling a C function unless | 6331 // Make sure that the stack is aligned before calling a C function unless |
6304 // running in the simulator. The simulator has its own alignment check which | 6332 // running in the simulator. The simulator has its own alignment check which |
6305 // provides more information. | 6333 // provides more information. |
6306 // The argument slots are presumed to have been set up by | 6334 // The argument slots are presumed to have been set up by |
6307 // PrepareCallCFunction. The C function must be called via t9, for mips ABI. | 6335 // PrepareCallCFunction. The C function must be called via t9, for mips ABI. |
6308 | 6336 |
6309 #if V8_HOST_ARCH_MIPS | 6337 #if V8_HOST_ARCH_MIPS |
6310 if (emit_debug_code()) { | 6338 if (emit_debug_code()) { |
6311 int frame_alignment = base::OS::ActivationFrameAlignment(); | 6339 int frame_alignment = base::OS::ActivationFrameAlignment(); |
(...skipping 13 matching lines...) | |
6325 | 6353 |
6326 // Just call directly. The function called cannot cause a GC, or | 6354 // Just call directly. The function called cannot cause a GC, or |
6327 // allow preemption, so the return address in the link register | 6355 // allow preemption, so the return address in the link register |
6328 // stays correct. | 6356 // stays correct. |
6329 | 6357 |
6330 if (!function.is(t9)) { | 6358 if (!function.is(t9)) { |
6331 mov(t9, function); | 6359 mov(t9, function); |
6332 function = t9; | 6360 function = t9; |
6333 } | 6361 } |
6334 | 6362 |
6335 Call(function); | 6363 Call(function, offset); |
6336 | 6364 |
6337 int stack_passed_arguments = CalculateStackPassedWords( | 6365 int stack_passed_arguments = CalculateStackPassedWords( |
6338 num_reg_arguments, num_double_arguments); | 6366 num_reg_arguments, num_double_arguments); |
6339 | 6367 |
6340 if (base::OS::ActivationFrameAlignment() > kPointerSize) { | 6368 if (base::OS::ActivationFrameAlignment() > kPointerSize) { |
6341 lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 6369 lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
6342 } else { | 6370 } else { |
6343 Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize)); | 6371 Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize)); |
6344 } | 6372 } |
6345 } | 6373 } |
(...skipping 397 matching lines...) | |
6743 if (mag.shift > 0) sra(result, result, mag.shift); | 6771 if (mag.shift > 0) sra(result, result, mag.shift); |
6744 srl(at, dividend, 31); | 6772 srl(at, dividend, 31); |
6745 Addu(result, result, Operand(at)); | 6773 Addu(result, result, Operand(at)); |
6746 } | 6774 } |
6747 | 6775 |
6748 | 6776 |
6749 } // namespace internal | 6777 } // namespace internal |
6750 } // namespace v8 | 6778 } // namespace v8 |
6751 | 6779 |
6752 #endif // V8_TARGET_ARCH_MIPS | 6780 #endif // V8_TARGET_ARCH_MIPS |