| Index: src/mips/macro-assembler-mips.cc
|
| diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
|
| index c3abe4fa6fe2f5405d00441331d075aedf770077..75c8a1dc8dd69ddfefc5da0fff4548c0feaabacc 100644
|
| --- a/src/mips/macro-assembler-mips.cc
|
| +++ b/src/mips/macro-assembler-mips.cc
|
| @@ -3667,22 +3667,87 @@ bool MacroAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L,
|
| return false;
|
| }
|
|
|
| +void MacroAssembler::Jump(Register target, int16_t offset, Condition cond,
|
| + Register rs, const Operand& rt, BranchDelaySlot bd) {
|
| + DCHECK(is_int16(offset));
|
| + BlockTrampolinePoolScope block_trampoline_pool(this);
|
| + if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) {
|
| + if (cond == cc_always) {
|
| + jic(target, offset);
|
| + } else {
|
| + BRANCH_ARGS_CHECK(cond, rs, rt);
|
| + Branch(2, NegateCondition(cond), rs, rt);
|
| + jic(target, offset);
|
| + }
|
| + } else {
|
| + if (offset != 0) {
|
| + Addu(target, target, offset);
|
| + }
|
| + if (cond == cc_always) {
|
| + jr(target);
|
| + } else {
|
| + BRANCH_ARGS_CHECK(cond, rs, rt);
|
| + Branch(2, NegateCondition(cond), rs, rt);
|
| + jr(target);
|
| + }
|
| + // Emit a nop in the branch delay slot if required.
|
| + if (bd == PROTECT) nop();
|
| + }
|
| +}
|
|
|
| -void MacroAssembler::Jump(Register target,
|
| - Condition cond,
|
| - Register rs,
|
| - const Operand& rt,
|
| +void MacroAssembler::Jump(Register target, Register base, int16_t offset,
|
| + Condition cond, Register rs, const Operand& rt,
|
| BranchDelaySlot bd) {
|
| + DCHECK(is_int16(offset));
|
| BlockTrampolinePoolScope block_trampoline_pool(this);
|
| if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) {
|
| if (cond == cc_always) {
|
| - jic(target, 0);
|
| + jic(base, offset);
|
| } else {
|
| BRANCH_ARGS_CHECK(cond, rs, rt);
|
| Branch(2, NegateCondition(cond), rs, rt);
|
| - jic(target, 0);
|
| + jic(base, offset);
|
| }
|
| } else {
|
| + if (offset != 0) {
|
| + Addu(target, base, offset);
|
| + } else { // Jump through target
|
| + if (!target.is(base)) mov(target, base);
|
| + }
|
| + if (cond == cc_always) {
|
| + jr(target);
|
| + } else {
|
| + BRANCH_ARGS_CHECK(cond, rs, rt);
|
| + Branch(2, NegateCondition(cond), rs, rt);
|
| + jr(target);
|
| + }
|
| + // Emit a nop in the branch delay slot if required.
|
| + if (bd == PROTECT) nop();
|
| + }
|
| +}
|
| +
|
| +void MacroAssembler::Jump(Register target, const Operand& offset,
|
| + Condition cond, Register rs, const Operand& rt,
|
| + BranchDelaySlot bd) {
|
| + BlockTrampolinePoolScope block_trampoline_pool(this);
|
| + if (IsMipsArchVariant(kMips32r6) && bd == PROTECT &&
|
| + !is_int16(offset.immediate())) {
|
| + uint32_t aui_offset, jic_offset;
|
| + Assembler::UnpackTargetAddressUnsigned(offset.immediate(), aui_offset,
|
| + jic_offset);
|
| + RecordRelocInfo(RelocInfo::EXTERNAL_REFERENCE, offset.immediate());
|
| + aui(target, target, aui_offset);
|
| + if (cond == cc_always) {
|
| + jic(target, jic_offset);
|
| + } else {
|
| + BRANCH_ARGS_CHECK(cond, rs, rt);
|
| + Branch(2, NegateCondition(cond), rs, rt);
|
| + jic(target, jic_offset);
|
| + }
|
| + } else {
|
| + if (offset.immediate() != 0) {
|
| + Addu(target, target, offset);
|
| + }
|
| if (cond == cc_always) {
|
| jr(target);
|
| } else {
|
| @@ -3708,8 +3773,18 @@ void MacroAssembler::Jump(intptr_t target,
|
| }
|
| // The first instruction of 'li' may be placed in the delay slot.
|
| // This is not an issue, t9 is expected to be clobbered anyway.
|
| - li(t9, Operand(target, rmode));
|
| - Jump(t9, al, zero_reg, Operand(zero_reg), bd);
|
| + if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) {
|
| + uint32_t lui_offset, jic_offset;
|
| + UnpackTargetAddressUnsigned(target, lui_offset, jic_offset);
|
| + if (MustUseReg(rmode)) {
|
| + RecordRelocInfo(rmode, target);
|
| + }
|
| + lui(t9, lui_offset);
|
| + Jump(t9, jic_offset, al, zero_reg, Operand(zero_reg), bd);
|
| + } else {
|
| + li(t9, Operand(target, rmode));
|
| + Jump(t9, 0, al, zero_reg, Operand(zero_reg), bd);
|
| + }
|
| bind(&skip);
|
| }
|
|
|
| @@ -3736,11 +3811,8 @@ void MacroAssembler::Jump(Handle<Code> code,
|
| Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
|
| }
|
|
|
| -
|
| -int MacroAssembler::CallSize(Register target,
|
| - Condition cond,
|
| - Register rs,
|
| - const Operand& rt,
|
| +int MacroAssembler::CallSize(Register target, int16_t offset, Condition cond,
|
| + Register rs, const Operand& rt,
|
| BranchDelaySlot bd) {
|
| int size = 0;
|
|
|
| @@ -3752,16 +3824,59 @@ int MacroAssembler::CallSize(Register target,
|
|
|
| if (bd == PROTECT && !IsMipsArchVariant(kMips32r6)) size += 1;
|
|
|
| + if (!IsMipsArchVariant(kMips32r6) && offset != 0) {
|
| + size += 1;
|
| + }
|
| +
|
| return size * kInstrSize;
|
| }
|
|
|
|
|
| // Note: To call gcc-compiled C code on mips, you must call thru t9.
|
| -void MacroAssembler::Call(Register target,
|
| - Condition cond,
|
| - Register rs,
|
| - const Operand& rt,
|
| +void MacroAssembler::Call(Register target, int16_t offset, Condition cond,
|
| + Register rs, const Operand& rt, BranchDelaySlot bd) {
|
| + DCHECK(is_int16(offset));
|
| +#ifdef DEBUG
|
| + int size = IsPrevInstrCompactBranch() ? kInstrSize : 0;
|
| +#endif
|
| +
|
| + BlockTrampolinePoolScope block_trampoline_pool(this);
|
| + Label start;
|
| + bind(&start);
|
| + if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) {
|
| + if (cond == cc_always) {
|
| + jialc(target, offset);
|
| + } else {
|
| + BRANCH_ARGS_CHECK(cond, rs, rt);
|
| + Branch(2, NegateCondition(cond), rs, rt);
|
| + jialc(target, offset);
|
| + }
|
| + } else {
|
| + if (offset != 0) {
|
| + Addu(target, target, offset);
|
| + }
|
| + if (cond == cc_always) {
|
| + jalr(target);
|
| + } else {
|
| + BRANCH_ARGS_CHECK(cond, rs, rt);
|
| + Branch(2, NegateCondition(cond), rs, rt);
|
| + jalr(target);
|
| + }
|
| + // Emit a nop in the branch delay slot if required.
|
| + if (bd == PROTECT) nop();
|
| + }
|
| +
|
| +#ifdef DEBUG
|
| + CHECK_EQ(size + CallSize(target, offset, cond, rs, rt, bd),
|
| + SizeOfCodeGeneratedSince(&start));
|
| +#endif
|
| +}
|
| +
|
| +// Note: To call gcc-compiled C code on mips, you must call thru t9.
|
| +void MacroAssembler::Call(Register target, Register base, int16_t offset,
|
| + Condition cond, Register rs, const Operand& rt,
|
| BranchDelaySlot bd) {
|
| + DCHECK(is_uint16(offset));
|
| #ifdef DEBUG
|
| int size = IsPrevInstrCompactBranch() ? kInstrSize : 0;
|
| #endif
|
| @@ -3771,13 +3886,18 @@ void MacroAssembler::Call(Register target,
|
| bind(&start);
|
| if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) {
|
| if (cond == cc_always) {
|
| - jialc(target, 0);
|
| + jialc(base, offset);
|
| } else {
|
| BRANCH_ARGS_CHECK(cond, rs, rt);
|
| Branch(2, NegateCondition(cond), rs, rt);
|
| - jialc(target, 0);
|
| + jialc(base, offset);
|
| }
|
| } else {
|
| + if (offset != 0) {
|
| + Addu(target, base, offset);
|
| + } else { // Call through target
|
| + if (!target.is(base)) mov(target, base);
|
| + }
|
| if (cond == cc_always) {
|
| jalr(target);
|
| } else {
|
| @@ -3790,7 +3910,7 @@ void MacroAssembler::Call(Register target,
|
| }
|
|
|
| #ifdef DEBUG
|
| - CHECK_EQ(size + CallSize(target, cond, rs, rt, bd),
|
| + CHECK_EQ(size + CallSize(target, offset, cond, rs, rt, bd),
|
| SizeOfCodeGeneratedSince(&start));
|
| #endif
|
| }
|
| @@ -3802,8 +3922,11 @@ int MacroAssembler::CallSize(Address target,
|
| Register rs,
|
| const Operand& rt,
|
| BranchDelaySlot bd) {
|
| - int size = CallSize(t9, cond, rs, rt, bd);
|
| - return size + 2 * kInstrSize;
|
| + int size = CallSize(t9, 0, cond, rs, rt, bd);
|
| + if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && cond == cc_always)
|
| + return size + 1 * kInstrSize;
|
| + else
|
| + return size + 2 * kInstrSize;
|
| }
|
|
|
|
|
| @@ -3817,8 +3940,18 @@ void MacroAssembler::Call(Address target,
|
| Label start;
|
| bind(&start);
|
| int32_t target_int = reinterpret_cast<int32_t>(target);
|
| - li(t9, Operand(target_int, rmode), CONSTANT_SIZE);
|
| - Call(t9, cond, rs, rt, bd);
|
| + if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && cond == cc_always) {
|
| + uint32_t lui_offset, jialc_offset;
|
| + UnpackTargetAddressUnsigned(target_int, lui_offset, jialc_offset);
|
| + if (MustUseReg(rmode)) {
|
| + RecordRelocInfo(rmode, target_int);
|
| + }
|
| + lui(t9, lui_offset);
|
| + Call(t9, jialc_offset, cond, rs, rt, bd);
|
| + } else {
|
| + li(t9, Operand(target_int, rmode), CONSTANT_SIZE);
|
| + Call(t9, 0, cond, rs, rt, bd);
|
| + }
|
| DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd),
|
| SizeOfCodeGeneratedSince(&start));
|
| }
|
| @@ -3863,7 +3996,7 @@ void MacroAssembler::Ret(Condition cond,
|
| Register rs,
|
| const Operand& rt,
|
| BranchDelaySlot bd) {
|
| - Jump(ra, cond, rs, rt, bd);
|
| + Jump(ra, 0, cond, rs, rt, bd);
|
| }
|
|
|
|
|
| @@ -3917,8 +4050,8 @@ void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) {
|
| uint32_t imm32;
|
| imm32 = jump_address(L);
|
| if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
|
| - uint32_t lui_offset, jic_offset;
|
| - UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset);
|
| + uint32_t lui_offset, jialc_offset;
|
| + UnpackTargetAddressUnsigned(imm32, lui_offset, jialc_offset);
|
| {
|
| BlockGrowBufferScope block_buf_growth(this);
|
| // Buffer growth (and relocation) must be blocked for internal
|
| @@ -3926,7 +4059,7 @@ void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) {
|
| // available to be patched.
|
| RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
|
| lui(at, lui_offset);
|
| - jialc(at, jic_offset);
|
| + jialc(at, jialc_offset);
|
| }
|
| CheckBuffer();
|
| } else {
|
| @@ -6272,8 +6405,20 @@ void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
|
| void MacroAssembler::CallCFunction(ExternalReference function,
|
| int num_reg_arguments,
|
| int num_double_arguments) {
|
| - li(t8, Operand(function));
|
| - CallCFunctionHelper(t8, num_reg_arguments, num_double_arguments);
|
| + if (IsMipsArchVariant(kMips32r6)) {
|
| + uint32_t lui_offset, jialc_offset;
|
| + UnpackTargetAddressUnsigned(Operand(function).immediate(), lui_offset,
|
| + jialc_offset);
|
| + if (MustUseReg(Operand(function).rmode())) {
|
| + RecordRelocInfo(Operand(function).rmode(), Operand(function).immediate());
|
| + }
|
| + lui(t9, lui_offset);
|
| + CallCFunctionHelper(t9, num_reg_arguments, num_double_arguments,
|
| + jialc_offset);
|
| + } else {
|
| + li(t9, Operand(function));
|
| + CallCFunctionHelper(t9, num_reg_arguments, num_double_arguments);
|
| + }
|
| }
|
|
|
|
|
| @@ -6295,10 +6440,10 @@ void MacroAssembler::CallCFunction(Register function,
|
| CallCFunction(function, num_arguments, 0);
|
| }
|
|
|
| -
|
| void MacroAssembler::CallCFunctionHelper(Register function,
|
| int num_reg_arguments,
|
| - int num_double_arguments) {
|
| + int num_double_arguments,
|
| + uint32_t offset) {
|
| DCHECK(has_frame());
|
| // Make sure that the stack is aligned before calling a C function unless
|
| // running in the simulator. The simulator has its own alignment check which
|
| @@ -6332,7 +6477,7 @@ void MacroAssembler::CallCFunctionHelper(Register function,
|
| function = t9;
|
| }
|
|
|
| - Call(function);
|
| + Call(function, offset);
|
|
|
| int stack_passed_arguments = CalculateStackPassedWords(
|
| num_reg_arguments, num_double_arguments);
|
|
|