Index: src/arm/assembler-thumb.cc |
diff --git a/src/arm/assembler-thumb.cc b/src/arm/assembler-thumb.cc |
index 522c718f64e07d050e2910e8fcfda2ebaad4da24..c7622836f20185fe4cdcb1065038c8e31967f33f 100644 |
--- a/src/arm/assembler-thumb.cc |
+++ b/src/arm/assembler-thumb.cc |
@@ -146,8 +146,7 @@ void Assembler::mov_thumb(Register dst, const Operand& src, SBit s, |
mov_imm_t2(dst, s, cond, i, imm3, imm8); |
return; |
} else { |
- // TODO(rkrithiv): perform 32-bit imm move |
- UNREACHABLE(); |
+ move_32_bit_immediate_thumb(dst, s, src, cond); |
return; |
} |
} |
@@ -708,6 +707,574 @@ void Assembler::it_thumb(Condition cond, int num_instr, bool cond2, |
} |
} |
+ |
+void Assembler::b_thumb(int branch_offset, Condition cond) { |
+ int imm = branch_offset >> 1; |
+ if (cond == al) { |
+ ASSERT(is_int24(imm)); |
+ emit32(thumb32_mode4(B_32_4) | thumb32_sign_extend_imm24(imm)); |
+ return; |
+ } else { |
+    uint32_t cond_thumb = (static_cast<uint32_t>(cond) >> 28) & 0xF; |
+ ASSERT(is_int20(imm)); |
+ uint32_t imm11 = imm & 0x7ff; |
+ uint32_t imm6 = (imm >> 11) & 0x3f; |
+ uint32_t j1 = (imm >> 17) & 1; |
+ uint32_t j2 = (imm >> 18) & 1; |
+ uint32_t s = (imm >> 19) & 1; |
+ emit32(thumb32_mode4(B_32_3) | s*BH10 | cond_thumb*BH6 | |
+ imm6*BH0 | j1*B13 | j2*B11 | imm11); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+// from ldr_thumb_reg |
+bool Assembler::fits_thumb16_mode_4_1(Register reg, const MemOperand& op) { |
+ return (is_uint3(op.rm().code()) && |
+ is_uint3(op.rn().code()) && |
+ is_uint3(reg.code()) && |
+ op.shift_imm_ == 0); |
+} |
+ |
+ |
+// other than 4_1 and 4_5 |
+bool Assembler::fits_thumb16_mode_4(Register reg, const MemOperand& op) { |
+// imm5, rn 3, rt 3 |
+ return (is_uint5(op.offset_) && |
+ are_low_reg(reg, op.rn_)); |
+} |
+ |
+ |
+bool Assembler::fits_thumb16_mode_4_5(Register reg, const MemOperand& op) { |
+// Rt 3, imm8 (note: imm8:'00' so >> 2) |
+ return (is_uint8(op.offset() >> 2) && |
+ op.rn_.code() == 13 && |
+ is_low_reg(reg)); |
+} |
+ |
+ |
+// A6.3.7 see A8-118 immediate |
+// A6.3.7 see A8-124 register |
+// A6.3.7 see A8-122 literal |
+// A6.2.4 see A8-124 reg |
+// A6.2.4 see A8-124 reg |
+// mode_4_2, pass opB // mode4 sets opA , param sets opB |
+void Assembler::ldr_thumb_immediate(Register dst, const MemOperand& src) { |
+ if ((src.offset_ & 3) == 0 && src.am_ == Offset) { |
+ uint32_t offset = src.offset_ >> 2; |
+ if (fits_thumb16_mode_4(dst, src)) { |
+ // Encoding T1 (immediate) - 16 bit // mode_4_2 |
+ // emit16(/* imm5@6 Rn@3 Rt */); // LDR_IMM_1 |
+ emit16(thumb16_mode4_2(LDR_IMM_1) | |
+ thumb16_2lowreg_imm5_encoding(dst, src.rn_, offset)); |
+ return; |
+ } else if (fits_thumb16_mode_4_5(dst, src)) { |
+ // Encoding T2 (immediate) - 16 bit // mode_4_5, pass opB |
+ emit16(thumb16_mode4_5(LDR_IMM_2) | |
+ thumb16_lowreg_imm8_encoding(dst, offset)); |
+ return; |
+ } |
+ } |
+ if ((is_uint12(src.offset_) && src.offset_ >= 0 && src.am_ == Offset) || |
+ (src.rn_.code() == 15 && (is_uint12(src.offset_) || |
+ is_uint12(-src.offset_)))) { |
+ // Encoding T3 (immediate) - 32 bit |
+ emit32(thumb32_mode7(LDR_32_IMM3) | |
+ thumb32_2reg_zero_extend_imm12(dst, src)); |
+ return; |
+ } else if (is_uint8(src.offset_) || is_uint8(-src.offset_)) { |
+ // Encoding T4 (immediate) - 32 bit |
+ emit32(thumb32_mode7(LDR_32_IMM4) | |
+ thumb32_2reg_zero_extend_imm8(dst, src)); |
+ return; |
+ } |
+ mov_thumb(ip, Operand(src.offset_), LeaveCC, al); |
+ ldr_thumb(dst, MemOperand(src.rn_, ip, src.am_)); |
+} |
+ |
+ |
+void Assembler::ldr_thumb_register(Register dst, const MemOperand& src) { |
+ if (fits_thumb16_mode_4_1(dst, src)) { |
+ // Encoding T1 (register) - 16 bit |
+ emit16(thumb16_mode4_1(LDR_REG_1) | thumb16_3lowreg_encoding(dst, src)); |
+ return; |
+ } else { |
+ // Encoding T2 (register) - 32 bit |
+ emit32(thumb32_mode7(LDR_32_REG) | thumb32_3reg_lsl(dst, src)); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldr_thumb(Register dst, const MemOperand& src) { |
+ if (!src.rm_.is_valid()) { |
+ // Immediate. |
+ ldr_thumb_immediate(dst, src) ; |
+ return; |
+ } else { |
+ // Register |
+ ldr_thumb_register(dst, src); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::str_thumb_register(Register src, const MemOperand& dst) { |
+ if (fits_thumb16_mode_4_1(src, dst)) { |
+ // Encoding T1 (register) - 16 bit |
+ // Rm Rn Rt |
+ emit16(thumb16_mode4_1(STR_REG) | thumb16_3lowreg_encoding(src, dst)); |
+ return; |
+ } else if (dst.shift_op_ == LSL && is_uint2(dst.shift_imm_)) { |
+ // Encoding T2 (register) - 32 bit |
+ // Rn Rt imm2 Rm |
+ emit32(thumb32_mode10(STR_32_REG) | thumb32_3reg_lsl(src, dst)); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::str_thumb_immediate(Register src, const MemOperand& dst) { |
+ if (((dst.offset_ & 3) == 0) && dst.am_ == Offset) { |
+ // try 16-bit |
+ uint32_t offset = dst.offset_ >> 2; |
+ if (fits_thumb16_mode_4(src, dst)) { |
+ // Encoding T1 (immediate) - 16 bit -NOTE: imm5/Rn/Rt - Rt Is SRC, |
+ // Rn imm5 in MemOp |
+ // mode_4_2, pass opB |
+ // imm5 Rn Rt |
+ emit16(thumb16_mode4_2(STR_IMM_1) | |
+ thumb16_2lowreg_imm5_encoding(src, dst.rn_, offset)); |
+ return; |
+ } else if (fits_thumb16_mode_4_5(src, dst)) { |
+ // Encoding T2 (immediate) - 16 bit |
+ // mode_4_5, pass opB; Rt imm8 |
+ emit16(thumb16_mode4_5(STR_IMM_2) | |
+ thumb16_lowreg_imm8_encoding(src, offset)); |
+ return; |
+ } |
+ } |
+ // try 32-bit |
+ if (is_uint12(dst.offset_) && dst.am_ == Offset) { |
+ // Encoding T3 (immediate) - 32 bit |
+ // Rn Rt imm12 |
+ emit32(thumb32_mode10(STR_32_IMM2) | |
+ thumb32_2reg_zero_extend_imm12(src, dst)); |
+ return; |
+ } else if (is_uint8(dst.offset_) || is_uint8(-dst.offset_)) { |
+ // Encoding T4 (immediate) - 32 bit |
+ // Rn Rt immm8 |
+ emit32(thumb32_mode10(STR_32_IMM3) | |
+ thumb32_2reg_zero_extend_imm8(src, dst)); |
+ return; |
+ } |
+ mov_thumb(ip, Operand(dst.offset_), LeaveCC, al); |
+ str_thumb(src, MemOperand(dst.rn_, ip, dst.am_)); |
+} |
+ |
+ |
+void Assembler::str_thumb(Register src, const MemOperand& dst) { |
+ if (!dst.rm_.is_valid()) { |
+ // Immediate. |
+ str_thumb_immediate(src, dst) ; |
+ return; |
+ } else { |
+ // Register. |
+ str_thumb_register(src, dst) ; |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrb_thumb_immediate(Register dst, const MemOperand& src) { |
+  if (src.am_ == Offset && fits_thumb16_mode_4(dst, src)) { |
+ emit16(thumb16_mode4_3(LDRB_IMM) | |
+ thumb16_2lowreg_imm5_encoding(dst, src.rn_, src.offset_)); |
+ return; |
+ } else if (src.offset_ >= 0 && src.am_ == Offset && is_uint12(src.offset_)) { |
+ emit32(thumb32_mode9(LDRB_32_IMM2) | |
+ thumb32_2reg_zero_extend_imm12(dst, src)); |
+ return; |
+ } else if (is_uint8(src.offset_) || is_uint8(-src.offset_)) { |
+ emit32(thumb32_mode9(LDRB_32_IMM3) | |
+ thumb32_2reg_zero_extend_imm8(dst, src)); |
+ return; |
+ } |
+ mov_thumb(ip, Operand(src.offset_), LeaveCC, al); |
+ ldrb_thumb(dst, MemOperand(src.rn_, ip, src.am_)); |
+} |
+ |
+ |
+void Assembler::ldrb_thumb_register(Register dst, const MemOperand& src) { |
+ if (fits_thumb16_mode_4_1(dst, src)) { |
+ // Encoding T1 (register) - 16 bit |
+ emit16(thumb16_mode4_1(LDRB_REG) | thumb16_3lowreg_encoding(dst, src)); |
+ return; |
+ } else { |
+ // Encoding T2 (register) - 32 bit |
+ emit32(thumb32_mode9(LDRB_32_REG) | thumb32_3reg_lsl(dst, src)); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrb_thumb(Register dst, const MemOperand& src) { |
+ if (!src.rm_.is_valid()) { |
+ // Immediate. |
+ ldrb_thumb_immediate(dst, src) ; |
+ return; |
+ } else { |
+ // Register. |
+ ldrb_thumb_register(dst, src) ; |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::strb_thumb_immediate(Register src, const MemOperand& dst) { |
+  if (dst.am_ == Offset && fits_thumb16_mode_4(src, dst)) { |
+ emit16(thumb16_mode4_3(STRB_IMM) | |
+ thumb16_2lowreg_imm5_encoding(src, dst.rn_, dst.offset_)); |
+ return; |
+ } else if (dst.offset_ >= 0 && dst.am_ == Offset && is_uint12(dst.offset_)) { |
+ emit32(thumb32_mode10(STRB_32_IMM2) | |
+ thumb32_2reg_zero_extend_imm12(src, dst)); |
+ return; |
+ } else if (is_uint8(dst.offset_) || is_uint8(-dst.offset_)) { |
+ emit32(thumb32_mode10(STRB_32_IMM3) | |
+ thumb32_2reg_zero_extend_imm8(src, dst)); |
+ return; |
+ } |
+ mov_thumb(ip, Operand(dst.offset_), LeaveCC, al); |
+ strb_thumb(src, MemOperand(dst.rn_, ip, dst.am_)); |
+} |
+ |
+ |
+void Assembler::strb_thumb_register(Register src, const MemOperand& dst) { |
+ if (fits_thumb16_mode_4_1(src, dst)) { |
+ // Encoding T1 (register) - 16 bit |
+ emit16(thumb16_mode4_1(STRB_REG) | thumb16_3lowreg_encoding(src, dst)); |
+ return; |
+ } else if (dst.shift_op_ == LSR) { |
+ ASSERT(dst.rn_.code() != ip.code()); |
+ add_thumb(ip, dst.rn_, |
+ Operand(dst.rm_, dst.shift_op_, dst.shift_imm_), LeaveCC, al); |
+ strb_thumb(src, MemOperand(ip)); |
+ return; |
+ } else { |
+ // Encoding T2 (register) - 32 bit |
+ // Rn Rt imm2 Rm |
+ emit32(thumb32_mode10(STRB_32_REG) | thumb32_3reg_lsl(src, dst)); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::strb_thumb(Register src, const MemOperand& dst) { |
+ if (!dst.rm_.is_valid()) { |
+ // Immediate. |
+ strb_thumb_immediate(src, dst) ; |
+ return; |
+ } else { |
+ // Register. |
+ strb_thumb_register(src, dst) ; |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrh_thumb_immediate(Register dst, const MemOperand& src) { |
+  if (src.am_ == Offset && (src.offset_ & 1) == 0 && |
+      fits_thumb16_mode_4(dst, src)) { |
+    emit16(thumb16_mode4_4(LDRH_IMM) | |
+           thumb16_2lowreg_imm5_encoding(dst, src.rn_, (src.offset_ >> 1))); |
+    return; |
+  } else if (src.am_ == Offset && is_uint12(src.offset_)) { |
+    emit32(thumb32_mode8(LDRH_32_IMM2) | thumb32_2reg_zero_extend_imm12(dst, src)); |
+    return; |
+  } else if (is_uint8(src.offset_) || is_uint8(-src.offset_)) { |
+    emit32(thumb32_mode8(LDRH_32_IMM3) | |
+           thumb32_2reg_zero_extend_imm8(dst, src)); |
+    return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrh_thumb_register(Register dst, const MemOperand& src) { |
+ if (fits_thumb16_mode_4_1(dst, src)) { |
+ // Encoding T1 (register) - 16 bit |
+ emit16(thumb16_mode4_1(LDRH_REG) | thumb16_3lowreg_encoding(dst, src)); |
+ return; |
+ } else { |
+ // Encoding T2 (register) - 32 bit |
+ // Rn Rt imm2 Rm |
+ emit32(thumb32_mode8(LDRH_32_REG) | thumb32_3reg_lsl(dst, src)); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrh_thumb(Register dst, const MemOperand& src) { |
+ if (!src.rm_.is_valid()) { |
+ // Immediate. |
+ ldrh_thumb_immediate(dst, src) ; |
+ return; |
+ } else { |
+ // Register. |
+ ldrh_thumb_register(dst, src) ; |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::strh_thumb_immediate(Register src, const MemOperand& dst) { |
+  if (dst.am_ == Offset && (dst.offset_ & 1) == 0 && |
+      fits_thumb16_mode_4(src, dst)) { |
+    emit16(thumb16_mode4_4(STRH_IMM) | |
+           thumb16_2lowreg_imm5_encoding(src, dst.rn_, (dst.offset_ >> 1))); |
+    return; |
+  } else if (dst.am_ == Offset && is_uint12(dst.offset_)) { |
+    emit32(thumb32_mode10(STRH_32_IMM2) | thumb32_2reg_zero_extend_imm12(src, dst)); |
+    return; |
+  } else if (is_uint8(dst.offset_) || is_uint8(-dst.offset_)) { |
+    emit32(thumb32_mode10(STRH_32_IMM3) | |
+           thumb32_2reg_zero_extend_imm8(src, dst)); |
+    return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::strh_thumb_register(Register src, const MemOperand& dst) { |
+ if (fits_thumb16_mode_4_1(src, dst)) { |
+ // Encoding T1 (register) - 16 bit |
+ emit16(thumb16_mode4_1(STRH_REG) | thumb16_3lowreg_encoding(src, dst)); |
+ return; |
+ |
+ } else { |
+ // Encoding T2 (register) - 32 bit |
+ // Rn Rt imm2 Rm |
+ emit32(thumb32_mode10(STRH_32_REG) | thumb32_3reg_lsl(src, dst)); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::strh_thumb(Register src, const MemOperand& dst) { |
+ if (!dst.rm_.is_valid()) { |
+ // Immediate. |
+ strh_thumb_immediate(src, dst) ; |
+ return; |
+ } else { |
+ // Register. |
+ strh_thumb_register(src, dst) ; |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrsb_thumb_immediate(Register dst, const MemOperand& src) { |
+  if (src.am_ == Offset && is_uint12(src.offset_)) { |
+    // T1 - Rn Rt imm12 ZeroX, no neg offset |
+    emit32(thumb32_mode9(LDRSB_32_IMM1) | |
+           thumb32_2reg_zero_extend_imm12(dst, src)); |
+    return; |
+  } else if (is_uint8(src.offset_) || is_uint8(-src.offset_)) { |
+    // T2 - Rn Rt PUW imm8 |
+    emit32(thumb32_mode9(LDRSB_32_IMM2) | |
+           thumb32_2reg_zero_extend_imm8(dst, src)); |
+    return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrsb_thumb_register(Register dst, const MemOperand& src) { |
+ if (fits_thumb16_mode_4_1(dst, src)) { |
+ // Encoding T1 (register) - 16 bit |
+ emit16(thumb16_mode4_1(LDRSB_REG) | thumb16_3lowreg_encoding(dst, src)); |
+ return; |
+ } else { |
+ // Encoding T2 (register) - 32 bit |
+ emit32(thumb32_mode9(LDRSB_32_REG) | thumb32_3reg_lsl(dst, src)); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrsb_thumb(Register dst, const MemOperand& src) { |
+ if (!src.rm_.is_valid()) { |
+ // Immediate. |
+ ldrsb_thumb_immediate(dst, src) ; |
+ return; |
+ } else { |
+ // Register |
+ ldrsb_thumb_register(dst, src) ; |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrsh_thumb(Register dst, const MemOperand& src) { |
+ if (!src.rm_.is_valid()) { |
+ // Immediate. |
+ ldrsh_thumb_immediate(dst, src) ; |
+ return; |
+ } else { |
+ // Register. |
+ ldrsh_thumb_register(dst, src) ; |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrsh_thumb_immediate(Register dst, const MemOperand& src) { |
+ // T1 & T2 both 32 bit |
+  if (src.am_ == Offset && is_uint12(src.offset_)) { |
+    emit32(thumb32_mode8(LDRSH_32_IMM1) | |
+           thumb32_2reg_zero_extend_imm12(dst, src)); |
+    return; |
+  } else if (is_uint8(src.offset_) || is_uint8(-src.offset_)) { |
+    emit32(thumb32_mode8(LDRSH_32_IMM2) | |
+           thumb32_2reg_zero_extend_imm8(dst, src)); |
+    return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::ldrsh_thumb_register(Register dst, const MemOperand& src) { |
+ if (fits_thumb16_mode_4_1(dst, src)) { |
+ // Encoding T1 (register) - 16 bit |
+ emit16(thumb16_mode4_1(LDRSH_REG) | thumb16_3lowreg_encoding(dst, src)); |
+ return; |
+ } else { |
+ // Encoding T2 (register) - 32 bit |
+ emit32(thumb32_mode8(LDRSH_32_REG) | thumb32_3reg_lsl(dst, src)); |
+ return; |
+ } |
+ UNREACHABLE(); |
+} |
+ |
+ |
+void Assembler::mla_thumb(Register dst, Register src1, Register src2, |
+ Register srcA, SBit s, Condition cond) { |
+ ASSERT(cond == al && s == LeaveCC); |
+ emit32(thumb32_mode16(MLA_32) | |
+ thumb32_4reg(dst, src1, src2, srcA)); |
+} |
+ |
+ |
+void Assembler::mls_thumb(Register dst, Register src1, Register src2, |
+ Register srcA, Condition cond) { |
+ ASSERT(cond == al); |
+ emit32(thumb32_mode16(MLS_32) | |
+ thumb32_4reg(dst, src1, src2, srcA)); |
+} |
+ |
+ |
+void Assembler::sdiv_thumb(Register dst, Register src1, Register src2, |
+ Condition cond) { |
+ ASSERT(cond == al); |
+ emit32(thumb32_mode17(SDIV_32) | thumb32_4reg(dst, src1, src2, pc) | |
+ B7 | B6 | B5 | B4); |
+} |
+ |
+ |
+void Assembler::smlal_thumb(Register dstL, Register dstH, Register src1, |
+ Register src2, SBit s, Condition cond) { |
+ ASSERT(cond == al && s == LeaveCC); |
+ ASSERT(dstL.code() != dstH.code()); |
+ emit32(thumb32_mode17(SMLAL_32) | thumb32_4reg(dstH, src1, src2, dstL)); |
+} |
+ |
+ |
+void Assembler::smull_thumb(Register dstL, Register dstH, Register src1, |
+ Register src2, SBit s, Condition cond) { |
+ ASSERT(cond == al && s == LeaveCC); |
+ ASSERT(dstL.code() != dstH.code()); |
+ emit32(thumb32_mode17(SMULL_32) | thumb32_4reg(dstH, src1, src2, dstL)); |
+} |
+ |
+ |
+void Assembler::umlal_thumb(Register dstL, Register dstH, Register src1, |
+ Register src2, SBit s, Condition cond) { |
+ ASSERT(cond == al && s == LeaveCC); |
+ ASSERT(dstL.code() != dstH.code()); |
+ emit32(thumb32_mode17(UMLAL_32) | thumb32_4reg(dstH, src1, src2, dstL)); |
+} |
+ |
+ |
+void Assembler::umull_thumb(Register dstL, Register dstH, Register src1, |
+ Register src2, SBit s, Condition cond) { |
+ ASSERT(cond == al && s == LeaveCC); |
+ ASSERT(dstL.code() != dstH.code()); |
+ emit32(thumb32_mode17(UMULL_32) | thumb32_4reg(dstH, src1, src2, dstL)); |
+} |
+ |
+ |
+void Assembler::ldm_thumb(BlockAddrMode am, |
+ Register base, |
+ RegList dst, |
+ Condition cond) { |
+ ASSERT(cond == al); |
+ ASSERT((((am & P) >> 24) & 1) != (((am & U) >> 23) & 1)); |
+  if (is_uint3(base.code()) && is_uint8(dst) && (am & U) != 0) { |
+ if ((((am & W) >> 21) & 1) ^ ((dst >> base.code()) & 1)) { |
+ emit16(B15 | B14 | B11 | base.code()*B8 | (dst & 0xff)); |
+ return; |
+ } |
+ } |
+ ASSERT((dst & sp.bit()) == 0); |
+ emit32(thumb32_mode5() | am | L | base.code()*BH0 | dst); |
+} |
+ |
+ |
+void Assembler::stm_thumb(BlockAddrMode am, |
+ Register base, |
+ RegList dst, |
+ Condition cond) { |
+ ASSERT(dst != 0); |
+ ASSERT(cond == al); |
+ ASSERT((((am & P) >> 24) & 1) != (((am & U) >> 23) & 1)); |
+ if (is_uint3(base.code()) && is_uint8(dst) && am == ia_w) { |
+ emit16(B15 | B14 | base.code()*B8 | dst); |
+ return; |
+ } |
+ ASSERT((dst & sp.bit()) == 0 && (dst & pc.bit()) == 0); |
+ emit32(thumb32_mode5() | am | base.code()*BH0 | dst); |
+} |
+ |
+ |
+void Assembler::movt_thumb(Register reg, uint32_t immediate, Condition cond) { |
+ emit32(thumb32_mode3(MOVT_32_IMM) | |
+ thumb32_1reg_zero_extend_imm_split_4i38(reg, immediate)); |
+} |
+ |
+ |
+void Assembler::ldr_pc_thumb(Register dst, const Operand& src) { |
+ RecordRelocInfo(src.rmode_, src.imm32_, USE_CONSTANT_POOL); |
+ ldr_thumb(dst, MemOperand(pc, 0)); |
+} |
+ |
} } // namespace v8::internal |
#endif // V8_TARGET_ARCH_ARM |