Index: src/arm/assembler-arm.cc
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index fc33b660011afc7731c5136a4d9a7720bd96353f..2b6f416e8506f6d312c108901ebdd10d4fe8402b 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -510,11 +510,14 @@ const Instr kLdrStrInstrTypeMask = 0xffff0000;
const Instr kLdrStrInstrArgumentMask = 0x0000ffff;
const Instr kLdrStrOffsetMask = 0x00000fff;
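+// Mask/pattern pair matching the first halfword of a Thumb-2 LDR (literal);
+// the U (add/subtract) bit is left out of the mask so both offset signs match.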
+const Instr kThumbLdrPCMask = BH15 | 7 * BH12 | 15 * BH8 | 7 * BH4 | 15 * BH0;
+const Instr kThumbLdrPCPattern = BH15 | 7 * BH12 | BH11 | BH6 | BH4 | 15 * BH0;
Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
: AssemblerBase(isolate, buffer, buffer_size),
recorded_ast_id_(TypeFeedbackId::None()),
- positions_recorder_(this) {
+ positions_recorder_(this),
+ thumb_mode_(false) {
reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
num_pending_reloc_info_ = 0;
num_pending_64_bit_reloc_info_ = 0;
@@ -775,6 +778,10 @@ int Assembler::GetCmpImmediateRawImmediate(Instr instr) {
int Assembler::target_at(int pos) {
+ if (is_thumb_mode()) {
+ return target_at_thumb(pos);
+ }
+
Instr instr = instr_at(pos);
if (is_uint24(instr)) {
// Emitted link to a label, not part of a branch.
@@ -792,6 +799,11 @@ int Assembler::target_at(int pos) {
void Assembler::target_at_put(int pos, int target_pos) {
+ if (is_thumb_mode()) {
+ target_at_put_thumb(pos, target_pos);
+ return;
+ }
+
Instr instr = instr_at(pos);
if (is_uint24(instr)) {
ASSERT(target_pos == pos || target_pos >= 0);
@@ -1313,12 +1325,20 @@ int Assembler::branch_offset(Label* L, bool jump_elimination_allowed) {
// Block the emission of the constant pool, since the branch instruction must
// be emitted at the pc offset recorded by the label.
BlockConstPoolFor(1);
+ if (is_thumb_mode()) {
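+ // In Thumb state the PC reads as the instruction address plus 4,
+ // not plus 8 as in ARM state.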
+ return target_pos - (pc_offset() + kThumbPcLoadDelta);
+ }
return target_pos - (pc_offset() + kPcLoadDelta);
}
// Branch instructions.
void Assembler::b(int branch_offset, Condition cond) {
+ if (is_thumb_mode()) {
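+ // Thumb branch targets are halfword-aligned.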
+ ASSERT((branch_offset & 1) == 0);
+ b_thumb(branch_offset, cond);
+ return;
+ }
ASSERT((branch_offset & 3) == 0);
int imm24 = branch_offset >> 2;
ASSERT(is_int24(imm24));
@@ -1333,6 +1353,13 @@ void Assembler::b(int branch_offset, Condition cond) {
void Assembler::bl(int branch_offset, Condition cond) {
positions_recorder()->WriteRecordedPositions();
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ ASSERT((branch_offset & 1) == 0);
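+ // BL encodes the offset in halfwords, sign-extended from 24 bits.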
+ int imm = branch_offset >> 1;
+ emit32(thumb32_mode4(BL_32_IMM) | thumb32_sign_extend_imm24(imm));
+ return;
+ }
ASSERT((branch_offset & 3) == 0);
int imm24 = branch_offset >> 2;
ASSERT(is_int24(imm24));
@@ -1342,6 +1369,12 @@ void Assembler::bl(int branch_offset, Condition cond) {
void Assembler::blx(int branch_offset) { // v5 and above
positions_recorder()->WriteRecordedPositions();
+ if (is_thumb_mode()) {
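+ // BLX (immediate) switches to ARM state, so the target must be
+ // word-aligned.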
+ ASSERT((branch_offset & 3) == 0);
+ int imm = branch_offset >> 1;
+ emit32(thumb32_mode4(BLX_32_IMM) | thumb32_sign_extend_imm24(imm));
+ return;
+ }
ASSERT((branch_offset & 1) == 0);
int h = ((branch_offset & 2) >> 1)*B24;
int imm24 = branch_offset >> 2;
@@ -1353,6 +1386,11 @@ void Assembler::blx(int branch_offset) { // v5 and above
void Assembler::blx(Register target, Condition cond) { // v5 and above
positions_recorder()->WriteRecordedPositions();
ASSERT(!target.is(pc));
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ emit16(thumb16_mode3(BLX_REG) | thumb16_anyreg_encoding(target));
+ return;
+ }
emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BLX | target.code());
}
@@ -1360,6 +1398,11 @@ void Assembler::blx(Register target, Condition cond) { // v5 and above
void Assembler::bx(Register target, Condition cond) { // v5 and above, plus v4t
positions_recorder()->WriteRecordedPositions();
ASSERT(!target.is(pc)); // use of pc is actually allowed, but discouraged
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ emit16(thumb16_mode3(BX_REG) | thumb16_anyreg_encoding(target));
+ return;
+ }
emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BX | target.code());
}
@@ -1368,42 +1411,70 @@ void Assembler::bx(Register target, Condition cond) { // v5 and above, plus v4t
void Assembler::and_(Register dst, Register src1, const Operand& src2,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ and_thumb(dst, src1, src2, s, cond);
+ return;
+ }
addrmod1(cond | AND | s, src1, dst, src2);
}
void Assembler::eor(Register dst, Register src1, const Operand& src2,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ eor_thumb(dst, src1, src2, s, cond);
+ return;
+ }
addrmod1(cond | EOR | s, src1, dst, src2);
}
void Assembler::sub(Register dst, Register src1, const Operand& src2,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ sub_thumb(dst, src1, src2, s, cond);
+ return;
+ }
addrmod1(cond | SUB | s, src1, dst, src2);
}
void Assembler::rsb(Register dst, Register src1, const Operand& src2,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ rsb_thumb(dst, src1, src2, s, cond);
+ return;
+ }
addrmod1(cond | RSB | s, src1, dst, src2);
}
void Assembler::add(Register dst, Register src1, const Operand& src2,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ add_thumb(dst, src1, src2, s, cond);
+ return;
+ }
addrmod1(cond | ADD | s, src1, dst, src2);
}
void Assembler::adc(Register dst, Register src1, const Operand& src2,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ adc_thumb(dst, src1, src2, s, cond);
+ return;
+ }
addrmod1(cond | ADC | s, src1, dst, src2);
}
void Assembler::sbc(Register dst, Register src1, const Operand& src2,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ sbc_thumb(dst, src1, src2, s, cond);
+ return;
+ }
addrmod1(cond | SBC | s, src1, dst, src2);
}
@@ -1415,16 +1486,28 @@ void Assembler::rsc(Register dst, Register src1, const Operand& src2,
void Assembler::tst(Register src1, const Operand& src2, Condition cond) {
+ if (is_thumb_mode()) {
+ tst_thumb(src1, src2, cond);
+ return;
+ }
addrmod1(cond | TST | S, src1, r0, src2);
}
void Assembler::teq(Register src1, const Operand& src2, Condition cond) {
+ if (is_thumb_mode()) {
+ teq_thumb(src1, src2, cond);
+ return;
+ }
addrmod1(cond | TEQ | S, src1, r0, src2);
}
void Assembler::cmp(Register src1, const Operand& src2, Condition cond) {
+ if (is_thumb_mode()) {
+ cmp_thumb(src1, src2, cond);
+ return;
+ }
addrmod1(cond | CMP | S, src1, r0, src2);
}
@@ -1437,12 +1520,20 @@ void Assembler::cmp_raw_immediate(
void Assembler::cmn(Register src1, const Operand& src2, Condition cond) {
+ if (is_thumb_mode()) {
+ cmn_thumb(src1, src2, cond);
+ return;
+ }
addrmod1(cond | CMN | S, src1, r0, src2);
}
void Assembler::orr(Register dst, Register src1, const Operand& src2,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ orr_thumb(dst, src1, src2, s, cond);
+ return;
+ }
addrmod1(cond | ORR | s, src1, dst, src2);
}
@@ -1455,6 +1546,10 @@ void Assembler::mov(Register dst, const Operand& src, SBit s, Condition cond) {
// the mov instruction. They must be generated using nop(int/NopMarkerTypes)
// or MarkCode(int/NopMarkerTypes) pseudo instructions.
ASSERT(!(src.is_reg() && src.rm().is(dst) && s == LeaveCC && cond == al));
+ if (is_thumb_mode()) {
+ mov_thumb(dst, src, s, cond);
+ return;
+ }
addrmod1(cond | MOV | s, r0, dst, src);
}
@@ -1508,17 +1603,29 @@ void Assembler::movw(Register reg, uint32_t immediate, Condition cond) {
void Assembler::movt(Register reg, uint32_t immediate, Condition cond) {
+ if (is_thumb_mode()) {
+ movt_thumb(reg, immediate, cond);
+ return;
+ }
emit(cond | 0x34*B20 | reg.code()*B12 | EncodeMovwImmediate(immediate));
}
void Assembler::bic(Register dst, Register src1, const Operand& src2,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ bic_thumb(dst, src1, src2, s, cond);
+ return;
+ }
addrmod1(cond | BIC | s, src1, dst, src2);
}
void Assembler::mvn(Register dst, const Operand& src, SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ mvn_thumb(dst, src, s, cond);
+ return;
+ }
addrmod1(cond | MVN | s, r0, dst, src);
}
@@ -1526,6 +1633,10 @@ void Assembler::mvn(Register dst, const Operand& src, SBit s, Condition cond) {
// Multiply instructions.
void Assembler::mla(Register dst, Register src1, Register src2, Register srcA,
SBit s, Condition cond) {
+ if (is_thumb_mode()) {
+ mla_thumb(dst, src1, src2, srcA, s, cond);
+ return;
+ }
ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc));
emit(cond | A | s | dst.code()*B16 | srcA.code()*B12 |
src2.code()*B8 | B7 | B4 | src1.code());
@@ -1534,6 +1645,10 @@ void Assembler::mla(Register dst, Register src1, Register src2, Register srcA,
void Assembler::mls(Register dst, Register src1, Register src2, Register srcA,
Condition cond) {
+ if (is_thumb_mode()) {
+ mls_thumb(dst, src1, src2, srcA, cond);
+ return;
+ }
ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc));
emit(cond | B22 | B21 | dst.code()*B16 | srcA.code()*B12 |
src2.code()*B8 | B7 | B4 | src1.code());
@@ -1542,6 +1657,10 @@ void Assembler::mls(Register dst, Register src1, Register src2, Register srcA,
void Assembler::sdiv(Register dst, Register src1, Register src2,
Condition cond) {
+ if (is_thumb_mode()) {
+ sdiv_thumb(dst, src1, src2, cond);
+ return;
+ }
ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc));
ASSERT(IsEnabled(SUDIV));
emit(cond | B26 | B25| B24 | B20 | dst.code()*B16 | 0xf * B12 |
@@ -1552,6 +1671,10 @@ void Assembler::sdiv(Register dst, Register src1, Register src2,
void Assembler::mul(Register dst, Register src1, Register src2,
SBit s, Condition cond) {
ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc));
+ if (is_thumb_mode()) {
+ mul_thumb(dst, src1, src2, s, cond);
+ return;
+ }
// dst goes in bits 16-19 for this instruction!
emit(cond | s | dst.code()*B16 | src2.code()*B8 | B7 | B4 | src1.code());
}
@@ -1563,6 +1686,10 @@ void Assembler::smlal(Register dstL,
Register src2,
SBit s,
Condition cond) {
+ if (is_thumb_mode()) {
+ smlal_thumb(dstL, dstH, src1, src2, s, cond);
+ return;
+ }
ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
ASSERT(!dstL.is(dstH));
emit(cond | B23 | B22 | A | s | dstH.code()*B16 | dstL.code()*B12 |
@@ -1576,6 +1703,10 @@ void Assembler::smull(Register dstL,
Register src2,
SBit s,
Condition cond) {
+ if (is_thumb_mode()) {
+ smull_thumb(dstL, dstH, src1, src2, s, cond);
+ return;
+ }
ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
ASSERT(!dstL.is(dstH));
emit(cond | B23 | B22 | s | dstH.code()*B16 | dstL.code()*B12 |
@@ -1589,6 +1720,10 @@ void Assembler::umlal(Register dstL,
Register src2,
SBit s,
Condition cond) {
+ if (is_thumb_mode()) {
+ umlal_thumb(dstL, dstH, src1, src2, s, cond);
+ return;
+ }
ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
ASSERT(!dstL.is(dstH));
emit(cond | B23 | A | s | dstH.code()*B16 | dstL.code()*B12 |
@@ -1602,6 +1737,10 @@ void Assembler::umull(Register dstL,
Register src2,
SBit s,
Condition cond) {
+ if (is_thumb_mode()) {
+ umull_thumb(dstL, dstH, src1, src2, s, cond);
+ return;
+ }
ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
ASSERT(!dstL.is(dstH));
emit(cond | B23 | s | dstH.code()*B16 | dstL.code()*B12 |
@@ -1613,6 +1752,14 @@ void Assembler::umull(Register dstL,
void Assembler::clz(Register dst, Register src, Condition cond) {
// v5 and above.
ASSERT(!dst.is(pc) && !src.is(pc));
+
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
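+ // CLZ T1: 0xFAB0F080 | Rm*BH0 | Rd*B8 | Rm, with Rm encoded twice.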
+ emit32(BH15 | BH14 | BH13 | BH12 | BH11 | BH9 | BH7 | BH5 | BH4 |
+ src.code()*BH0 | B15 | B14 | B13 | B12 | dst.code()*B8 | B7 | src.code());
+ return;
+ }
+
emit(cond | B24 | B22 | B21 | 15*B16 | dst.code()*B12 |
15*B8 | CLZ | src.code());
}
@@ -1637,6 +1784,13 @@ void Assembler::usat(Register dst,
sh = 1;
}
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
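+ // Thumb-2 USAT; sh = 1 selects an arithmetic (ASR) shift.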
+ emit32(thumb32_mode3(USAT_32_IMM) | sh*BH5 |
+ thumb32_bit_field(src.rm_, dst, src.shift_imm_, satpos));
+ return;
+ }
+
emit(cond | 0x6*B24 | 0xe*B20 | satpos*B16 | dst.code()*B12 |
src.shift_imm_*B7 | sh*B6 | 0x1*B4 | src.rm_.code());
}
@@ -1658,6 +1812,14 @@ void Assembler::ubfx(Register dst,
ASSERT(!dst.is(pc) && !src.is(pc));
ASSERT((lsb >= 0) && (lsb <= 31));
ASSERT((width >= 1) && (width <= (32 - lsb)));
+
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
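+ // Thumb-2 UBFX encodes the field width as width - 1.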
+ emit32(thumb32_mode3(UBFX_32_IMM) |
+ thumb32_bit_field(src, dst, lsb, width - 1));
+ return;
+ }
+
emit(cond | 0xf*B23 | B22 | B21 | (width - 1)*B16 | dst.code()*B12 |
lsb*B7 | B6 | B4 | src.code());
}
@@ -1678,6 +1840,14 @@ void Assembler::sbfx(Register dst,
ASSERT(!dst.is(pc) && !src.is(pc));
ASSERT((lsb >= 0) && (lsb <= 31));
ASSERT((width >= 1) && (width <= (32 - lsb)));
+
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ emit32(thumb32_mode3(SBFX_32_IMM) |
+ thumb32_bit_field(src, dst, lsb, width - 1));
+ return;
+ }
+
emit(cond | 0xf*B23 | B21 | (width - 1)*B16 | dst.code()*B12 |
lsb*B7 | B6 | B4 | src.code());
}
@@ -1694,6 +1864,13 @@ void Assembler::bfc(Register dst, int lsb, int width, Condition cond) {
ASSERT((lsb >= 0) && (lsb <= 31));
ASSERT((width >= 1) && (width <= (32 - lsb)));
int msb = lsb + width - 1;
+
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
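+ // Thumb-2 BFC is BFI with Rn = 0b1111, hence pc as the source register.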
+ emit32(thumb32_mode3(BFC_32_IMM) | thumb32_bit_field(pc, dst, lsb, msb));
+ return;
+ }
+
emit(cond | 0x1f*B22 | msb*B16 | dst.code()*B12 | lsb*B7 | B4 | 0xf);
}
@@ -1713,6 +1890,13 @@ void Assembler::bfi(Register dst,
ASSERT((lsb >= 0) && (lsb <= 31));
ASSERT((width >= 1) && (width <= (32 - lsb)));
int msb = lsb + width - 1;
+
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ emit32(thumb32_mode3(BFI_32_IMM) | thumb32_bit_field(src, dst, lsb, msb));
+ return;
+ }
+
emit(cond | 0x1f*B22 | msb*B16 | dst.code()*B12 | lsb*B7 | B4 |
src.code());
}
@@ -1856,41 +2040,82 @@ void Assembler::ldr(Register dst, const MemOperand& src, Condition cond) {
if (dst.is(pc)) {
positions_recorder()->WriteRecordedPositions();
}
+
+ if (is_thumb_mode()) {
+ ldr_thumb(dst, src);
+ return;
+ }
+
addrmod2(cond | B26 | L, dst, src);
}
void Assembler::str(Register src, const MemOperand& dst, Condition cond) {
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ str_thumb(src, dst);
+ return;
+ }
addrmod2(cond | B26, src, dst);
}
void Assembler::ldrb(Register dst, const MemOperand& src, Condition cond) {
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ ldrb_thumb(dst, src);
+ return;
+ }
addrmod2(cond | B26 | B | L, dst, src);
}
void Assembler::strb(Register src, const MemOperand& dst, Condition cond) {
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ strb_thumb(src, dst);
+ return;
+ }
addrmod2(cond | B26 | B, src, dst);
}
void Assembler::ldrh(Register dst, const MemOperand& src, Condition cond) {
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ ldrh_thumb(dst, src);
+ return;
+ }
addrmod3(cond | L | B7 | H | B4, dst, src);
}
void Assembler::strh(Register src, const MemOperand& dst, Condition cond) {
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ strh_thumb(src, dst);
+ return;
+ }
addrmod3(cond | B7 | H | B4, src, dst);
}
void Assembler::ldrsb(Register dst, const MemOperand& src, Condition cond) {
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ ldrsb_thumb(dst, src);
+ return;
+ }
addrmod3(cond | L | B7 | S6 | B4, dst, src);
}
void Assembler::ldrsh(Register dst, const MemOperand& src, Condition cond) {
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ ldrsh_thumb(dst, src);
+ return;
+ }
addrmod3(cond | L | B7 | S6 | H | B4, dst, src);
}
@@ -1901,6 +2126,12 @@ void Assembler::ldrd(Register dst1, Register dst2,
ASSERT(src.rm().is(no_reg));
ASSERT(!dst1.is(lr)); // r14.
ASSERT_EQ(0, dst1.code() % 2);
+ if (is_thumb_mode()) {
+ ASSERT(cond == al);
+ ASSERT(!src.rm_.is_valid()); // Immediate.
+ ldrd_imm_t1(dst1, dst2, src);
+ return;
+ }
ASSERT_EQ(dst1.code() + 1, dst2.code());
addrmod3(cond | B7 | B6 | B4, dst1, src);
}
@@ -1911,8 +2142,14 @@ void Assembler::strd(Register src1, Register src2,
ASSERT(dst.rm().is(no_reg));
ASSERT(!src1.is(lr)); // r14.
ASSERT_EQ(0, src1.code() % 2);
- ASSERT_EQ(src1.code() + 1, src2.code());
ASSERT(IsEnabled(ARMv7));
+ if (is_thumb_mode()) {
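+ // Thumb-2 STRD accepts any register pair; the consecutive-pair
+ // requirement below applies only to the ARM encoding.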
+ ASSERT(cond == al);
+ ASSERT(!dst.rm_.is_valid()); // Immediate.
+ strd_imm_t1(src1, src2, dst);
+ return;
+ }
+ ASSERT_EQ(src1.code() + 1, src2.code());
addrmod3(cond | B7 | B6 | B5 | B4, src1, dst);
}
@@ -1943,7 +2180,10 @@ void Assembler::ldm(BlockAddrMode am,
Condition cond) {
// ABI stack constraint: ldmxx base, {..sp..} base != sp is not restartable.
ASSERT(base.is(sp) || (dst & sp.bit()) == 0);
-
+ if (is_thumb_mode()) {
+ ldm_thumb(am, base, dst, cond);
+ return;
+ }
addrmod4(cond | B27 | am | L, base, dst);
// Emit the constant pool after a function return implemented by ldm ..{..pc}.
@@ -1962,6 +2202,10 @@ void Assembler::stm(BlockAddrMode am,
Register base,
RegList src,
Condition cond) {
+ if (is_thumb_mode()) {
+ stm_thumb(am, base, src, cond);
+ return;
+ }
addrmod4(cond | B27 | am, base, src);
}
@@ -1997,6 +2241,11 @@ void Assembler::stop(const char* msg, Condition cond, int32_t code) {
void Assembler::bkpt(uint32_t imm16) { // v5 and above
+ if (is_thumb_mode()) {
+ ASSERT(is_uint8(imm16));
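+ // BKPT T1: 0xBE00 | imm8.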
+ emit16(B15 | B13 | B12 | B11 | B10 | B9 | (imm16 & 0xFF));
+ return;
+ }
ASSERT(is_uint16(imm16));
emit(al | B24 | B21 | (imm16 >> 4)*B8 | BKPT | (imm16 & 0xf));
}
@@ -2539,6 +2788,12 @@ void Assembler::vmov(const DwVfpRegister dst,
ASSERT(index.index == 0 || index.index == 1);
int vd, d;
dst.split_code(&vd, &d);
+ if (is_thumb_mode() && cond != al) {
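+ // Thumb-2 VFP encodings have no condition field; predicate the
+ // instruction with a single-instruction IT block instead.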
+ it_thumb(cond, 1);
+ emit(al | 0xE*B24 | index.index*B21 | vd*B16 | src.code()*B12 | 0xB*B8 |
+ d*B7 | B4);
+ return;
+ }
emit(cond | 0xE*B24 | index.index*B21 | vd*B16 | src.code()*B12 | 0xB*B8 |
d*B7 | B4);
}
@@ -2816,7 +3071,12 @@ void Assembler::vneg(const DwVfpRegister dst,
dst.split_code(&vd, &d);
int vm, m;
src.split_code(&vm, &m);
-
+ if (is_thumb_mode() && cond != al) {
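+ // As in vmov above: predicate via a single-instruction IT block.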
+ it_thumb(cond, 1);
+ emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | B16 | vd*B12 | 0x5*B9 | B8 | B6 |
+ m*B5 | vm);
+ return;
+ }
emit(cond | 0x1D*B23 | d*B22 | 0x3*B20 | B16 | vd*B12 | 0x5*B9 | B8 | B6 |
m*B5 | vm);
}
@@ -3068,6 +3328,11 @@ void Assembler::nop(int type) {
// We therefore use MOV Rx, Rx, even on newer CPUs, and use Rx to encode
// a type.
ASSERT(0 <= type && type <= 14); // mov pc, pc isn't a nop.
+ if (is_thumb_mode()) {
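+ // Encode as Thumb MOV Rx, Rx (T1: 0x4600 | D*B7 | Rm*B3 | Rd).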
+ uint16_t d = type >> 3;
+ emit16(4*B12 | 6*B8 | d*B7 | type*B3 | (type & 7));
+ return;
+ }
emit(al | 13*B21 | type*B12 | type);
}
@@ -3088,6 +3353,16 @@ bool Assembler::IsMovW(Instr instr) {
}
+bool Assembler::IsMovTThumb(Instr instr) {
+ return ((instr & ~MOVW_THUMB_IMM_MASK) == (MOVW_THUMB_MASK | BH7));
+}
+
+
+bool Assembler::IsMovWThumb(Instr instr) {
+ return ((instr & ~MOVW_THUMB_IMM_MASK) == MOVW_THUMB_MASK);
+}
+
+
bool Assembler::IsNop(Instr instr, int type) {
ASSERT(0 <= type && type <= 14); // mov pc, pc isn't a nop.
// Check for mov rx, rx where x = type.
@@ -3379,8 +3654,13 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
// Put down constant pool marker "Undefined instruction".
// The data size helps disassembly know what to print.
- emit(kConstantPoolMarker |
- EncodeConstantPoolLength(size_after_marker / kPointerSize));
+ if (is_thumb_mode()) {
+ emit32(kConstantPoolMarker |
+ EncodeConstantPoolLength(size_after_marker / kPointerSize));
+ } else {
+ emit(kConstantPoolMarker |
+ EncodeConstantPoolLength(size_after_marker / kPointerSize));
+ }
if (require_64_bit_align) {
emit(kConstantPoolMarker);
@@ -3428,6 +3708,23 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
continue;
}
+ if (is_thumb_mode()) {
+ Instr instr = thumb32_instr_at(rinfo.pc());
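+ // Thumb LDR (literal) addresses relative to Align(PC, 4), where the
+ // PC reads as the load's address plus 4.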
+ int thumb_ldr_delta = 2;
+ if ((reinterpret_cast<intptr_t>(rinfo.pc()) & 3) == 0) {
+ thumb_ldr_delta = 4;
+ }
+ int delta = pc_ - rinfo.pc() - thumb_ldr_delta;
+ ASSERT(is_uint12(delta));
+ instr &= ~kOff12Mask;
+ instr |= delta;
+ thumb32_instr_at_put(rinfo.pc(), instr);
+ set_arm_mode();
+ emit(rinfo.data());
+ set_thumb_mode();
+ continue;
+ }
+
Instr instr = instr_at(rinfo.pc());
// 64-bit loads shouldn't get here.