Index: src/arm/assembler-thumb.cc
diff --git a/src/arm/assembler-thumb.cc b/src/arm/assembler-thumb.cc
new file mode 100644
index 0000000000000000000000000000000000000000..8d07222097add7255f23e55ffbacd1a5555e2ab4
--- /dev/null
+++ b/src/arm/assembler-thumb.cc
@@ -0,0 +1,733 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#if defined(V8_TARGET_ARCH_ARM)
+
+#include "arm/assembler-arm-inl.h"
+#include "serialize.h"
+
+namespace v8 {
+namespace internal {
+
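+// Each *_thumb helper selects the narrowest available encoding: a 16-bit
+// Thumb form when the operands are low registers (r0-r7) and the S-bit
+// matches what that form provides, otherwise a 32-bit Thumb-2 form,
+// falling back to materializing large immediates in ip.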
+void Assembler::add_thumb(Register dst, Register src1, const Operand& src2,
+                          SBit s, Condition cond) {
+  ASSERT(cond == al);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    if (s == LeaveCC) {
+      if (is_uint12(src2.imm32_)) {
+        add_imm_t4(dst, src1, src2, s, cond);
+        return;
+      }
+    } else {
+      if (is_uint3(src2.imm32_) && are_low_reg(dst, src1)) {
+        emit16(thumb16_mode1(ADD_IMM_1) |
+               thumb16_2lowreg_imm3_encoding(dst, src1, src2));
+        return;
+      } else if (is_uint8(src2.imm32_) && dst.code() == src1.code()) {
+        emit16(thumb16_mode1(ADD_IMM_2) |
+               thumb16_lowreg_imm8_encoding(dst, src2));
+        return;
+      }
+    }
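+    // Try the Thumb-2 modified immediate encoding (i:imm3:imm8).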
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      add_imm_t3(dst, src1, s, cond, i, imm3, imm8);
+      return;
+    }
+    // Immediate too large for a single Thumb instruction; build it in ip.
+    mov_thumb(ip, src2, LeaveCC, al);
+    add_thumb(dst, src1, Operand(ip), s, al);
+    return;
+  } else if (src2.shift_imm_ == 0) {
+    // Register.
+    if (s == SetCC && are_low_reg(dst, src1) && is_low_reg(src2.rm_)) {
+      emit16(thumb16_mode1(ADD_REG_1) |
+             thumb16_3lowreg_encoding(dst, src1, src2));
+      return;
+    } else if (s == LeaveCC && dst.code() == src1.code()) {
+      emit16(thumb16_mode3(ADD_REG_2) |
+             thumb16_2anyreg_encoding(dst, src2));
+      return;
+    }
+  }
+  add_reg_t3(dst, src1, src2, s, cond);
+}
+
+
+void Assembler::sub_thumb(Register dst, Register src1, const Operand& src2,
+                          SBit s, Condition cond) {
+  emit_it(cond);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    if (s == LeaveCC) {
+      if (is_uint12(src2.imm32_)) {
+        sub_imm_t4(dst, src1, src2, s, cond);
+        return;
+      }
+    } else {
+      if (is_uint3(src2.imm32_) && are_low_reg(dst, src1)) {
+        emit16(thumb16_mode1(SUB_IMM_1) |
+               thumb16_2lowreg_imm3_encoding(dst, src1, src2));
+        return;
+      } else if (is_uint8(src2.imm32_) && dst.code() == src1.code()) {
+        emit16(thumb16_mode1(SUB_IMM_2) |
+               thumb16_lowreg_imm8_encoding(dst, src2));
+        return;
+      }
+    }
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      sub_imm_t3(dst, src1, s, cond, i, imm3, imm8);
+      return;
+    }
+    ASSERT(cond == al);
+    mov_thumb(ip, src2, LeaveCC, al);
+    sub_thumb(dst, src1, Operand(ip), s, al);
+    return;
+  } else {
+    // Register.
+    if (s == SetCC && are_low_reg(dst, src1) && is_low_reg(src2.rm_)) {
+      emit16(thumb16_mode1(SUB_REG) |
+             thumb16_3lowreg_encoding(dst, src1, src2));
+      return;
+    } else {
+      sub_reg_t3(dst, src1, src2, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
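+// mov with a shifted-register operand is an alias for the corresponding
+// shift instruction, so those cases are delegated to the shift helpers.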
+void Assembler::mov_thumb(Register dst, const Operand& src, SBit s,
+                          Condition cond) {
+  emit_it(cond);
+  if (!src.rm_.is_valid()) {
+    // Immediate.
+    if (is_uint8(src.imm32_) && is_low_reg(dst) && s == SetCC) {
+      emit16(thumb16_mode1(MOV_IMM) |
+             thumb16_lowreg_imm8_encoding(dst, src));
+      return;
+    } else {
+      if (is_uint16(src.imm32_) && s == LeaveCC) {
+        mov_imm_t3(dst, src, s, cond);
+        return;
+      } else {
+        uint32_t i, imm3, imm8;
+        if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8) &&
+            !src.must_output_reloc_info(this)) {
+          mov_imm_t2(dst, s, cond, i, imm3, imm8);
+          return;
+        } else {
+          // TODO(rkrithiv): perform 32-bit imm move
+          UNREACHABLE();
+          return;
+        }
+      }
+    }
+  } else {
+    // Register.
+    if (src.rs_.is_valid() || src.shift_imm_ != 0) {
+      switch (src.shift_op_) {
+        case LSL: lsl_thumb(dst, src, s, cond);
+          return;
+        case LSR: lsr_thumb(dst, src, s, cond);
+          return;
+        case ASR: asr_thumb(dst, src, s, cond);
+          return;
+        case ROR: ror_thumb(dst, src, s, cond);
+          return;
+        case RRX:
+        default: UNREACHABLE();
+      }
+      return;
+    }
+    if (s == LeaveCC) {
+      emit16(thumb16_mode3(MOV_REG_1) |
+             thumb16_2anyreg_encoding(dst, src));
+      return;
+    } else if (are_low_reg(dst, src.rm_)) {
+      // Note: the MOV_REG_2 opcode bits are zero, so no mode call is needed.
+      emit16(thumb16_2lowreg_encoding(dst, src));
+      return;
+    } else {
+      mov_reg_t3(dst, src, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::teq_thumb(Register dst, const Operand& src, Condition cond) {
+  emit_it(cond);
+  if (!src.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8)) {
+      teq_imm_t1(dst, cond, i, imm3, imm8);
+      return;
+    } else {
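+      // The IT prefix emitted on entry must immediately precede the final
+      // TEQ, so back it out, materialize the immediate in ip, and re-issue
+      // it before recursing with a register operand.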
+      pc_ -= kInstr16Size;
+      mov_thumb(ip, src, LeaveCC, cond);
+      emit_it(cond);
+      teq_thumb(dst, Operand(ip), cond);
+      return;
+    }
+  } else if (src.shift_imm_ == 0) {
+    // Register.
+    teq_reg_t1(dst, src, cond);
+    return;
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::cmp_thumb(Register dst, const Operand& src, Condition cond) {
+  emit_it(cond);
+  if (!src.rm_.is_valid()) {
+    // Immediate.
+    if (is_uint8(src.imm32_) && is_low_reg(dst)) {
+      emit16(thumb16_mode1(CMP_IMM) |
+             thumb16_lowreg_imm8_encoding(dst, src));
+      return;
+    } else {
+      uint32_t i, imm3, imm8;
+      if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8)) {
+        cmp_imm_t2(dst, cond, i, imm3, imm8);
+        return;
+      } else {
+        mov_thumb(ip, src, LeaveCC, al);
+        cmp_thumb(dst, Operand(ip), al);
+        return;
+      }
+    }
+  } else if (src.shift_imm_ == 0) {
+    // Register.
+    if (are_low_reg(dst, src.rm_)) {
+      emit16(thumb16_mode2(CMP_REG_1) |
+             thumb16_2lowreg_encoding(dst, src));
+      return;
+    } else if ((dst.code() > 7 || src.rm_.code() > 7) &&
+               (dst.code() != 15 && src.rm_.code() != 15)) {
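+      // 16-bit CMP (encoding T2) accepts high registers but not pc.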
+      emit16(thumb16_mode3(CMP_REG_2) |
+             thumb16_2anyreg_encoding(dst, src));
+      return;
+    } else {  // 32-bit; shift decoded via DecodeImmShift(type, imm3:imm2).
+      cmp_reg_t3(dst, src, cond);
+      return;
+    }
+  } else {  // (src.shift_imm_ != 0)
+    // Register.
+    cmp_reg_t3(dst, src, cond);
+    return;
+  }
+  UNREACHABLE();
+}
+
+
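+// Shift helpers: the 16-bit immediate forms always set the flags, so they
+// require SetCC as well as low registers; everything else falls through to
+// the 32-bit T2 encodings.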
+void Assembler::lsl_thumb(Register dst, const Operand& src, SBit s,
+                          Condition cond) {
+  ASSERT(cond == al);
+  if (!src.rs_.is_valid()) {
+    if (is_uint5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) {
+      emit16(thumb16_mode1(LSL_IMM) |
+             thumb16_2lowreg_imm5_encoding(dst, src.rm_, src));
+      return;
+    } else {
+      lsl_imm_t2(dst, src, s, cond);
+      return;
+    }
+  } else {
+    // Register-shifted register: shift amount in src.rs_.
+    if (s == SetCC && dst.code() == src.rm_.code() &&
+        are_low_reg(dst, src.rs_)) {
+      // Register 16
+      emit16(thumb16_mode2(LSL_REG) |
+             thumb16_2lowreg_encoding(dst, src.rs_));
+      return;
+    } else {
+      // Register 32
+      lsl_reg_t2(dst, src, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::lsr_thumb(Register dst, const Operand& src, SBit s,
+                          Condition cond) {
+  ASSERT(cond == al);
+  if (!src.rs_.is_valid()) {
+    // Immediate
+    if (is_uint5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) {
+      // Immediate 16
+      emit16(thumb16_mode1(LSR_IMM) |
+             thumb16_2lowreg_imm5_encoding(dst, src.rm_, src));
+      return;
+    } else {
+      // Immediate 32
+      lsr_imm_t2(dst, src, s, cond);
+      return;
+    }
+  } else {
+    if (s == SetCC && dst.code() == src.rm_.code() &&
+        are_low_reg(dst, src.rs_)) {
+      emit16(thumb16_mode2(LSR_REG) |
+             thumb16_2lowreg_encoding(dst, src.rs_));
+      return;
+    } else {
+      lsr_reg_t2(dst, src, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::ror_thumb(Register dst, const Operand& src, SBit s,
+                          Condition cond) {
+  ASSERT(cond == al);
+  if (!src.rs_.is_valid()) {
+    // Immediate. Thumb has no 16-bit ROR-immediate encoding, so the
+    // 32-bit T2 form is always used.
+    ror_imm_t2(dst, src, s, cond);
+    return;
+  } else {
+    if (s == SetCC && dst.code() == src.rm_.code() &&
+        are_low_reg(dst, src.rs_)) {
+      emit16(thumb16_mode2(ROR_REG) |
+             thumb16_2lowreg_encoding(dst, src.rs_));
+      return;
+    } else {
+      ror_reg_t2(dst, src, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::asr_thumb(Register dst, const Operand& src, SBit s,
+                          Condition cond) {
+  if (!src.rs_.is_valid()) {
+    // Immediate
+    if (is_uint5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) {
+      // Immediate 16
+      emit16(thumb16_mode1(ASR_IMM) |
+             thumb16_2lowreg_imm5_encoding(dst, src.rm_, src));
+      return;
+    } else {
+      // Immediate 32
+      asr_imm_t2(dst, src, s, cond);
+      return;
+    }
+  } else {
+    // Register
+    if (s == SetCC && dst.code() == src.rm_.code() &&
+        are_low_reg(dst, src.rs_)) {
+      // Register 16
+      emit16(thumb16_mode2(ASR_REG) |
+             thumb16_2lowreg_encoding(dst, src.rs_));
+      return;
+    } else {
+      asr_reg_t2(dst, src, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
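+// When the mask is negative and its complement fits the modified-immediate
+// encoding, AND is emitted as BIC with the inverted immediate.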
+void Assembler::and_thumb(Register dst, Register src1, const Operand& src2,
+                          SBit s, Condition cond) {
+  ASSERT(cond == al);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (src2.imm32_ < 0 && thumb_expand_imm(~src2.imm32_, &i, &imm3, &imm8)) {
+      bic_imm_t1(dst, src1, s, cond, i, imm3, imm8);
+      return;
+    } else if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      and_imm_t1(dst, src1, s, cond, i, imm3, imm8);
+      return;
+    }
+    mov_thumb(ip, src2, LeaveCC, al);
+    and_thumb(dst, src1, Operand(ip), s, al);
+    return;
+  } else {
+    // Register.
+    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
+        s == SetCC && src2.shift_imm_ == 0) {
+      emit16(thumb16_mode2(AND_REG) |
+             thumb16_2lowreg_encoding(dst, src2));
+      return;
+    } else {
+      and_reg_t2(dst, src1, src2, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::eor_thumb(Register dst, Register src1, const Operand& src2,
+                          SBit s, Condition cond) {
+  ASSERT(cond == al);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      eor_imm_t1(dst, src1, s, cond, i, imm3, imm8);
+      return;
+    }
+    mov_thumb(ip, src2, LeaveCC, al);
+    eor_thumb(dst, src1, Operand(ip), s, al);
+    return;
+  } else {
+    // Register.
+    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
+        s == SetCC && src2.shift_imm_ == 0) {
+      emit16(thumb16_mode2(EOR_REG) |
+             thumb16_2lowreg_encoding(dst, src2));
+      return;
+    } else {
+      eor_reg_t2(dst, src1, src2, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::adc_thumb(Register dst, Register src1, const Operand& src2,
+                          SBit s, Condition cond) {
+  ASSERT(cond == al);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      adc_imm_t1(dst, src1, s, cond, i, imm3, imm8);
+      return;
+    }
+    mov_thumb(ip, src2, LeaveCC, al);
+    adc_thumb(dst, src1, Operand(ip), s, al);
+    return;
+  } else {
+    // Register.
+    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
+        s == SetCC && src2.shift_imm_ == 0) {
+      emit16(thumb16_mode2(ADC_REG) |
+             thumb16_2lowreg_encoding(dst, src2));
+      return;
+    } else {
+      adc_reg_t2(dst, src1, src2, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::sbc_thumb(Register dst, Register src1, const Operand& src2,
+                          SBit s, Condition cond) {
+  ASSERT(cond == al);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      sbc_imm_t1(dst, src1, s, cond, i, imm3, imm8);
+      return;
+    }
+    mov_thumb(ip, src2, LeaveCC, al);
+    sbc_thumb(dst, src1, Operand(ip), s, al);
+    return;
+  } else {
+    // Register.
+    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
+        s == SetCC && src2.shift_imm_ == 0) {
+      emit16(thumb16_mode2(SBC_REG) |
+             thumb16_2lowreg_encoding(dst, src2));
+      return;
+    } else {
+      sbc_reg_t2(dst, src1, src2, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
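+// An immediate of zero selects the 16-bit NEG form (RSBS rd, rn, #0).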
+void Assembler::rsb_thumb(Register dst, Register src1, const Operand& src2,
+                          SBit s, Condition cond) {
+  emit_it(cond);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    if (src2.imm32_ == 0 && are_low_reg(dst, src1)) {
+      emit16(thumb16_mode2(RSB_IMM) |
+             thumb16_2lowreg_encoding(dst, src1));
+      return;
+    } else {
+      uint32_t i, imm3, imm8;
+      if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+        rsb_imm_t2(dst, src1, s, cond, i, imm3, imm8);
+        return;
+      }
+      ASSERT(cond == al);
+      mov_thumb(ip, src2, LeaveCC, al);
+      rsb_thumb(dst, src1, Operand(ip), s, al);
+      return;
+    }
+  } else {
+    // Register.
+    rsb_reg_t1(dst, src1, src2, s, cond);
+    return;
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::tst_thumb(Register src1, const Operand& src2, Condition cond) {
+  emit_it(cond);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      tst_imm_t1(src1, cond, i, imm3, imm8);
+      return;
+    }
+    ASSERT(cond == al);
+    mov_thumb(ip, src2, LeaveCC, al);
+    tst_thumb(src1, Operand(ip), al);
+    return;
+  } else {
+    // Register.
+    if (are_low_reg(src1, src2.rm_) && src2.shift_imm_ == 0) {
+      emit16(thumb16_mode2(TST_REG) |
+             thumb16_2lowreg_encoding(src1, src2));
+      return;
+    } else {
+      tst_reg_t2(src1, src2, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::cmn_thumb(Register src1, const Operand& src2, Condition cond) {
+  ASSERT(cond == al);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      cmn_imm_t1(src1, cond, i, imm3, imm8);
+      return;
+    }
+    mov_thumb(ip, src2, LeaveCC, al);
+    cmn_thumb(src1, Operand(ip), al);
+    return;
+  } else {
+    // Register.
+    if (are_low_reg(src1, src2.rm_) && src2.shift_imm_ == 0) {
+      emit16(thumb16_mode2(CMN_REG) |
+             thumb16_2lowreg_encoding(src1, src2));
+      return;
+    } else {
+      cmn_reg_t2(src1, src2, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::bic_thumb(Register dst, Register src1, const Operand& src2,
+                          SBit s, Condition cond) {
+  ASSERT(cond == al);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      bic_imm_t1(dst, src1, s, cond, i, imm3, imm8);
+      return;
+    }
+    mov_thumb(ip, src2, LeaveCC, al);
+    bic_thumb(dst, src1, Operand(ip), s, al);
+    return;
+  } else {
+    // Register.
+    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
+        s == SetCC && src2.shift_imm_ == 0) {
+      emit16(thumb16_mode2(BIC_REG) |
+             thumb16_2lowreg_encoding(dst, src2));
+      return;
+    } else {
+      bic_reg_t2(dst, src1, src2, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
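+// The 16-bit MULS encoding requires the destination to equal one of the
+// sources; since multiplication is commutative, both operand orders are
+// tried before falling back to the 32-bit MUL.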
+void Assembler::mul_thumb(Register dst, Register src1, Register src2,
+                          SBit s, Condition cond) {
+  ASSERT(cond == al);
+  if (dst.code() == src2.code() && are_low_reg(src1, src2) && s == SetCC) {
+    emit16(thumb16_mode2(MUL_REG) |
+           thumb16_2lowreg_encoding(dst, src1));
+    return;
+  } else if (dst.code() == src1.code() && are_low_reg(src1, src2) &&
+             s == SetCC) {
+    emit16(thumb16_mode2(MUL_REG) |
+           thumb16_2lowreg_encoding(dst, src2));
+    return;
+  } else {
+    mul_t2(dst, src1, src2, s, cond);
+    return;
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::mvn_thumb(Register dst, const Operand& src, SBit s,
+                          Condition cond) {
+  ASSERT(cond == al);
+  if (!src.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8)) {
+      mvn_imm_t1(dst, s, cond, i, imm3, imm8);
+      return;
+    }
+    // rsb #0 would compute -src rather than ~src; materialize the value
+    // in ip and use the register form of mvn instead.
+    mov_thumb(ip, src, LeaveCC, al);
+    mvn_thumb(dst, Operand(ip), s, al);
+    return;
+  } else {
+    // Register.
+    if (are_low_reg(dst, src.rm_) && s == SetCC && src.shift_imm_ == 0) {
+      emit16(thumb16_mode2(MVN_REG) |
+             thumb16_2anyreg_encoding(dst, src));
+      return;
+    } else {
+      mvn_reg_t2(dst, src, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
+void Assembler::orr_thumb(Register dst, Register src1, const Operand& src2,
+                          SBit s, Condition cond) {
+  ASSERT(cond == al);
+  if (!src2.rm_.is_valid()) {
+    // Immediate.
+    uint32_t i, imm3, imm8;
+    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
+      orr_imm_t1(dst, src1, s, cond, i, imm3, imm8);
+      return;
+    }
+    mov_thumb(ip, src2, LeaveCC, al);
+    orr_thumb(dst, src1, Operand(ip), s, al);
+    return;
+  } else {
+    // Register.
+    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
+        s == SetCC && src2.shift_imm_ == 0) {
+      emit16(thumb16_mode2(ORR_REG) |
+             thumb16_2lowreg_encoding(dst, src2));
+      return;
+    } else if (src2.rs_.is_valid()) {
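+      // Thumb-2 has no ORR with a register-shifted register: shift into
+      // dst first, then OR with src1 (dst must not alias src1).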
+      ASSERT(src2.shift_op_ == LSL);
+      lsl_thumb(dst, src2, s, cond);
+      orr_thumb(dst, src1, Operand(dst), s, cond);
+      return;
+    } else {
+      orr_reg_t2(dst, src1, src2, s, cond);
+      return;
+    }
+  }
+  UNREACHABLE();
+}
+
+
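+// Emits an IT/ITE prefix (0xBF00 | firstcond << 4 | mask). Only blocks of
+// one or two instructions are supported here; cond3 and cond4 are accepted
+// but currently unused.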
+void Assembler::it_thumb(Condition cond, int num_instr, bool cond2,
+                         bool cond3, bool cond4) {
+  ASSERT(cond != al);
+  uint16_t mask = 0;
+  uint16_t c = (static_cast<int>(cond) >> 28) & 0xf;
+  if (num_instr == 2) {
+    if (!cond2) {
+      mask |= (!(c & 1)) << 3;
+    } else {
+      mask |= (c & 1) << 3;
+    }
+    mask |= 4;
+    emit16(11*B12 | 15*B8 | (c << 4) | mask);
+  } else {
+    ASSERT(num_instr == 1);
+    mask = 8;
+    emit16(11*B12 | 15*B8 | (c << 4) | mask);
+  }
+}
+
+} }  // namespace v8::internal
+
+#endif  // V8_TARGET_ARCH_ARM