// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "arm/assembler-arm-inl.h"
#include "serialize.h"

namespace v8 {
namespace internal {

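// Emits an ADD of src1 and src2 into dst, picking the narrowest available
// encoding: a 16-bit Thumb form when the operands allow it, a 32-bit
// Thumb-2 form otherwise. Immediates that ThumbExpandImm cannot represent
// are first materialized in the scratch register ip.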
void Assembler::add_thumb(Register dst, Register src1, const Operand& src2,
                          SBit s, Condition cond) {
  ASSERT(cond == al);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    if (s == LeaveCC) {
      if (is_uint12(src2.imm32_)) {
        add_imm_t4(dst, src1, src2, s, cond);
        return;
      }
    } else {
      if (is_uint3(src2.imm32_) && are_low_reg(dst, src1)) {
        emit16(thumb16_mode1(ADD_IMM_1) |
               thumb16_2lowreg_imm3_encoding(dst, src1, src2));
        return;
      } else if (is_uint8(src2.imm32_) && dst.code() == src1.code()) {
        emit16(thumb16_mode1(ADD_IMM_2) |
               thumb16_lowreg_imm8_encoding(dst, src2));
        return;
      }
    }
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      add_imm_t3(dst, src1, s, cond, i, imm3, imm8);
      return;
    }
    // Immediate too big for a single Thumb instruction.
    mov_thumb(ip, src2, LeaveCC, al);
    add_thumb(dst, src1, Operand(ip), s, al);
    return;
  } else if (src2.shift_imm_ == 0) {
    // Register.
    if (s == SetCC && are_low_reg(dst, src1) && is_low_reg(src2.rm_)) {
      emit16(thumb16_mode1(ADD_REG_1) |
             thumb16_3lowreg_encoding(dst, src1, src2));
      return;
    } else if (s == LeaveCC && dst.code() == src1.code()) {
      emit16(thumb16_mode3(ADD_REG_2) |
             thumb16_2anyreg_encoding(dst, src2));
      return;
    }
  }
  add_reg_t3(dst, src1, src2, s, cond);
}


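// Emits a SUB, mirroring the encoding selection in add_thumb above. Unlike
// add_thumb, a leading IT instruction is emitted so the operation can be
// conditional; the ip fallback for unencodable immediates still requires
// cond == al.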
void Assembler::sub_thumb(Register dst, Register src1, const Operand& src2,
                          SBit s, Condition cond) {
  emit_it(cond);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    if (s == LeaveCC) {
      if (is_uint12(src2.imm32_)) {
        sub_imm_t4(dst, src1, src2, s, cond);
        return;
      }
    } else {
      if (is_uint3(src2.imm32_) && are_low_reg(dst, src1)) {
        emit16(thumb16_mode1(SUB_IMM_1) |
               thumb16_2lowreg_imm3_encoding(dst, src1, src2));
        return;
      } else if (is_uint8(src2.imm32_) && dst.code() == src1.code()) {
        emit16(thumb16_mode1(SUB_IMM_2) |
               thumb16_lowreg_imm8_encoding(dst, src2));
        return;
      }
    }
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      sub_imm_t3(dst, src1, s, cond, i, imm3, imm8);
      return;
    }
    ASSERT(cond == al);
    mov_thumb(ip, src2, LeaveCC, al);
    sub_thumb(dst, src1, Operand(ip), s, al);
    return;
  } else {
    // Register.
    if (s == SetCC && are_low_reg(dst, src1) && is_low_reg(src2.rm_)) {
      emit16(thumb16_mode1(SUB_REG) |
             thumb16_3lowreg_encoding(dst, src1, src2));
      return;
    } else {
      sub_reg_t3(dst, src1, src2, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


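// Emits a MOV. Shifted-register operands are delegated to the matching
// shift emitter (lsl/lsr/asr/ror); RRX is not supported here. An immediate
// that needs relocation info or cannot be expanded falls through to the
// TODO/UNREACHABLE path below.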
void Assembler::mov_thumb(Register dst, const Operand& src, SBit s,
                          Condition cond) {
  emit_it(cond);
  if (!src.rm_.is_valid()) {
    // Immediate.
    if (is_uint8(src.imm32_) && is_low_reg(dst) && s == SetCC) {
      emit16(thumb16_mode1(MOV_IMM) |
             thumb16_lowreg_imm8_encoding(dst, src));
      return;
    } else {
      if (is_uint16(src.imm32_) && s == LeaveCC) {
        mov_imm_t3(dst, src, s, cond);
        return;
      } else {
        uint32_t i, imm3, imm8;
        if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8) &&
            !src.must_output_reloc_info(this)) {
          mov_imm_t2(dst, s, cond, i, imm3, imm8);
          return;
        } else {
          // TODO(rkrithiv): perform 32-bit imm move
          UNREACHABLE();
          return;
        }
      }
    }
  } else {
    // Register.
    if (src.rs_.is_valid() || src.shift_imm_ != 0) {
      switch (src.shift_op_) {
        case LSL:
          lsl_thumb(dst, src, s, cond);
          return;
        case LSR:
          lsr_thumb(dst, src, s, cond);
          return;
        case ASR:
          asr_thumb(dst, src, s, cond);
          return;
        case ROR:
          ror_thumb(dst, src, s, cond);
          return;
        case RRX:
        default:
          UNREACHABLE();
      }
      return;
    }
    if (s == LeaveCC) {
      emit16(thumb16_mode3(MOV_REG_1) |
             thumb16_2anyreg_encoding(dst, src));
      return;
    } else if (are_low_reg(dst, src.rm_)) {
      // Note: MOV_REG_2 is 0, so call not needed.
      emit16(thumb16_2lowreg_encoding(dst, src));
      return;
    } else {
      mov_reg_t3(dst, src, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


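// Emits a TEQ (flag-setting exclusive-OR compare). When the immediate is
// not encodable, the just-emitted IT is rewound (pc_ -= kInstr16Size) so
// the value can be moved into ip before the conditional compare.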
void Assembler::teq_thumb(Register dst, const Operand& src, Condition cond) {
  emit_it(cond);
  if (!src.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8)) {
      teq_imm_t1(dst, cond, i, imm3, imm8);
      return;
    } else {
      pc_ -= kInstr16Size;
      mov_thumb(ip, src, LeaveCC, cond);
      emit_it(cond);
      teq_thumb(dst, Operand(ip), cond);
      return;
    }
  } else if (src.shift_imm_ == 0) {
    // Register.
    teq_reg_t1(dst, src, cond);
    return;
  }
  UNREACHABLE();
}


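// Emits a CMP. Low registers get the 16-bit T1 immediate/register forms; a
// register compare involving a high register (but not pc) uses the 16-bit
// T2 form; everything else falls back to a 32-bit encoding.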
void Assembler::cmp_thumb(Register dst, const Operand& src, Condition cond) {
  emit_it(cond);
  if (!src.rm_.is_valid()) {
    // Immediate.
    if (is_uint8(src.imm32_) && is_low_reg(dst)) {
      emit16(thumb16_mode1(CMP_IMM) |
             thumb16_lowreg_imm8_encoding(dst, src));
      return;
    } else {
      uint32_t i, imm3, imm8;
      if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8)) {
        cmp_imm_t2(dst, cond, i, imm3, imm8);
        return;
      } else {
        mov_thumb(ip, src, LeaveCC, al);
        cmp_thumb(dst, Operand(ip), al);
        return;
      }
    }
  } else if (src.shift_imm_ == 0) {
    // Register.
    if (are_low_reg(dst, src.rm_)) {
      emit16(thumb16_mode2(CMP_REG_1) |
             thumb16_2lowreg_encoding(dst, src));
      return;
    } else if ((dst.code() > 7 || src.rm_.code() > 7) &&
               (dst.code() != 15 && src.rm_.code() != 15)) {
      emit16(thumb16_mode3(CMP_REG_2) |
             thumb16_2anyreg_encoding(dst, src));
      return;
    } else {  // DecodeImmShift(type, imm3:imm2)
      cmp_reg_t3(dst, src, cond);
      return;
    }
  } else {  // (src.shift_imm_ != 0)
    // Register.
    cmp_reg_t3(dst, src, cond);
    return;
  }
  UNREACHABLE();
}


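// The four shift emitters below share one pattern: a 16-bit encoding when
// the shift sets the flags and only low registers are involved, otherwise
// the 32-bit T2 form. For register-specified shifts the 16-bit form also
// requires dst == src.rm_.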
void Assembler::lsl_thumb(Register dst, const Operand& src, SBit s,
                          Condition cond) {
  ASSERT(cond == al);
  if (!src.rs_.is_valid()) {
    // Immediate. Shift amounts are unsigned, 0..31, hence is_uint5.
    if (is_uint5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) {
      emit16(thumb16_mode1(LSL_IMM) |
             thumb16_2lowreg_imm5_encoding(dst, src.rm_, src));
      return;
    } else {
      lsl_imm_t2(dst, src, s, cond);
      return;
    }
  } else {
    // Register src{rm rs, shift_imm}
    if (s == SetCC && dst.code() == src.rm_.code() &&
        are_low_reg(dst, src.rs_)) {
      // Register 16
      emit16(thumb16_mode2(LSL_REG) |
             thumb16_2lowreg_encoding(dst, src.rs_));
      return;
    } else {
      // Register 32
      lsl_reg_t2(dst, src, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


void Assembler::lsr_thumb(Register dst, const Operand& src, SBit s,
                          Condition cond) {
  ASSERT(cond == al);
  if (!src.rs_.is_valid()) {
    // Immediate
    if (is_uint5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) {
      // Immediate 16
      emit16(thumb16_mode1(LSR_IMM) |
             thumb16_2lowreg_imm5_encoding(dst, src.rm_, src));
      return;
    } else {
      // Immediate 32
      lsr_imm_t2(dst, src, s, cond);
      return;
    }
  } else {
    if (s == SetCC && dst.code() == src.rm_.code() &&
        are_low_reg(dst, src.rs_)) {
      emit16(thumb16_mode2(LSR_REG) |
             thumb16_2lowreg_encoding(dst, src.rs_));
      return;
    } else {
      lsr_reg_t2(dst, src, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


void Assembler::ror_thumb(Register dst, const Operand& src, SBit s,
                          Condition cond) {
  ASSERT(cond == al);
  if (!src.rs_.is_valid()) {
    // Immediate
    if (is_uint5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) {
      // Immediate 16
      emit16(thumb16_mode1(ROR_IMM) |
             thumb16_2lowreg_imm5_encoding(dst, src.rm_, src));
      return;
    } else {
      // Immediate 32
      ror_imm_t2(dst, src, s, cond);
      return;
    }
  } else {
    if (s == SetCC && dst.code() == src.rm_.code() &&
        are_low_reg(dst, src.rs_)) {
      emit16(thumb16_mode2(ROR_REG) |
             thumb16_2lowreg_encoding(dst, src.rs_));
      return;
    } else {
      ror_reg_t2(dst, src, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


void Assembler::asr_thumb(Register dst, const Operand& src, SBit s,
                          Condition cond) {
  if (!src.rs_.is_valid()) {
    // Immediate
    if (is_uint5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) {
      // Immediate 16
      emit16(thumb16_mode1(ASR_IMM) |
             thumb16_2lowreg_imm5_encoding(dst, src.rm_, src));
      return;
    } else {
      // Immediate 32
      asr_imm_t2(dst, src, s, cond);
      return;
    }
  } else {
    // Register
    if (s == SetCC && dst.code() == src.rm_.code() &&
        are_low_reg(dst, src.rs_)) {
      // Register 16
      emit16(thumb16_mode2(ASR_REG) |
             thumb16_2lowreg_encoding(dst, src.rs_));
      return;
    } else {
      asr_reg_t2(dst, src, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


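// The two-register ALU ops below (and/eor/adc/sbc, plus bic and orr further
// down) use the 16-bit encoding only when dst == src1, the flags are set,
// both registers are low, and src2 carries no shift; unencodable immediates
// are materialized in ip as above. and_thumb additionally rewrites an AND
// with an immediate whose complement is encodable as a BIC of that
// complement.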
void Assembler::and_thumb(Register dst, Register src1, const Operand& src2,
                          SBit s, Condition cond) {
  ASSERT(cond == al);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (src2.imm32_ < 0 && thumb_expand_imm(~src2.imm32_, &i, &imm3, &imm8)) {
      bic_imm_t1(dst, src1, s, cond, i, imm3, imm8);
      return;
    } else if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      and_imm_t1(dst, src1, s, cond, i, imm3, imm8);
      return;
    }
    mov_thumb(ip, src2, LeaveCC, al);
    and_thumb(dst, src1, Operand(ip), s, al);
    return;
  } else {
    // Register.
    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
        s == SetCC && src2.shift_imm_ == 0) {
      emit16(thumb16_mode2(AND_REG) |
             thumb16_2lowreg_encoding(dst, src2));
      return;
    } else {
      and_reg_t2(dst, src1, src2, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


void Assembler::eor_thumb(Register dst, Register src1, const Operand& src2,
                          SBit s, Condition cond) {
  ASSERT(cond == al);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      eor_imm_t1(dst, src1, s, cond, i, imm3, imm8);
      return;
    }
    mov_thumb(ip, src2, LeaveCC, al);
    eor_thumb(dst, src1, Operand(ip), s, al);
    return;
  } else {
    // Register.
    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
        s == SetCC && src2.shift_imm_ == 0) {
      emit16(thumb16_mode2(EOR_REG) |
             thumb16_2lowreg_encoding(dst, src2));
      return;
    } else {
      eor_reg_t2(dst, src1, src2, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


void Assembler::adc_thumb(Register dst, Register src1, const Operand& src2,
                          SBit s, Condition cond) {
  ASSERT(cond == al);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      adc_imm_t1(dst, src1, s, cond, i, imm3, imm8);
      return;
    }
    mov_thumb(ip, src2, LeaveCC, al);
    adc_thumb(dst, src1, Operand(ip), s, al);
    return;
  } else {
    // Register.
    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
        s == SetCC && src2.shift_imm_ == 0) {
      emit16(thumb16_mode2(ADC_REG) |
             thumb16_2lowreg_encoding(dst, src2));
      return;
    } else {
      adc_reg_t2(dst, src1, src2, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


void Assembler::sbc_thumb(Register dst, Register src1, const Operand& src2,
                          SBit s, Condition cond) {
  ASSERT(cond == al);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      sbc_imm_t1(dst, src1, s, cond, i, imm3, imm8);
      return;
    }
    mov_thumb(ip, src2, LeaveCC, al);
    sbc_thumb(dst, src1, Operand(ip), s, al);
    return;
  } else {
    // Register.
    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
        s == SetCC && src2.shift_imm_ == 0) {
      emit16(thumb16_mode2(SBC_REG) |
             thumb16_2lowreg_encoding(dst, src2));
      return;
    } else {
      sbc_reg_t2(dst, src1, src2, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


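// Emits an RSB (reverse subtract). The 16-bit form exists only for
// rsb dst, src, #0, i.e. negation.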
void Assembler::rsb_thumb(Register dst, Register src1, const Operand& src2,
                          SBit s, Condition cond) {
  emit_it(cond);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    if (src2.imm32_ == 0 && are_low_reg(dst, src1)) {
      emit16(thumb16_mode2(RSB_IMM) |
             thumb16_2lowreg_encoding(dst, src1));
      return;
    } else {
      uint32_t i, imm3, imm8;
      if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
        rsb_imm_t2(dst, src1, s, cond, i, imm3, imm8);
        return;
      }
      ASSERT(cond == al);
      mov_thumb(ip, src2, LeaveCC, al);
      rsb_thumb(dst, src1, Operand(ip), s, al);
      return;
    }
  } else {
    // Register.
    rsb_reg_t1(dst, src1, src2, s, cond);
    return;
  }
  UNREACHABLE();
}


void Assembler::tst_thumb(Register src1, const Operand& src2, Condition cond) {
  emit_it(cond);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      tst_imm_t1(src1, cond, i, imm3, imm8);
      return;
    }
    ASSERT(cond == al);
    mov_thumb(ip, src2, LeaveCC, al);
    tst_thumb(src1, Operand(ip), al);
    return;
  } else {
    // Register.
    if (are_low_reg(src1, src2.rm_) && src2.shift_imm_ == 0) {
      emit16(thumb16_mode2(TST_REG) |
             thumb16_2lowreg_encoding(src1, src2));
      return;
    } else {
      tst_reg_t2(src1, src2, cond);
      return;
    }
  }
  UNREACHABLE();
}


void Assembler::cmn_thumb(Register src1, const Operand& src2, Condition cond) {
  ASSERT(cond == al);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      cmn_imm_t1(src1, cond, i, imm3, imm8);
      return;
    }
    mov_thumb(ip, src2, LeaveCC, al);
    cmn_thumb(src1, Operand(ip), al);
    return;
  } else {
    // Register.
    if (are_low_reg(src1, src2.rm_) && src2.shift_imm_ == 0) {
      emit16(thumb16_mode2(CMN_REG) |
             thumb16_2lowreg_encoding(src1, src2));
      return;
    } else {
      cmn_reg_t2(src1, src2, cond);
      return;
    }
  }
  UNREACHABLE();
}


void Assembler::bic_thumb(Register dst, Register src1, const Operand& src2,
                          SBit s, Condition cond) {
  ASSERT(cond == al);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      bic_imm_t1(dst, src1, s, cond, i, imm3, imm8);
      return;
    }
    mov_thumb(ip, src2, LeaveCC, al);
    bic_thumb(dst, src1, Operand(ip), s, al);
    return;
  } else {
    // Register.
    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
        s == SetCC && src2.shift_imm_ == 0) {
      emit16(thumb16_mode2(BIC_REG) |
             thumb16_2lowreg_encoding(dst, src2));
      return;
    } else {
      bic_reg_t2(dst, src1, src2, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


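// Emits a MUL. The 16-bit encoding requires flag setting, low registers,
// and dst equal to one of the operands; commutativity lets either operand
// order match before falling back to the 32-bit T2 form.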
void Assembler::mul_thumb(Register dst, Register src1, Register src2,
                          SBit s, Condition cond) {
  ASSERT(cond == al);
  if (dst.code() == src2.code() && are_low_reg(src1, src2) && s == SetCC) {
    emit16(thumb16_mode2(MUL_REG) |
           thumb16_2lowreg_encoding(dst, src1));
    return;
  } else if (dst.code() == src1.code() && are_low_reg(src1, src2) &&
             s == SetCC) {
    emit16(thumb16_mode2(MUL_REG) |
           thumb16_2lowreg_encoding(dst, src2));
    return;
  } else {
    mul_t2(dst, src1, src2, s, cond);
    return;
  }
  UNREACHABLE();
}


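// Emits an MVN (bitwise NOT). An immediate that ThumbExpandImm cannot
// encode is moved into ip and complemented via the register form.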
void Assembler::mvn_thumb(Register dst, const Operand& src, SBit s,
                          Condition cond) {
  ASSERT(cond == al);
  if (!src.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8)) {
      mvn_imm_t1(dst, s, cond, i, imm3, imm8);
      return;
    }
    mov_thumb(ip, src, LeaveCC, al);
    // Complement via the register form; an rsb from zero would compute
    // -imm rather than ~imm.
    mvn_thumb(dst, Operand(ip), s, al);
    return;
  } else {
    // Register.
    if (are_low_reg(dst, src.rm_) && s == SetCC && src.shift_imm_ == 0) {
      // Low-register encoding, matching the other 16-bit (mode2)
      // data-processing cases.
      emit16(thumb16_mode2(MVN_REG) |
             thumb16_2lowreg_encoding(dst, src));
      return;
    } else {
      mvn_reg_t2(dst, src, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


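// Emits an ORR. A register-shifted-register operand has no single Thumb-2
// encoding, so the shift is materialized into dst first; this assumes dst
// does not alias src1.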
void Assembler::orr_thumb(Register dst, Register src1, const Operand& src2,
                          SBit s, Condition cond) {
  ASSERT(cond == al);
  if (!src2.rm_.is_valid()) {
    // Immediate.
    uint32_t i, imm3, imm8;
    if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) {
      orr_imm_t1(dst, src1, s, cond, i, imm3, imm8);
      return;
    }
    mov_thumb(ip, src2, LeaveCC, al);
    orr_thumb(dst, src1, Operand(ip), s, al);
    return;
  } else {
    // Register.
    if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) &&
        s == SetCC && src2.shift_imm_ == 0) {
      emit16(thumb16_mode2(ORR_REG) |
             thumb16_2lowreg_encoding(dst, src2));
      return;
    } else if (src2.rs_.is_valid()) {
      ASSERT(src2.shift_op_ == LSL);
      lsl_thumb(dst, src2, s, cond);
      orr_thumb(dst, src1, Operand(dst), s, cond);
      return;
    } else {
      orr_reg_t2(dst, src1, src2, s, cond);
      return;
    }
  }
  UNREACHABLE();
}


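// Emits an IT (If-Then) instruction: 0xBF00 | firstcond << 4 | mask, where
// the mask encodes the block length and the then/else pattern. Only one-
// and two-instruction blocks are supported; cond2 selects then/else for
// the second instruction, and cond3/cond4 are currently ignored.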
void Assembler::it_thumb(Condition cond, int num_instr, bool cond2,
                         bool cond3, bool cond4) {
  ASSERT(cond != al);
  uint16_t mask = 0;
  uint16_t c = (static_cast<int>(cond) >> 28) & 0xf;
  if (num_instr == 2) {
    // Bit 3 mirrors cond[0] for a 'then' second instruction and its
    // inverse for an 'else'; the trailing 1 in bit 2 marks the length.
    mask |= (cond2 ? (c & 1) : !(c & 1)) << 3;
    mask |= 4;
  } else {
    ASSERT(num_instr == 1);
    mask = 8;  // One-instruction block.
  }
  emit16(11*B12 | 15*B8 | (c << 4) | mask);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM