Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(325)

Side by Side Diff: src/arm/assembler-arm.cc

Issue 23536056: Thumb2 Backend: Enable Assembler to encode Thumb2 instructions Base URL: HEAD^
Patch Set: Created 7 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/arm/assembler-arm.h ('k') | src/arm/assembler-thumb.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. 1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved. 2 // All Rights Reserved.
3 // 3 //
4 // Redistribution and use in source and binary forms, with or without 4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions 5 // modification, are permitted provided that the following conditions
6 // are met: 6 // are met:
7 // 7 //
8 // - Redistributions of source code must retain the above copyright notice, 8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer. 9 // this list of conditions and the following disclaimer.
10 // 10 //
(...skipping 492 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Instruction patterns/masks used to recognize and rewrite fp-relative
// loads and stores, plus the Thumb2 pattern for a pc-relative ldr.
const Instr kStrRegFpOffsetPattern =
    al | B26 | Offset | kRegister_fp_Code * B16;
const Instr kLdrRegFpNegOffsetPattern =
    al | B26 | L | NegOffset | kRegister_fp_Code * B16;
const Instr kStrRegFpNegOffsetPattern =
    al | B26 | NegOffset | kRegister_fp_Code * B16;
const Instr kLdrStrInstrTypeMask = 0xffff0000;
const Instr kLdrStrInstrArgumentMask = 0x0000ffff;
const Instr kLdrStrOffsetMask = 0x00000fff;

// Mask/pattern pair for matching a Thumb2 pc-relative ldr (BH* are the
// high-halfword bit constants of a 32-bit Thumb instruction).
const Instr kThumbLdrPCMask = BH15 | 7 * BH12 | 15 * BH8 | 7 * BH4 | 15 * BH0;
const Instr kThumbLdrPCPattern = BH15 | 7 * BH12 | BH11 | BH6 | BH4 | 15 * BH0;
513 515
// Constructs an assembler writing into |buffer|.  The assembler starts in
// ARM encoding mode (thumb_mode_ == false); Thumb2 emission is opt-in.
Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
    : AssemblerBase(isolate, buffer, buffer_size),
      recorded_ast_id_(TypeFeedbackId::None()),
      positions_recorder_(this),
      thumb_mode_(false) {
  reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
  num_pending_reloc_info_ = 0;
  num_pending_64_bit_reloc_info_ = 0;
  next_buffer_check_ = 0;
  const_pool_blocked_nesting_ = 0;
  no_const_pool_before_ = 0;
  first_const_pool_use_ = -1;  // -1 means no constant pool entry pending.
  last_bound_pos_ = 0;
  ClearRecordedAstId();
}
(...skipping 240 matching lines...) Expand 10 before | Expand all | Expand 10 after
768 // 771 //
769 // The linked labels form a link chain by making the branch offset 772 // The linked labels form a link chain by making the branch offset
770 // in the instruction steam to point to the previous branch 773 // in the instruction steam to point to the previous branch
771 // instruction using the same label. 774 // instruction using the same label.
772 // 775 //
773 // The link chain is terminated by a branch offset pointing to the 776 // The link chain is terminated by a branch offset pointing to the
774 // same position. 777 // same position.
775 778
776 779
// Returns the target position encoded by the (branch or link) instruction
// at |pos|, dispatching to the Thumb2 decoder when in thumb mode.
int Assembler::target_at(int pos) {
  if (is_thumb_mode()) {
    // Thumb2 branches use different encodings; decoded separately.
    return target_at_thumb(pos);
  }

  Instr instr = instr_at(pos);
  if (is_uint24(instr)) {
    // Emitted link to a label, not part of a branch.
    return instr;
  }
  ASSERT((instr & 7*B25) == 5*B25);  // b, bl, or blx imm24
  // Left-shift then arithmetic right-shift sign-extends imm24 and scales
  // it from words to bytes (imm24 << 2, sign extended).
  int imm26 = ((instr & kImm24Mask) << 8) >> 6;
  if ((Instruction::ConditionField(instr) == kSpecialCondition) &&
      ((instr & B24) != 0)) {
    // blx uses bit 24 to encode bit 2 of imm26
    imm26 += 2;
  }
  return pos + kPcLoadDelta + imm26;
}
792 799
793 800
794 void Assembler::target_at_put(int pos, int target_pos) { 801 void Assembler::target_at_put(int pos, int target_pos) {
802 if (is_thumb_mode()) {
803 target_at_put_thumb(pos, target_pos);
804 return;
805 }
806
795 Instr instr = instr_at(pos); 807 Instr instr = instr_at(pos);
796 if (is_uint24(instr)) { 808 if (is_uint24(instr)) {
797 ASSERT(target_pos == pos || target_pos >= 0); 809 ASSERT(target_pos == pos || target_pos >= 0);
798 // Emitted link to a label, not part of a branch. 810 // Emitted link to a label, not part of a branch.
799 // Load the position of the label relative to the generated code object 811 // Load the position of the label relative to the generated code object
800 // pointer in a register. 812 // pointer in a register.
801 813
802 // Here are the instructions we need to emit: 814 // Here are the instructions we need to emit:
803 // For ARMv7: target24 => target16_1:target16_0 815 // For ARMv7: target24 => target16_1:target16_0
804 // movw dst, #target16_0 816 // movw dst, #target16_0
(...skipping 501 matching lines...) Expand 10 before | Expand all | Expand 10 after
1306 } else { 1318 } else {
1307 // First entry of the link chain points to itself. 1319 // First entry of the link chain points to itself.
1308 target_pos = pc_offset(); 1320 target_pos = pc_offset();
1309 } 1321 }
1310 L->link_to(pc_offset()); 1322 L->link_to(pc_offset());
1311 } 1323 }
1312 1324
1313 // Block the emission of the constant pool, since the branch instruction must 1325 // Block the emission of the constant pool, since the branch instruction must
1314 // be emitted at the pc offset recorded by the label. 1326 // be emitted at the pc offset recorded by the label.
1315 BlockConstPoolFor(1); 1327 BlockConstPoolFor(1);
1328 if (is_thumb_mode()) {
1329 return target_pos - (pc_offset() + kThumbPcLoadDelta);
1330 }
1316 return target_pos - (pc_offset() + kPcLoadDelta); 1331 return target_pos - (pc_offset() + kPcLoadDelta);
1317 } 1332 }
1318 1333
1319 1334
1320 // Branch instructions. 1335 // Branch instructions.
// Branch (B) to pc-relative |branch_offset| (in bytes), conditional on |cond|.
void Assembler::b(int branch_offset, Condition cond) {
  if (is_thumb_mode()) {
    // Thumb targets are halfword-aligned.
    ASSERT((branch_offset & 1) == 0);
    b_thumb(branch_offset, cond);
    return;
  }
  ASSERT((branch_offset & 3) == 0);
  int imm24 = branch_offset >> 2;
  ASSERT(is_int24(imm24));
  emit(cond | B27 | B25 | (imm24 & kImm24Mask));

  if (cond == al) {
    // Dead code is a good location to emit the constant pool.
    CheckConstPool(false, false);
  }
}
1332 1352
1333 1353
// Branch with link (BL) to pc-relative |branch_offset| (in bytes).
void Assembler::bl(int branch_offset, Condition cond) {
  positions_recorder()->WriteRecordedPositions();
  if (is_thumb_mode()) {
    // Thumb2 BL is unconditional and takes a halfword-scaled offset.
    ASSERT(cond == al);
    ASSERT((branch_offset & 1) == 0);
    int imm = branch_offset >> 1;
    emit32(thumb32_mode4(BL_32_IMM) | thumb32_sign_extend_imm24(imm));
    return;
  }
  ASSERT((branch_offset & 3) == 0);
  int imm24 = branch_offset >> 2;
  ASSERT(is_int24(imm24));
  emit(cond | B27 | B25 | B24 | (imm24 & kImm24Mask));
}
1341 1368
1342 1369
// Branch with link and exchange (BLX) to pc-relative |branch_offset|.
void Assembler::blx(int branch_offset) {  // v5 and above
  positions_recorder()->WriteRecordedPositions();
  if (is_thumb_mode()) {
    // BLX from Thumb switches to ARM state, so the target is word-aligned.
    ASSERT((branch_offset & 3) == 0);
    int imm = branch_offset >> 1;
    emit32(thumb32_mode4(BLX_32_IMM) | thumb32_sign_extend_imm24(imm));
    return;
  }
  ASSERT((branch_offset & 1) == 0);
  // In ARM state, bit 1 of the offset goes into the H bit (B24).
  int h = ((branch_offset & 2) >> 1)*B24;
  int imm24 = branch_offset >> 2;
  ASSERT(is_int24(imm24));
  emit(kSpecialCondition | B27 | B25 | h | (imm24 & kImm24Mask));
}
1351 1384
1352 1385
// Branch with link and exchange (BLX) to the address in |target|.
void Assembler::blx(Register target, Condition cond) {  // v5 and above
  positions_recorder()->WriteRecordedPositions();
  ASSERT(!target.is(pc));
  if (is_thumb_mode()) {
    // Thumb BLX (register) is a 16-bit encoding and is unconditional.
    ASSERT(cond == al);
    emit16(thumb16_mode3(BLX_REG) | thumb16_anyreg_encoding(target));
    return;
  }
  emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BLX | target.code());
}
1358 1396
1359 1397
// Branch and exchange (BX) to the address in |target|.
void Assembler::bx(Register target, Condition cond) {  // v5 and above, plus v4t
  positions_recorder()->WriteRecordedPositions();
  ASSERT(!target.is(pc));  // use of pc is actually allowed, but discouraged
  if (is_thumb_mode()) {
    // Thumb BX (register) is a 16-bit encoding and is unconditional.
    ASSERT(cond == al);
    emit16(thumb16_mode3(BX_REG) | thumb16_anyreg_encoding(target));
    return;
  }
  emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BX | target.code());
}
1365 1408
1366 1409
1367 // Data-processing instructions. 1410 // Data-processing instructions.
1368 1411
1369 void Assembler::and_(Register dst, Register src1, const Operand& src2, 1412 void Assembler::and_(Register dst, Register src1, const Operand& src2,
1370 SBit s, Condition cond) { 1413 SBit s, Condition cond) {
1414 if (is_thumb_mode()) {
1415 and_thumb(dst, src1, src2, s, cond);
1416 return;
1417 }
1371 addrmod1(cond | AND | s, src1, dst, src2); 1418 addrmod1(cond | AND | s, src1, dst, src2);
1372 } 1419 }
1373 1420
1374 1421
1375 void Assembler::eor(Register dst, Register src1, const Operand& src2, 1422 void Assembler::eor(Register dst, Register src1, const Operand& src2,
1376 SBit s, Condition cond) { 1423 SBit s, Condition cond) {
1424 if (is_thumb_mode()) {
1425 eor_thumb(dst, src1, src2, s, cond);
1426 return;
1427 }
1377 addrmod1(cond | EOR | s, src1, dst, src2); 1428 addrmod1(cond | EOR | s, src1, dst, src2);
1378 } 1429 }
1379 1430
1380 1431
1381 void Assembler::sub(Register dst, Register src1, const Operand& src2, 1432 void Assembler::sub(Register dst, Register src1, const Operand& src2,
1382 SBit s, Condition cond) { 1433 SBit s, Condition cond) {
1434 if (is_thumb_mode()) {
1435 sub_thumb(dst, src1, src2, s, cond);
1436 return;
1437 }
1383 addrmod1(cond | SUB | s, src1, dst, src2); 1438 addrmod1(cond | SUB | s, src1, dst, src2);
1384 } 1439 }
1385 1440
1386 1441
1387 void Assembler::rsb(Register dst, Register src1, const Operand& src2, 1442 void Assembler::rsb(Register dst, Register src1, const Operand& src2,
1388 SBit s, Condition cond) { 1443 SBit s, Condition cond) {
1444 if (is_thumb_mode()) {
1445 rsb_thumb(dst, src1, src2, s, cond);
1446 return;
1447 }
1389 addrmod1(cond | RSB | s, src1, dst, src2); 1448 addrmod1(cond | RSB | s, src1, dst, src2);
1390 } 1449 }
1391 1450
1392 1451
1393 void Assembler::add(Register dst, Register src1, const Operand& src2, 1452 void Assembler::add(Register dst, Register src1, const Operand& src2,
1394 SBit s, Condition cond) { 1453 SBit s, Condition cond) {
1454 if (is_thumb_mode()) {
1455 add_thumb(dst, src1, src2, s, cond);
1456 return;
1457 }
1395 addrmod1(cond | ADD | s, src1, dst, src2); 1458 addrmod1(cond | ADD | s, src1, dst, src2);
1396 } 1459 }
1397 1460
1398 1461
1399 void Assembler::adc(Register dst, Register src1, const Operand& src2, 1462 void Assembler::adc(Register dst, Register src1, const Operand& src2,
1400 SBit s, Condition cond) { 1463 SBit s, Condition cond) {
1464 if (is_thumb_mode()) {
1465 adc_thumb(dst, src1, src2, s, cond);
1466 return;
1467 }
1401 addrmod1(cond | ADC | s, src1, dst, src2); 1468 addrmod1(cond | ADC | s, src1, dst, src2);
1402 } 1469 }
1403 1470
1404 1471
1405 void Assembler::sbc(Register dst, Register src1, const Operand& src2, 1472 void Assembler::sbc(Register dst, Register src1, const Operand& src2,
1406 SBit s, Condition cond) { 1473 SBit s, Condition cond) {
1474 if (is_thumb_mode()) {
1475 sbc_thumb(dst, src1, src2, s, cond);
1476 return;
1477 }
1407 addrmod1(cond | SBC | s, src1, dst, src2); 1478 addrmod1(cond | SBC | s, src1, dst, src2);
1408 } 1479 }
1409 1480
1410 1481
// Reverse subtract with carry: dst = src2 - src1 - (1 - carry).
// NOTE(review): unlike the sibling data-processing emitters there is no
// thumb-mode dispatch here; confirm rsc is never reached while generating
// Thumb2 code (RSC has no Thumb2 encoding).
void Assembler::rsc(Register dst, Register src1, const Operand& src2,
                    SBit s, Condition cond) {
  addrmod1(cond | RSC | s, src1, dst, src2);
}
1415 1486
1416 1487
1417 void Assembler::tst(Register src1, const Operand& src2, Condition cond) { 1488 void Assembler::tst(Register src1, const Operand& src2, Condition cond) {
1489 if (is_thumb_mode()) {
1490 tst_thumb(src1, src2, cond);
1491 return;
1492 }
1418 addrmod1(cond | TST | S, src1, r0, src2); 1493 addrmod1(cond | TST | S, src1, r0, src2);
1419 } 1494 }
1420 1495
1421 1496
1422 void Assembler::teq(Register src1, const Operand& src2, Condition cond) { 1497 void Assembler::teq(Register src1, const Operand& src2, Condition cond) {
1498 if (is_thumb_mode()) {
1499 teq_thumb(src1, src2, cond);
1500 return;
1501 }
1423 addrmod1(cond | TEQ | S, src1, r0, src2); 1502 addrmod1(cond | TEQ | S, src1, r0, src2);
1424 } 1503 }
1425 1504
1426 1505
1427 void Assembler::cmp(Register src1, const Operand& src2, Condition cond) { 1506 void Assembler::cmp(Register src1, const Operand& src2, Condition cond) {
1507 if (is_thumb_mode()) {
1508 cmp_thumb(src1, src2, cond);
1509 return;
1510 }
1428 addrmod1(cond | CMP | S, src1, r0, src2); 1511 addrmod1(cond | CMP | S, src1, r0, src2);
1429 } 1512 }
1430 1513
1431 1514
// Compare against a raw (unencoded) 12-bit immediate.
// NOTE(review): no thumb-mode path here, unlike cmp(); confirm this is only
// reached when generating ARM code.
void Assembler::cmp_raw_immediate(
    Register src, int raw_immediate, Condition cond) {
  ASSERT(is_uint12(raw_immediate));
  emit(cond | I | CMP | S | src.code() << 16 | raw_immediate);
}
1437 1520
1438 1521
1439 void Assembler::cmn(Register src1, const Operand& src2, Condition cond) { 1522 void Assembler::cmn(Register src1, const Operand& src2, Condition cond) {
1523 if (is_thumb_mode()) {
1524 cmn_thumb(src1, src2, cond);
1525 return;
1526 }
1440 addrmod1(cond | CMN | S, src1, r0, src2); 1527 addrmod1(cond | CMN | S, src1, r0, src2);
1441 } 1528 }
1442 1529
1443 1530
1444 void Assembler::orr(Register dst, Register src1, const Operand& src2, 1531 void Assembler::orr(Register dst, Register src1, const Operand& src2,
1445 SBit s, Condition cond) { 1532 SBit s, Condition cond) {
1533 if (is_thumb_mode()) {
1534 orr_thumb(dst, src1, src2, s, cond);
1535 return;
1536 }
1446 addrmod1(cond | ORR | s, src1, dst, src2); 1537 addrmod1(cond | ORR | s, src1, dst, src2);
1447 } 1538 }
1448 1539
1449 1540
// Move: dst = src.
void Assembler::mov(Register dst, const Operand& src, SBit s, Condition cond) {
  if (dst.is(pc)) {
    // Writing pc is a control transfer; record the source position first.
    positions_recorder()->WriteRecordedPositions();
  }
  // Don't allow nop instructions in the form mov rn, rn to be generated using
  // the mov instruction. They must be generated using nop(int/NopMarkerTypes)
  // or MarkCode(int/NopMarkerTypes) pseudo instructions.
  ASSERT(!(src.is_reg() && src.rm().is(dst) && s == LeaveCC && cond == al));
  if (is_thumb_mode()) {
    mov_thumb(dst, src, s, cond);
    return;
  }
  addrmod1(cond | MOV | s, r0, dst, src);
}
1460 1555
1461 1556
1462 void Assembler::mov_label_offset(Register dst, Label* label) { 1557 void Assembler::mov_label_offset(Register dst, Label* label) {
1463 if (label->is_bound()) { 1558 if (label->is_bound()) {
1464 mov(dst, Operand(label->pos() + (Code::kHeaderSize - kHeapObjectTag))); 1559 mov(dst, Operand(label->pos() + (Code::kHeaderSize - kHeapObjectTag)));
1465 } else { 1560 } else {
1466 // Emit the link to the label in the code stream followed by extra nop 1561 // Emit the link to the label in the code stream followed by extra nop
1467 // instructions. 1562 // instructions.
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Loads a 16-bit immediate into |reg| (delegates to mov()).
void Assembler::movw(Register reg, uint32_t immediate, Condition cond) {
  ASSERT(immediate < 0x10000);
  // May use movw if supported, but on unsupported platforms will try to use
  // equivalent rotated immed_8 value and other tricks before falling back to a
  // constant pool load.
  mov(reg, Operand(immediate), LeaveCC, cond);
}
1508 1603
1509 1604
1510 void Assembler::movt(Register reg, uint32_t immediate, Condition cond) { 1605 void Assembler::movt(Register reg, uint32_t immediate, Condition cond) {
1606 if (is_thumb_mode()) {
1607 movt_thumb(reg, immediate, cond);
1608 return;
1609 }
1511 emit(cond | 0x34*B20 | reg.code()*B12 | EncodeMovwImmediate(immediate)); 1610 emit(cond | 0x34*B20 | reg.code()*B12 | EncodeMovwImmediate(immediate));
1512 } 1611 }
1513 1612
1514 1613
1515 void Assembler::bic(Register dst, Register src1, const Operand& src2, 1614 void Assembler::bic(Register dst, Register src1, const Operand& src2,
1516 SBit s, Condition cond) { 1615 SBit s, Condition cond) {
1616 if (is_thumb_mode()) {
1617 bic_thumb(dst, src1, src2, s, cond);
1618 return;
1619 }
1517 addrmod1(cond | BIC | s, src1, dst, src2); 1620 addrmod1(cond | BIC | s, src1, dst, src2);
1518 } 1621 }
1519 1622
1520 1623
1521 void Assembler::mvn(Register dst, const Operand& src, SBit s, Condition cond) { 1624 void Assembler::mvn(Register dst, const Operand& src, SBit s, Condition cond) {
1625 if (is_thumb_mode()) {
1626 mvn_thumb(dst, src, s, cond);
1627 return;
1628 }
1522 addrmod1(cond | MVN | s, r0, dst, src); 1629 addrmod1(cond | MVN | s, r0, dst, src);
1523 } 1630 }
1524 1631
1525 1632
1526 // Multiply instructions. 1633 // Multiply instructions.
1527 void Assembler::mla(Register dst, Register src1, Register src2, Register srcA, 1634 void Assembler::mla(Register dst, Register src1, Register src2, Register srcA,
1528 SBit s, Condition cond) { 1635 SBit s, Condition cond) {
1636 if (is_thumb_mode()) {
1637 mla_thumb(dst, src1, src2, srcA, s, cond);
1638 return;
1639 }
1529 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc)); 1640 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc));
1530 emit(cond | A | s | dst.code()*B16 | srcA.code()*B12 | 1641 emit(cond | A | s | dst.code()*B16 | srcA.code()*B12 |
1531 src2.code()*B8 | B7 | B4 | src1.code()); 1642 src2.code()*B8 | B7 | B4 | src1.code());
1532 } 1643 }
1533 1644
1534 1645
1535 void Assembler::mls(Register dst, Register src1, Register src2, Register srcA, 1646 void Assembler::mls(Register dst, Register src1, Register src2, Register srcA,
1536 Condition cond) { 1647 Condition cond) {
1648 if (is_thumb_mode()) {
1649 mls_thumb(dst, src1, src2, srcA, cond);
1650 return;
1651 }
1537 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc)); 1652 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc));
1538 emit(cond | B22 | B21 | dst.code()*B16 | srcA.code()*B12 | 1653 emit(cond | B22 | B21 | dst.code()*B16 | srcA.code()*B12 |
1539 src2.code()*B8 | B7 | B4 | src1.code()); 1654 src2.code()*B8 | B7 | B4 | src1.code());
1540 } 1655 }
1541 1656
1542 1657
1543 void Assembler::sdiv(Register dst, Register src1, Register src2, 1658 void Assembler::sdiv(Register dst, Register src1, Register src2,
1544 Condition cond) { 1659 Condition cond) {
1660 if (is_thumb_mode()) {
1661 sdiv_thumb(dst, src1, src2, cond);
1662 return;
1663 }
1545 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc)); 1664 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc));
1546 ASSERT(IsEnabled(SUDIV)); 1665 ASSERT(IsEnabled(SUDIV));
1547 emit(cond | B26 | B25| B24 | B20 | dst.code()*B16 | 0xf * B12 | 1666 emit(cond | B26 | B25| B24 | B20 | dst.code()*B16 | 0xf * B12 |
1548 src2.code()*B8 | B4 | src1.code()); 1667 src2.code()*B8 | B4 | src1.code());
1549 } 1668 }
1550 1669
1551 1670
// Multiply: dst = src1 * src2.
void Assembler::mul(Register dst, Register src1, Register src2,
                    SBit s, Condition cond) {
  ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc));
  if (is_thumb_mode()) {
    mul_thumb(dst, src1, src2, s, cond);
    return;
  }
  // dst goes in bits 16-19 for this instruction!
  emit(cond | s | dst.code()*B16 | src2.code()*B8 | B7 | B4 | src1.code());
}
1558 1681
1559 1682
1560 void Assembler::smlal(Register dstL, 1683 void Assembler::smlal(Register dstL,
1561 Register dstH, 1684 Register dstH,
1562 Register src1, 1685 Register src1,
1563 Register src2, 1686 Register src2,
1564 SBit s, 1687 SBit s,
1565 Condition cond) { 1688 Condition cond) {
1689 if (is_thumb_mode()) {
1690 smlal_thumb(dstL, dstH, src1, src2, s, cond);
1691 return;
1692 }
1566 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); 1693 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
1567 ASSERT(!dstL.is(dstH)); 1694 ASSERT(!dstL.is(dstH));
1568 emit(cond | B23 | B22 | A | s | dstH.code()*B16 | dstL.code()*B12 | 1695 emit(cond | B23 | B22 | A | s | dstH.code()*B16 | dstL.code()*B12 |
1569 src2.code()*B8 | B7 | B4 | src1.code()); 1696 src2.code()*B8 | B7 | B4 | src1.code());
1570 } 1697 }
1571 1698
1572 1699
1573 void Assembler::smull(Register dstL, 1700 void Assembler::smull(Register dstL,
1574 Register dstH, 1701 Register dstH,
1575 Register src1, 1702 Register src1,
1576 Register src2, 1703 Register src2,
1577 SBit s, 1704 SBit s,
1578 Condition cond) { 1705 Condition cond) {
1706 if (is_thumb_mode()) {
1707 smull_thumb(dstL, dstH, src1, src2, s, cond);
1708 return;
1709 }
1579 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); 1710 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
1580 ASSERT(!dstL.is(dstH)); 1711 ASSERT(!dstL.is(dstH));
1581 emit(cond | B23 | B22 | s | dstH.code()*B16 | dstL.code()*B12 | 1712 emit(cond | B23 | B22 | s | dstH.code()*B16 | dstL.code()*B12 |
1582 src2.code()*B8 | B7 | B4 | src1.code()); 1713 src2.code()*B8 | B7 | B4 | src1.code());
1583 } 1714 }
1584 1715
1585 1716
1586 void Assembler::umlal(Register dstL, 1717 void Assembler::umlal(Register dstL,
1587 Register dstH, 1718 Register dstH,
1588 Register src1, 1719 Register src1,
1589 Register src2, 1720 Register src2,
1590 SBit s, 1721 SBit s,
1591 Condition cond) { 1722 Condition cond) {
1723 if (is_thumb_mode()) {
1724 umlal_thumb(dstL, dstH, src1, src2, s, cond);
1725 return;
1726 }
1592 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); 1727 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
1593 ASSERT(!dstL.is(dstH)); 1728 ASSERT(!dstL.is(dstH));
1594 emit(cond | B23 | A | s | dstH.code()*B16 | dstL.code()*B12 | 1729 emit(cond | B23 | A | s | dstH.code()*B16 | dstL.code()*B12 |
1595 src2.code()*B8 | B7 | B4 | src1.code()); 1730 src2.code()*B8 | B7 | B4 | src1.code());
1596 } 1731 }
1597 1732
1598 1733
1599 void Assembler::umull(Register dstL, 1734 void Assembler::umull(Register dstL,
1600 Register dstH, 1735 Register dstH,
1601 Register src1, 1736 Register src1,
1602 Register src2, 1737 Register src2,
1603 SBit s, 1738 SBit s,
1604 Condition cond) { 1739 Condition cond) {
1740 if (is_thumb_mode()) {
1741 umull_thumb(dstL, dstH, src1, src2, s, cond);
1742 return;
1743 }
1605 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); 1744 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
1606 ASSERT(!dstL.is(dstH)); 1745 ASSERT(!dstL.is(dstH));
1607 emit(cond | B23 | s | dstH.code()*B16 | dstL.code()*B12 | 1746 emit(cond | B23 | s | dstH.code()*B16 | dstL.code()*B12 |
1608 src2.code()*B8 | B7 | B4 | src1.code()); 1747 src2.code()*B8 | B7 | B4 | src1.code());
1609 } 1748 }
1610 1749
1611 1750
1612 // Miscellaneous arithmetic instructions. 1751 // Miscellaneous arithmetic instructions.
// Count leading zeros: dst = number of leading zero bits in src.
void Assembler::clz(Register dst, Register src, Condition cond) {
  // v5 and above.
  ASSERT(!dst.is(pc) && !src.is(pc));

  if (is_thumb_mode()) {
    ASSERT(cond == al);
    // T32 CLZ: note the source register is encoded twice (Rn and Rm fields).
    emit32(BH15 | BH14 | BH13 | BH12 | BH11 | BH9 | BH7 | src.code()*BH0 |
           B15 | B14 | B13 | B12 | dst.code()*B8 | B7 | src.code());
    return;
  }

  emit(cond | B24 | B22 | B21 | 15*B16 | dst.code()*B12 |
       15*B8 | CLZ | src.code());
}
1619 1766
1620 1767
1621 // Saturating instructions. 1768 // Saturating instructions.
1622 1769
1623 // Unsigned saturate. 1770 // Unsigned saturate.
// Unsigned saturate: clamps src (optionally shifted) into the unsigned
// range representable with |satpos| bits and writes the result to dst.
void Assembler::usat(Register dst,
                     int satpos,
                     const Operand& src,
                     Condition cond) {
  // v6 and above.
  ASSERT(CpuFeatures::IsSupported(ARMv7));
  ASSERT(!dst.is(pc) && !src.rm_.is(pc));
  ASSERT((satpos >= 0) && (satpos <= 31));
  ASSERT((src.shift_op_ == ASR) || (src.shift_op_ == LSL));
  ASSERT(src.rs_.is(no_reg));  // Only immediate shifts are supported.

  // sh selects the shift type in the encoding: 1 = ASR, 0 = LSL.
  int sh = 0;
  if (src.shift_op_ == ASR) {
    sh = 1;
  }

  if (is_thumb_mode()) {
    ASSERT(cond == al);
    emit32(thumb32_mode3(USAT_32_IMM) | sh*BH5 |
           thumb32_bit_field(src.rm_, dst, src.shift_imm_, satpos));
    return;
  }

  emit(cond | 0x6*B24 | 0xe*B20 | satpos*B16 | dst.code()*B12 |
       src.shift_imm_*B7 | sh*B6 | 0x1*B4 | src.rm_.code());
}
1643 1797
1644 1798
1645 // Bitfield manipulation instructions. 1799 // Bitfield manipulation instructions.
1646 1800
1647 // Unsigned bit field extract. 1801 // Unsigned bit field extract.
1648 // Extracts #width adjacent bits from position #lsb in a register, and 1802 // Extracts #width adjacent bits from position #lsb in a register, and
1649 // writes them to the low bits of a destination register. 1803 // writes them to the low bits of a destination register.
1650 // ubfx dst, src, #lsb, #width 1804 // ubfx dst, src, #lsb, #width
void Assembler::ubfx(Register dst,
                     Register src,
                     int lsb,
                     int width,
                     Condition cond) {
  // v7 and above.
  ASSERT(CpuFeatures::IsSupported(ARMv7));
  ASSERT(!dst.is(pc) && !src.is(pc));
  ASSERT((lsb >= 0) && (lsb <= 31));
  ASSERT((width >= 1) && (width <= (32 - lsb)));

  if (is_thumb_mode()) {
    ASSERT(cond == al);
    // The encoding stores width - 1 ("widthminus1"), same as the ARM form.
    emit32(thumb32_mode3(UBFX_32_IMM) |
           thumb32_bit_field(src, dst, lsb, width - 1));
    return;
  }

  emit(cond | 0xf*B23 | B22 | B21 | (width - 1)*B16 | dst.code()*B12 |
       lsb*B7 | B6 | B4 | src.code());
}
1664 1826
1665 1827
1666 // Signed bit field extract. 1828 // Signed bit field extract.
1667 // Extracts #width adjacent bits from position #lsb in a register, and 1829 // Extracts #width adjacent bits from position #lsb in a register, and
1668 // writes them to the low bits of a destination register. The extracted 1830 // writes them to the low bits of a destination register. The extracted
1669 // value is sign extended to fill the destination register. 1831 // value is sign extended to fill the destination register.
1670 // sbfx dst, src, #lsb, #width 1832 // sbfx dst, src, #lsb, #width
void Assembler::sbfx(Register dst,
                     Register src,
                     int lsb,
                     int width,
                     Condition cond) {
  // v7 and above.
  ASSERT(CpuFeatures::IsSupported(ARMv7));
  ASSERT(!dst.is(pc) && !src.is(pc));
  ASSERT((lsb >= 0) && (lsb <= 31));
  ASSERT((width >= 1) && (width <= (32 - lsb)));

  if (is_thumb_mode()) {
    ASSERT(cond == al);
    // The encoding stores width - 1 ("widthminus1"), same as the ARM form.
    emit32(thumb32_mode3(SBFX_32_IMM) |
           thumb32_bit_field(src, dst, lsb, width - 1));
    return;
  }

  emit(cond | 0xf*B23 | B21 | (width - 1)*B16 | dst.code()*B12 |
       lsb*B7 | B6 | B4 | src.code());
}
1684 1854
1685 1855
// Bit field clear.
// Sets #width adjacent bits at position #lsb in the destination register
// to zero, preserving the value of the other bits.
// bfc dst, #lsb, #width
void Assembler::bfc(Register dst, int lsb, int width, Condition cond) {
  // v7 and above.
  ASSERT(CpuFeatures::IsSupported(ARMv7));
  ASSERT(!dst.is(pc));
  ASSERT((lsb >= 0) && (lsb <= 31));
  ASSERT((width >= 1) && (width <= (32 - lsb)));
  // Both encodings take the most-significant bit position, not the width.
  int msb = lsb + width - 1;

  if (is_thumb_mode()) {
    ASSERT(cond == al);
    // BFC is BFI with the source register field set to pc (0b1111).
    emit32(thumb32_mode3(BFC_32_IMM) | thumb32_bit_field(pc, dst, lsb, msb));
    return;
  }

  // ARM encoding: Rn = 0xf marks the clear (rather than insert) form.
  emit(cond | 0x1f*B22 | msb*B16 | dst.code()*B12 | lsb*B7 | B4 | 0xf);
}
1699 1876
1700 1877
// Bit field insert.
// Inserts #width adjacent bits from the low bits of the source register
// into position #lsb of the destination register.
// bfi dst, src, #lsb, #width
void Assembler::bfi(Register dst,
                    Register src,
                    int lsb,
                    int width,
                    Condition cond) {
  // v7 and above.
  ASSERT(CpuFeatures::IsSupported(ARMv7));
  ASSERT(!dst.is(pc) && !src.is(pc));
  ASSERT((lsb >= 0) && (lsb <= 31));
  ASSERT((width >= 1) && (width <= (32 - lsb)));
  // Both encodings take the most-significant bit position, not the width.
  int msb = lsb + width - 1;

  if (is_thumb_mode()) {
    // Thumb2 BFI is emitted unconditionally; no IT predication here.
    ASSERT(cond == al);
    emit32(thumb32_mode3(BFI_32_IMM) | thumb32_bit_field(src, dst, lsb, msb));
    return;
  }

  emit(cond | 0x1f*B22 | msb*B16 | dst.code()*B12 | lsb*B7 | B4 |
       src.code());
}
1719 1903
1720 1904
1721 void Assembler::pkhbt(Register dst, 1905 void Assembler::pkhbt(Register dst,
1722 Register src1, 1906 Register src1,
1723 const Operand& src2, 1907 const Operand& src2,
1724 Condition cond ) { 1908 Condition cond ) {
1725 // Instruction details available in ARM DDI 0406C.b, A8.8.125. 1909 // Instruction details available in ARM DDI 0406C.b, A8.8.125.
(...skipping 123 matching lines...) Expand 10 before | Expand all | Expand 10 after
1849 } 2033 }
1850 emit(cond | instr | B24 | B21 | fields | 15*B12); 2034 emit(cond | instr | B24 | B21 | fields | 15*B12);
1851 } 2035 }
1852 2036
1853 2037
1854 // Load/Store instructions. 2038 // Load/Store instructions.
1855 void Assembler::ldr(Register dst, const MemOperand& src, Condition cond) { 2039 void Assembler::ldr(Register dst, const MemOperand& src, Condition cond) {
1856 if (dst.is(pc)) { 2040 if (dst.is(pc)) {
1857 positions_recorder()->WriteRecordedPositions(); 2041 positions_recorder()->WriteRecordedPositions();
1858 } 2042 }
2043
2044 if (is_thumb_mode()) {
2045 ldr_thumb(dst, src);
2046 return;
2047 }
2048
1859 addrmod2(cond | B26 | L, dst, src); 2049 addrmod2(cond | B26 | L, dst, src);
1860 } 2050 }
1861 2051
1862 2052
// Store a 32-bit word from src into memory.
void Assembler::str(Register src, const MemOperand& dst, Condition cond) {
  if (is_thumb_mode()) {
    // The Thumb encoding chosen by str_thumb carries no condition field.
    ASSERT(cond == al);
    str_thumb(src, dst);
    return;
  }
  addrmod2(cond | B26, src, dst);
}
1866 2061
1867 2062
// Load an unsigned byte (zero-extended) from memory into dst.
void Assembler::ldrb(Register dst, const MemOperand& src, Condition cond) {
  if (is_thumb_mode()) {
    // The Thumb encoding chosen by ldrb_thumb carries no condition field.
    ASSERT(cond == al);
    ldrb_thumb(dst, src);
    return;
  }
  addrmod2(cond | B26 | B | L, dst, src);
}
1871 2071
1872 2072
// Store the low byte of src into memory.
void Assembler::strb(Register src, const MemOperand& dst, Condition cond) {
  if (is_thumb_mode()) {
    // The Thumb encoding chosen by strb_thumb carries no condition field.
    ASSERT(cond == al);
    strb_thumb(src, dst);
    return;
  }
  addrmod2(cond | B26 | B, src, dst);
}
1876 2081
1877 2082
// Load an unsigned halfword (zero-extended) from memory into dst.
void Assembler::ldrh(Register dst, const MemOperand& src, Condition cond) {
  if (is_thumb_mode()) {
    // The Thumb encoding chosen by ldrh_thumb carries no condition field.
    ASSERT(cond == al);
    ldrh_thumb(dst, src);
    return;
  }
  addrmod3(cond | L | B7 | H | B4, dst, src);
}
1881 2091
1882 2092
// Store the low halfword of src into memory.
void Assembler::strh(Register src, const MemOperand& dst, Condition cond) {
  if (is_thumb_mode()) {
    // The Thumb encoding chosen by strh_thumb carries no condition field.
    ASSERT(cond == al);
    strh_thumb(src, dst);
    return;
  }
  addrmod3(cond | B7 | H | B4, src, dst);
}
1886 2101
1887 2102
// Load a signed byte (sign-extended) from memory into dst.
void Assembler::ldrsb(Register dst, const MemOperand& src, Condition cond) {
  if (is_thumb_mode()) {
    // The Thumb encoding chosen by ldrsb_thumb carries no condition field.
    ASSERT(cond == al);
    ldrsb_thumb(dst, src);
    return;
  }
  addrmod3(cond | L | B7 | S6 | B4, dst, src);
}
1891 2111
1892 2112
// Load a signed halfword (sign-extended) from memory into dst.
void Assembler::ldrsh(Register dst, const MemOperand& src, Condition cond) {
  if (is_thumb_mode()) {
    // The Thumb encoding chosen by ldrsh_thumb carries no condition field.
    ASSERT(cond == al);
    ldrsh_thumb(dst, src);
    return;
  }
  addrmod3(cond | L | B7 | S6 | H | B4, dst, src);
}
1896 2121
1897 2122
// Load a doubleword from memory into the register pair dst1:dst2.
void Assembler::ldrd(Register dst1, Register dst2,
                     const MemOperand& src, Condition cond) {
  ASSERT(IsEnabled(ARMv7));
  ASSERT(src.rm().is(no_reg));  // Immediate-offset addressing only.
  ASSERT(!dst1.is(lr));  // r14.
  // NOTE(review): this even-register requirement is an ARM-encoding rule;
  // Thumb2 LDRD allows any pair, yet the ASSERT also fires in Thumb mode --
  // confirm the restriction is intended there.
  ASSERT_EQ(0, dst1.code() % 2);
  if (is_thumb_mode()) {
    ASSERT(cond == al);
    ASSERT(!src.rm_.is_valid());  // Immediate.
    ldrd_imm_t1(dst1, dst2, src);
    return;
  }
  // ARM LDRD requires consecutive registers; Thumb2 (above) does not.
  ASSERT_EQ(dst1.code() + 1, dst2.code());
  addrmod3(cond | B7 | B6 | B4, dst1, src);
}
1907 2138
1908 2139
// Store the register pair src1:src2 to memory as a doubleword.
void Assembler::strd(Register src1, Register src2,
                     const MemOperand& dst, Condition cond) {
  ASSERT(dst.rm().is(no_reg));  // Immediate-offset addressing only.
  ASSERT(!src1.is(lr));  // r14.
  // NOTE(review): even-register rule is ARM-specific; it also constrains
  // the Thumb path below -- confirm that is intended.
  ASSERT_EQ(0, src1.code() % 2);
  ASSERT(IsEnabled(ARMv7));
  if (is_thumb_mode()) {
    ASSERT(cond == al);
    ASSERT(!dst.rm_.is_valid());  // Immediate.
    strd_imm_t1(src1, src2, dst);
    return;
  }
  // ARM STRD requires consecutive registers; Thumb2 (above) does not.
  ASSERT_EQ(src1.code() + 1, src2.code());
  addrmod3(cond | B7 | B6 | B5 | B4, src1, dst);
}
1918 2155
1919 2156
1920 // Preload instructions. 2157 // Preload instructions.
1921 void Assembler::pld(const MemOperand& address) { 2158 void Assembler::pld(const MemOperand& address) {
1922 // Instruction details available in ARM DDI 0406C.b, A8.8.128. 2159 // Instruction details available in ARM DDI 0406C.b, A8.8.128.
1923 // 1111(31-28) | 0111(27-24) | U(23) | R(22) | 01(21-20) | Rn(19-16) | 2160 // 1111(31-28) | 0111(27-24) | U(23) | R(22) | 01(21-20) | Rn(19-16) |
1924 // 1111(15-12) | imm5(11-07) | type(6-5) | 0(4)| Rm(3-0) | 2161 // 1111(15-12) | imm5(11-07) | type(6-5) | 0(4)| Rm(3-0) |
1925 ASSERT(address.rm().is(no_reg)); 2162 ASSERT(address.rm().is(no_reg));
(...skipping 10 matching lines...) Expand all
1936 } 2173 }
1937 2174
1938 2175
// Load/Store multiple instructions.
// Load multiple registers (dst bitmask) from memory starting at base,
// using block addressing mode am.
void Assembler::ldm(BlockAddrMode am,
                    Register base,
                    RegList dst,
                    Condition cond) {
  // ABI stack constraint: ldmxx base, {..sp..} base != sp is not restartable.
  ASSERT(base.is(sp) || (dst & sp.bit()) == 0);

  if (is_thumb_mode()) {
    // NOTE(review): the constant-pool flush below (for ldm ..{..pc}
    // function returns) is skipped on this path -- confirm ldm_thumb
    // performs the equivalent pool check.
    ldm_thumb(am, base, dst, cond);
    return;
  }
  addrmod4(cond | B27 | am | L, base, dst);

  // Emit the constant pool after a function return implemented by ldm ..{..pc}.
  if (cond == al && (dst & pc.bit()) != 0) {
    // There is a slight chance that the ldm instruction was actually a call,
    // in which case it would be wrong to return into the constant pool; we
    // recognize this case by checking if the emission of the pool was blocked
    // at the pc of the ldm instruction by a mov lr, pc instruction; if this is
    // the case, we emit a jump over the pool.
    CheckConstPool(true, no_const_pool_before_ == pc_offset() - kInstrSize);
  }
}
1959 2199
1960 2200
// Store multiple registers (src bitmask) to memory starting at base,
// using block addressing mode am.
void Assembler::stm(BlockAddrMode am,
                    Register base,
                    RegList src,
                    Condition cond) {
  if (is_thumb_mode()) {
    // stm_thumb receives cond directly; no ASSERT(cond == al) here.
    stm_thumb(am, base, src, cond);
    return;
  }
  addrmod4(cond | B27 | am, base, src);
}
1967 2211
1968 2212
1969 // Exception-generating instructions and debugging support. 2213 // Exception-generating instructions and debugging support.
1970 // Stops with a non-negative code less than kNumOfWatchedStops support 2214 // Stops with a non-negative code less than kNumOfWatchedStops support
1971 // enabling/disabling and a counter feature. See simulator-arm.h . 2215 // enabling/disabling and a counter feature. See simulator-arm.h .
1972 void Assembler::stop(const char* msg, Condition cond, int32_t code) { 2216 void Assembler::stop(const char* msg, Condition cond, int32_t code) {
1973 #ifndef __arm__ 2217 #ifndef __arm__
1974 ASSERT(code >= kDefaultStopCode); 2218 ASSERT(code >= kDefaultStopCode);
(...skipping 15 matching lines...) Expand all
1990 bkpt(0); 2234 bkpt(0);
1991 bind(&skip); 2235 bind(&skip);
1992 } else { 2236 } else {
1993 bkpt(0); 2237 bkpt(0);
1994 } 2238 }
1995 #endif // def __arm__ 2239 #endif // def __arm__
1996 } 2240 }
1997 2241
1998 2242
// Emit a breakpoint instruction with the given immediate code.
void Assembler::bkpt(uint32_t imm16) {  // v5 and above
  if (is_thumb_mode()) {
    // Thumb BKPT (encoding T1) only has room for an 8-bit immediate.
    ASSERT(is_uint8(imm16));
    // 0xBE00 | imm8.
    emit16(B15 | B13 | B12 | B11 | B10 | B9 | (imm16 & 0xFF));
    return;
  }
  ASSERT(is_uint16(imm16));
  // ARM BKPT splits the 16-bit immediate into imm12 (bits 19-8) and
  // imm4 (bits 3-0).
  emit(al | B24 | B21 | (imm16 >> 4)*B8 | BKPT | (imm16 & 0xf));
}
2003 2252
2004 2253
// Supervisor call (software interrupt) with a 24-bit immediate.
// NOTE(review): no Thumb-mode branch here, unlike bkpt -- confirm svc is
// never emitted while in Thumb mode.
void Assembler::svc(uint32_t imm24, Condition cond) {
  ASSERT(is_uint24(imm24));
  emit(cond | 15*B24 | imm24);
}
2009 2258
(...skipping 522 matching lines...) Expand 10 before | Expand all | Expand 10 after
2532 const VmovIndex index, 2781 const VmovIndex index,
2533 const Register src, 2782 const Register src,
2534 const Condition cond) { 2783 const Condition cond) {
2535 // Dd[index] = Rt 2784 // Dd[index] = Rt
2536 // Instruction details available in ARM DDI 0406C.b, A8-940. 2785 // Instruction details available in ARM DDI 0406C.b, A8-940.
2537 // cond(31-28) | 1110(27-24) | 0(23) | opc1=0index(22-21) | 0(20) | 2786 // cond(31-28) | 1110(27-24) | 0(23) | opc1=0index(22-21) | 0(20) |
2538 // Vd(19-16) | Rt(15-12) | 1011(11-8) | D(7) | opc2=00(6-5) | 1(4) | 0000(3-0) 2787 // Vd(19-16) | Rt(15-12) | 1011(11-8) | D(7) | opc2=00(6-5) | 1(4) | 0000(3-0)
2539 ASSERT(index.index == 0 || index.index == 1); 2788 ASSERT(index.index == 0 || index.index == 1);
2540 int vd, d; 2789 int vd, d;
2541 dst.split_code(&vd, &d); 2790 dst.split_code(&vd, &d);
2791 if (is_thumb_mode() && cond != al) {
2792 it_thumb(cond, 1);
2793 emit(al | 0xE*B24 | index.index*B21 | vd*B16 | src.code()*B12 | 0xB*B8 |
2794 d*B7 | B4);
2795 return;
2796 }
2542 emit(cond | 0xE*B24 | index.index*B21 | vd*B16 | src.code()*B12 | 0xB*B8 | 2797 emit(cond | 0xE*B24 | index.index*B21 | vd*B16 | src.code()*B12 | 0xB*B8 |
2543 d*B7 | B4); 2798 d*B7 | B4);
2544 } 2799 }
2545 2800
2546 2801
2547 void Assembler::vmov(const Register dst, 2802 void Assembler::vmov(const Register dst,
2548 const VmovIndex index, 2803 const VmovIndex index,
2549 const DwVfpRegister src, 2804 const DwVfpRegister src,
2550 const Condition cond) { 2805 const Condition cond) {
2551 // Dd[index] = Rt 2806 // Dd[index] = Rt
(...skipping 257 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Negate a double-precision VFP register: dst = -src.
void Assembler::vneg(const DwVfpRegister dst,
                     const DwVfpRegister src,
                     const Condition cond) {
  // Instruction details available in ARM DDI 0406C.b, A8-968.
  // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | 0001(19-16) | Vd(15-12) |
  // 101(11-9) | sz=1(8) | 0(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
  int vd, d;
  dst.split_code(&vd, &d);
  int vm, m;
  src.split_code(&vm, &m);

  if (is_thumb_mode() && cond != al) {
    // Thumb VFP encodings have no condition field; predicate the single
    // following instruction with an IT block and emit it unconditionally.
    it_thumb(cond, 1);
    emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | B16 | vd*B12 | 0x5*B9 | B8 | B6 |
         m*B5 | vm);
    return;
  }
  emit(cond | 0x1D*B23 | d*B22 | 0x3*B20 | B16 | vd*B12 | 0x5*B9 | B8 | B6 |
       m*B5 | vm);
}
2823 3083
2824 3084
2825 void Assembler::vabs(const DwVfpRegister dst, 3085 void Assembler::vabs(const DwVfpRegister dst,
2826 const DwVfpRegister src, 3086 const DwVfpRegister src,
2827 const Condition cond) { 3087 const Condition cond) {
2828 // Instruction details available in ARM DDI 0406C.b, A8-524. 3088 // Instruction details available in ARM DDI 0406C.b, A8-524.
2829 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | 0000(19-16) | Vd(15-12) | 3089 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | 0000(19-16) | Vd(15-12) |
(...skipping 231 matching lines...) Expand 10 before | Expand all | Expand 10 after
3061 3321
3062 3322
// Pseudo instructions.
// Emit a nop encoded as "mov r<type>, r<type>" so the register number can
// carry marker information recognizable by IsNop().
void Assembler::nop(int type) {
  // ARMv6{K/T2} and v7 have an actual NOP instruction but it serializes
  // some of the CPU's pipeline and has to issue. Older ARM chips simply used
  // MOV Rx, Rx as NOP and it performs better even in newer CPUs.
  // We therefore use MOV Rx, Rx, even on newer CPUs, and use Rx to encode
  // a type.
  ASSERT(0 <= type && type <= 14);  // mov pc, pc isn't a nop.
  if (is_thumb_mode()) {
    // 16-bit MOV (register), encoding T1: Rd is split across D (bit 7) and
    // the low three bits; Rm occupies bits 6-3.
    uint16_t d = type >> 3;
    emit16(4*B12 | 6*B8 | d*B7 | type*B3 | (type & 7));
    return;
  }
  emit(al | 13*B21 | type*B12 | type);
}
3073 3338
3074 3339
3075 bool Assembler::IsMovT(Instr instr) { 3340 bool Assembler::IsMovT(Instr instr) {
3076 instr &= ~(((kNumberOfConditions - 1) << 28) | // Mask off conditions 3341 instr &= ~(((kNumberOfConditions - 1) << 28) | // Mask off conditions
3077 ((kNumRegisters-1)*B12) | // mask out register 3342 ((kNumRegisters-1)*B12) | // mask out register
3078 EncodeMovwImmediate(0xFFFF)); // mask out immediate value 3343 EncodeMovwImmediate(0xFFFF)); // mask out immediate value
3079 return instr == 0x34*B20; 3344 return instr == 0x34*B20;
3080 } 3345 }
3081 3346
3082 3347
3083 bool Assembler::IsMovW(Instr instr) { 3348 bool Assembler::IsMovW(Instr instr) {
3084 instr &= ~(((kNumberOfConditions - 1) << 28) | // Mask off conditions 3349 instr &= ~(((kNumberOfConditions - 1) << 28) | // Mask off conditions
3085 ((kNumRegisters-1)*B12) | // mask out destination 3350 ((kNumRegisters-1)*B12) | // mask out destination
3086 EncodeMovwImmediate(0xFFFF)); // mask out immediate value 3351 EncodeMovwImmediate(0xFFFF)); // mask out immediate value
3087 return instr == 0x30*B20; 3352 return instr == 0x30*B20;
3088 } 3353 }
3089 3354
3090 3355
3356 bool Assembler::IsMovTThumb(Instr instr) {
3357 return ((instr & ~MOVW_THUMB_IMM_MASK) == (MOVW_THUMB_MASK | BH7));
3358 }
3359
3360
3361 bool Assembler::IsMovWThumb(Instr instr) {
3362 return ((instr & ~MOVW_THUMB_IMM_MASK) == MOVW_THUMB_MASK);
3363 }
3364
3365
3091 bool Assembler::IsNop(Instr instr, int type) { 3366 bool Assembler::IsNop(Instr instr, int type) {
3092 ASSERT(0 <= type && type <= 14); // mov pc, pc isn't a nop. 3367 ASSERT(0 <= type && type <= 14); // mov pc, pc isn't a nop.
3093 // Check for mov rx, rx where x = type. 3368 // Check for mov rx, rx where x = type.
3094 return instr == (al | 13*B21 | type*B12 | type); 3369 return instr == (al | 13*B21 | type*B12 | type);
3095 } 3370 }
3096 3371
3097 3372
3098 bool Assembler::ImmediateFitsAddrMode1Instruction(int32_t imm32) { 3373 bool Assembler::ImmediateFitsAddrMode1Instruction(int32_t imm32) {
3099 uint32_t dummy1; 3374 uint32_t dummy1;
3100 uint32_t dummy2; 3375 uint32_t dummy2;
(...skipping 271 matching lines...) Expand 10 before | Expand all | Expand 10 after
3372 RecordConstPool(size); 3647 RecordConstPool(size);
3373 3648
3374 // Emit jump over constant pool if necessary. 3649 // Emit jump over constant pool if necessary.
3375 Label after_pool; 3650 Label after_pool;
3376 if (require_jump) { 3651 if (require_jump) {
3377 b(&after_pool); 3652 b(&after_pool);
3378 } 3653 }
3379 3654
3380 // Put down constant pool marker "Undefined instruction". 3655 // Put down constant pool marker "Undefined instruction".
3381 // The data size helps disassembly know what to print. 3656 // The data size helps disassembly know what to print.
3382 emit(kConstantPoolMarker | 3657 if (is_thumb_mode()) {
3383 EncodeConstantPoolLength(size_after_marker / kPointerSize)); 3658 emit32(kConstantPoolMarker |
3659 EncodeConstantPoolLength(size_after_marker / kPointerSize));
3660 } else {
3661 emit(kConstantPoolMarker |
3662 EncodeConstantPoolLength(size_after_marker / kPointerSize));
3663 }
3384 3664
3385 if (require_64_bit_align) { 3665 if (require_64_bit_align) {
3386 emit(kConstantPoolMarker); 3666 emit(kConstantPoolMarker);
3387 } 3667 }
3388 3668
3389 // Emit 64-bit constant pool entries first: their range is smaller than 3669 // Emit 64-bit constant pool entries first: their range is smaller than
3390 // 32-bit entries. 3670 // 32-bit entries.
3391 for (int i = 0; i < num_pending_reloc_info_; i++) { 3671 for (int i = 0; i < num_pending_reloc_info_; i++) {
3392 RelocInfo& rinfo = pending_reloc_info_[i]; 3672 RelocInfo& rinfo = pending_reloc_info_[i];
3393 3673
(...skipping 27 matching lines...) Expand all
3421 ASSERT(rinfo.rmode() != RelocInfo::COMMENT && 3701 ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
3422 rinfo.rmode() != RelocInfo::POSITION && 3702 rinfo.rmode() != RelocInfo::POSITION &&
3423 rinfo.rmode() != RelocInfo::STATEMENT_POSITION && 3703 rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
3424 rinfo.rmode() != RelocInfo::CONST_POOL); 3704 rinfo.rmode() != RelocInfo::CONST_POOL);
3425 3705
3426 if (rinfo.rmode() == RelocInfo::NONE64) { 3706 if (rinfo.rmode() == RelocInfo::NONE64) {
3427 // 64-bit values emitted earlier. 3707 // 64-bit values emitted earlier.
3428 continue; 3708 continue;
3429 } 3709 }
3430 3710
3711 if (is_thumb_mode()) {
3712 Instr instr = thumb32_instr_at(rinfo.pc());
3713 int thumb_ldr_delta = 2;
3714 if ((reinterpret_cast<int>(rinfo.pc()) & 3) == 0) {
3715 thumb_ldr_delta = 4;
3716 }
3717 int delta = pc_ - rinfo.pc() - thumb_ldr_delta;
3718 ASSERT(is_uint12(delta));
3719 instr &= ~kOff12Mask;
3720 instr |= delta;
3721 thumb32_instr_at_put(rinfo.pc(), instr);
3722 set_arm_mode();
3723 emit(rinfo.data());
3724 set_thumb_mode();
3725 continue;
3726 }
3727
3431 Instr instr = instr_at(rinfo.pc()); 3728 Instr instr = instr_at(rinfo.pc());
3432 3729
3433 // 64-bit loads shouldn't get here. 3730 // 64-bit loads shouldn't get here.
3434 ASSERT(!IsVldrDPcImmediateOffset(instr)); 3731 ASSERT(!IsVldrDPcImmediateOffset(instr));
3435 3732
3436 int delta = pc_ - rinfo.pc() - kPcLoadDelta; 3733 int delta = pc_ - rinfo.pc() - kPcLoadDelta;
3437 // 0 is the smallest delta: 3734 // 0 is the smallest delta:
3438 // ldr rd, [pc, #0] 3735 // ldr rd, [pc, #0]
3439 // constant pool marker 3736 // constant pool marker
3440 // data 3737 // data
(...skipping 22 matching lines...) Expand all
3463 3760
3464 // Since a constant pool was just emitted, move the check offset forward by 3761 // Since a constant pool was just emitted, move the check offset forward by
3465 // the standard interval. 3762 // the standard interval.
3466 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 3763 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
3467 } 3764 }
3468 3765
3469 3766
3470 } } // namespace v8::internal 3767 } } // namespace v8::internal
3471 3768
3472 #endif // V8_TARGET_ARCH_ARM 3769 #endif // V8_TARGET_ARCH_ARM
OLDNEW
« no previous file with comments | « src/arm/assembler-arm.h ('k') | src/arm/assembler-thumb.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698