| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
| 6 // are met: | 6 // are met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 910 matching lines...) | |
| 921 } | 921 } |
| 922 | 922 |
| 923 | 923 |
| 924 // Low-level code emission routines depending on the addressing mode. | 924 // Low-level code emission routines depending on the addressing mode. |
| 925 // If this returns true then you have to use the rotate_imm and immed_8 | 925 // If this returns true then you have to use the rotate_imm and immed_8 |
| 926 // that it returns, because it may have already changed the instruction | 926 // that it returns, because it may have already changed the instruction |
| 927 // to match them! | 927 // to match them! |
| 928 static bool fits_shifter(uint32_t imm32, | 928 static bool fits_shifter(uint32_t imm32, |
| 929 uint32_t* rotate_imm, | 929 uint32_t* rotate_imm, |
| 930 uint32_t* immed_8, | 930 uint32_t* immed_8, |
| | 931 SBitMode* smode, |
| 931 Instr* instr) { | 932 Instr* instr) { |
| 932 // imm32 must be unsigned. | 933 // imm32 must be unsigned. |
| 933 for (int rot = 0; rot < 16; rot++) { | 934 for (int rot = 0; rot < 16; rot++) { |
| 934 uint32_t imm8 = (imm32 << 2*rot) | (imm32 >> (32 - 2*rot)); | 935 uint32_t imm8 = (imm32 << 2*rot) | (imm32 >> (32 - 2*rot)); |
| 935 if (imm8 <= 0xff) { | 936 if (imm8 <= 0xff) { |
| 936 *rotate_imm = rot; | 937 *rotate_imm = rot; |
| 937 *immed_8 = imm8; | 938 *immed_8 = imm8; |
| 938 return true; | 939 return true; |
| 939 } | 940 } |
| 940 } | 941 } |
| 941 // If the opcode is one with a complementary version and the complementary | 942 // If the opcode is one with a complementary version and the complementary |
| 942 // immediate fits, change the opcode. | 943 // immediate fits, change the opcode. |
| 943 if (instr != NULL) { | 944 if (instr != NULL) { |
| 944 if ((*instr & kMovMvnMask) == kMovMvnPattern) { | 945 if ((*instr & kMovMvnMask) == kMovMvnPattern) { |
| 945 if (fits_shifter(~imm32, rotate_imm, immed_8, NULL)) { | 946 if (fits_shifter(~imm32, rotate_imm, immed_8, smode, NULL)) { |
| 946 *instr ^= kMovMvnFlip; | 947 *instr ^= kMovMvnFlip; |
| 947 return true; | 948 return true; |
| 948 } else if ((*instr & kMovLeaveCCMask) == kMovLeaveCCPattern) { | 949 } else if (CpuFeatures::IsSupported(ARMv7) && |
| 949 if (CpuFeatures::IsSupported(ARMv7)) { | 950 *smode != SetCC && |
| 950 if (imm32 < 0x10000) { | 951 imm32 < 0x10000) { |
| 951 *instr ^= kMovwLeaveCCFlip; | 952 *smode = LeaveCC; // might have been DontCareCC |
| 952 *instr |= EncodeMovwImmediate(imm32); | 953 *instr ^= kMovwLeaveCCFlip; |
| 953 *rotate_imm = *immed_8 = 0; // Not used for movw. | 954 *instr |= EncodeMovwImmediate(imm32); |
| 954 return true; | 955 *rotate_imm = *immed_8 = 0; // Not used for movw. |
| 955 } | 956 return true; |
| 956 } | |
| 957 } | 957 } |
| 958 } else if ((*instr & kCmpCmnMask) == kCmpCmnPattern) { | 958 } else if ((*instr & kCmpCmnMask) == kCmpCmnPattern) { |
| 959 if (fits_shifter(-static_cast<int>(imm32), rotate_imm, immed_8, NULL)) { | 959 if (fits_shifter(-static_cast<int>(imm32), rotate_imm, |
| 960 immed_8, NULL, NULL)) { |
| 960 *instr ^= kCmpCmnFlip; | 961 *instr ^= kCmpCmnFlip; |
| 961 return true; | 962 return true; |
| 962 } | 963 } |
| 963 } else { | 964 } else { |
| 964 Instr alu_insn = (*instr & kALUMask); | 965 Instr alu_insn = (*instr & kALUMask); |
| 965 if (alu_insn == ADD || | 966 if (alu_insn == ADD || |
| 966 alu_insn == SUB) { | 967 alu_insn == SUB) { |
| 967 if (fits_shifter(-static_cast<int>(imm32), rotate_imm, immed_8, NULL)) { | 968 if (fits_shifter(-static_cast<int>(imm32), rotate_imm, |
| | 969 immed_8, smode, NULL)) { |
| 968 *instr ^= kAddSubFlip; | 970 *instr ^= kAddSubFlip; |
| 969 return true; | 971 return true; |
| 970 } | 972 } |
| 971 } else if (alu_insn == AND || | 973 } else if (alu_insn == AND || |
| 972 alu_insn == BIC) { | 974 alu_insn == BIC) { |
| 973 if (fits_shifter(~imm32, rotate_imm, immed_8, NULL)) { | 975 if (fits_shifter(~imm32, rotate_imm, immed_8, smode, NULL)) { |
| 974 *instr ^= kAndBicFlip; | 976 *instr ^= kAndBicFlip; |
| 975 return true; | 977 return true; |
| 976 } | 978 } |
| 977 } | 979 } |
| 978 } | 980 } |
| 979 } | 981 } |
| 980 return false; | 982 return false; |
| 981 } | 983 } |
| 982 | 984 |
| 983 | 985 |
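For context, and not part of the patch: an ARM data-processing immediate is an 8-bit value rotated right by twice the 4-bit rotate field, and the loop in `fits_shifter()` above searches for that pair by rotating `imm32` left two bits at a time. A minimal, self-contained sketch of the round-trip (helper names are ours):

```cpp
#include <cassert>
#include <cstdint>

// Rotate right by n bits; written to avoid the undefined shift-by-32 case.
static uint32_t RotateRight32(uint32_t v, unsigned n) {
  n &= 31;
  return n == 0 ? v : (v >> n) | (v << (32 - n));
}

// What the CPU computes from the (rotate_imm, immed_8) pair that
// fits_shifter() hands back.
static uint32_t DecodeShifterImmediate(uint32_t rotate_imm, uint32_t immed_8) {
  return RotateRight32(immed_8, 2 * rotate_imm);
}

int main() {
  // 0x0003FC00 is 0xFF rotated right by 22 bits, i.e. rotate_imm == 11.
  assert(DecodeShifterImmediate(11, 0xFF) == 0x0003FC00u);
  // 0x101 sets bits nine positions apart; no 8-bit window covers both, so
  // fits_shifter() must fall back to the complementary-opcode tricks above
  // or to ip / the constant pool.
  return 0;
}
```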
| (...skipping 25 matching lines...) | |
| 1009 return false; | 1011 return false; |
| 1010 } | 1012 } |
| 1011 return CpuFeatures::IsSupported(ARMv7); | 1013 return CpuFeatures::IsSupported(ARMv7); |
| 1012 } | 1014 } |
| 1013 | 1015 |
| 1014 | 1016 |
| 1015 bool Operand::is_single_instruction(const Assembler* assembler, | 1017 bool Operand::is_single_instruction(const Assembler* assembler, |
| 1016 Instr instr) const { | 1018 Instr instr) const { |
| 1017 if (rm_.is_valid()) return true; | 1019 if (rm_.is_valid()) return true; |
| 1018 uint32_t dummy1, dummy2; | 1020 uint32_t dummy1, dummy2; |
| | 1021 SBitMode smode = (instr & S) != 0 ? SetCC : LeaveCC; |
| 1019 if (must_output_reloc_info(assembler) || | 1022 if (must_output_reloc_info(assembler) || |
| 1020 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { | 1023 !fits_shifter(imm32_, &dummy1, &dummy2, &smode, &instr)) { |
| 1021 // The immediate operand cannot be encoded as a shifter operand, or use of | 1024 // The immediate operand cannot be encoded as a shifter operand, or use of |
| 1022 // the constant pool is required. For a mov instruction that does not set | 1025 // the constant pool is required. For a mov instruction that does not set |
| 1023 // the condition codes, alternative instruction sequences can be used. | 1026 // the condition codes, alternative instruction sequences can be used. |
| 1024 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set | 1027 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set |
| 1025 return !use_movw_movt(*this, assembler); | 1028 return !use_movw_movt(*this, assembler); |
| 1026 } else { | 1029 } else { |
| 1027 // If this is not a mov or mvn instruction there will always be an | 1030 // If this is not a mov or mvn instruction there will always be an |
| 1028 // additional instruction - either a mov or an ldr. The mov might actually | 1031 // additional instruction - either a mov or an ldr. The mov might actually |
| 1029 // be two instructions (movw followed by movt), so including the actual | 1032 // be two instructions (movw followed by movt), so including the actual |
| 1030 // instruction, two or three instructions will be generated. | 1033 // instruction, two or three instructions will be generated. |
| 1031 return false; | 1034 return false; |
| 1032 } | 1035 } |
| 1033 } else { | 1036 } else { |
| 1034 // No use of constant pool and the immediate operand can be encoded as a | 1037 // No use of constant pool and the immediate operand can be encoded as a |
| 1035 // shifter operand. | 1038 // shifter operand. |
| 1036 return true; | 1039 return true; |
| 1037 } | 1040 } |
| 1038 } | 1041 } |
| 1039 | 1042 |
| 1040 | 1043 |
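The `SBitMode` type threaded through these routines is defined elsewhere in this patch. Judging from the uses visible here (`sbit_from_mode()`, the `SetCCBit`/`LeaveCCBit` constants, and the `DontCareCC` comments), a plausible reconstruction is:

```cpp
// Reconstruction only; the real definitions live in the headers this patch
// also touches. The third state exists because in Thumb the shortest
// encoding of some operations sets the flags, so a caller that does not
// depend on the flags should say so rather than demand LeaveCC.
enum SBitMode {
  SetCC,      // The caller needs the condition flags updated.
  LeaveCC,    // The caller needs the condition flags preserved.
  DontCareCC  // Either is fine; pick the cheapest encoding.
};

// Presumed mapping onto the single S bit of an ARM-mode instruction, where
// setting flags is never cheaper, so "don't care" degrades to LeaveCC.
static inline SBit sbit_from_mode(SBitMode smode) {
  return smode == SetCC ? SetCCBit : LeaveCCBit;
}
```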
| 1041 void Assembler::move_32_bit_immediate_thumb(Register rd, | 1044 void Assembler::move_32_bit_immediate_thumb(Register rd, |
| 1042 SBit s, | 1045 SBitMode smode, |
| 1043 const Operand& x, | 1046 const Operand& x, |
| 1044 Condition cond) { | 1047 Condition cond) { |
| 1045 if (rd.code() != pc.code() && s == LeaveCC) { | 1048 if (rd.code() != pc.code() && smode != SetCC) { |
| 1046 if (use_movw_movt(x, this)) { | 1049 if (use_movw_movt(x, this)) { |
| 1047 if (x.must_output_reloc_info(this)) { | 1050 if (x.must_output_reloc_info(this)) { |
| 1048 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); | 1051 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); |
| 1049 // Make sure the movw/movt doesn't get separated. | 1052 // Make sure the movw/movt doesn't get separated. |
| 1050 BlockConstPoolFor(2); | 1053 BlockConstPoolFor(2); |
| 1051 } | 1054 } |
| 1052 if (cond != al) { | 1055 if (cond != al) { |
| 1053 pc_ -= kInstr16Size; | 1056 pc_ -= kInstr16Size; |
| 1054 it_thumb(cond, 2, true); | 1057 it_thumb(cond, 2, true); |
| 1055 } | 1058 } |
| 1056 emit32(thumb32_mode3(MOV_32_IMM3) | | 1059 emit32(thumb32_mode3(MOV_32_IMM3) | |
| 1057 thumb32_1reg_zero_extend_imm_split_4i38(rd, (x.imm32_ & 0xffff))); | 1060 thumb32_1reg_zero_extend_imm_split_4i38(rd, (x.imm32_ & 0xffff))); |
| 1058 movt_thumb(rd, static_cast<uint32_t>(x.imm32_) >> 16, al); | 1061 movt_thumb(rd, static_cast<uint32_t>(x.imm32_) >> 16, al); |
| 1059 return; | 1062 return; |
| 1060 } | 1063 } |
| 1061 } | 1064 } |
| 1062 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); | 1065 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); |
| 1063 ldr_thumb(rd, MemOperand(pc, 0)); | 1066 ldr_thumb(rd, MemOperand(pc, 0)); |
| 1064 } | 1067 } |
| 1065 | 1068 |
| 1066 | 1069 |
| 1067 void Assembler::move_32_bit_immediate(Condition cond, | 1070 void Assembler::move_32_bit_immediate(Condition cond, |
| 1068 Register rd, | 1071 Register rd, |
| 1069 SBit s, | |
| 1070 const Operand& x) { | 1072 const Operand& x) { |
| 1071 if (rd.code() != pc.code() && s == LeaveCC) { | 1073 if (rd.code() != pc.code()) { |
| 1072 if (use_movw_movt(x, this)) { | 1074 if (use_movw_movt(x, this)) { |
| 1073 if (x.must_output_reloc_info(this)) { | 1075 if (x.must_output_reloc_info(this)) { |
| 1074 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); | 1076 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); |
| 1075 // Make sure the movw/movt doesn't get separated. | 1077 // Make sure the movw/movt doesn't get separated. |
| 1076 BlockConstPoolFor(2); | 1078 BlockConstPoolFor(2); |
| 1077 } | 1079 } |
| 1078 emit(cond | 0x30*B20 | rd.code()*B12 | | 1080 emit(cond | 0x30*B20 | rd.code()*B12 | |
| 1079 EncodeMovwImmediate(x.imm32_ & 0xffff)); | 1081 EncodeMovwImmediate(x.imm32_ & 0xffff)); |
| 1080 movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond); | 1082 movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond); |
| 1081 return; | 1083 return; |
| 1082 } | 1084 } |
| 1083 } | 1085 } |
| 1084 | 1086 |
| 1085 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); | 1087 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); |
| 1086 ldr(rd, MemOperand(pc, 0), cond); | 1088 ldr(rd, MemOperand(pc, 0), cond); |
| 1087 } | 1089 } |
| 1088 | 1090 |
| 1089 | 1091 |
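For reference, assumed from the ARMv7 MOVW/MOVT encodings rather than quoted from this patch: the 16-bit immediate is split across the instruction's imm4 (bits 19:16) and imm12 (bits 11:0) fields, so `EncodeMovwImmediate` presumably looks like the sketch below, and the pair materializes a full word without touching the flags or the constant pool.

```cpp
// Presumed shape of the helper used above (matches the A1 MOVW encoding).
static inline uint32_t EncodeMovwImmediate(uint32_t imm16) {
  // imm16 -> imm4:imm12, i.e. instruction bits 19:16 and 11:0.
  return ((imm16 & 0xF000) << 4) | (imm16 & 0x0FFF);
}

// movw rd, #(imm32 & 0xFFFF)  ; rd = low half, upper half zeroed
// movt rd, #(imm32 >> 16)     ; writes the top half, low half preserved
```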
| 1090 void Assembler::addrmod1(Instr instr, | 1092 void Assembler::addrmod1(Instr instr, |
| | 1093 SBitMode smode, |
| 1091 Register rn, | 1094 Register rn, |
| 1092 Register rd, | 1095 Register rd, |
| 1093 const Operand& x) { | 1096 const Operand& x) { |
| 1094 CheckBuffer(); | 1097 CheckBuffer(); |
| 1095 ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0); | 1098 ASSERT((instr & ~(kCondMask | kOpCodeMask)) == 0); |
| 1096 if (!x.rm_.is_valid()) { | 1099 if (!x.rm_.is_valid()) { |
| 1097 // Immediate. | 1100 // Immediate. |
| 1098 uint32_t rotate_imm; | 1101 uint32_t rotate_imm; |
| 1099 uint32_t immed_8; | 1102 uint32_t immed_8; |
| 1100 if (x.must_output_reloc_info(this) || | 1103 if (!x.must_output_reloc_info(this) && |
| 1101 !fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) { | 1104 fits_shifter(x.imm32_, &rotate_imm, &immed_8, &smode, &instr)) { |
| 1102 // The immediate operand cannot be encoded as a shifter operand, so load | 1105 // The immediate operand can be encoded directly in the shifter. |
| 1103 // it first to register ip and change the original instruction to use ip. | 1106 instr |= I | rotate_imm * B8 | immed_8; |
| 1104 // However, if the original instruction is a 'mov rd, x' (not setting the | 1107 } else { |
| 1105 // condition code), then replace it with a 'ldr rd, [pc]'. | 1108 // The immediate operand cannot be encoded as a shifter operand. We will |
| 1106 CHECK(!rn.is(ip)); // rn should never be ip, or will be trashed | 1109 // need to generate extra instructions. |
| 1110 CHECK(!rn.is(ip)); |
| 1107 Condition cond = Instruction::ConditionField(instr); | 1111 Condition cond = Instruction::ConditionField(instr); |
| 1108 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set | 1112 if ((instr & kMovMvnMask) == kMovMvnPattern && smode != SetCC) { |
| 1109 move_32_bit_immediate(cond, rd, LeaveCC, x); | 1113 // If this is a move that doesn't set flags, we can just load directly |
| | 1114 // into rd using movw/movt or a load from the constant pool. |
| | 1115 move_32_bit_immediate(cond, rd, x); |
| | 1116 return; |
| 1110 } else { | 1117 } else { |
| 1111 if ((instr & kMovMvnMask) == kMovMvnPattern) { | 1118 // Otherwise, we move the value into ip. This could be encoded as |
| 1112 // Moves need to use a constant pool entry. | 1119 // mvn, movw/movt, or a constant pool load: whatever is most efficient. |
| 1113 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); | 1120 SBitMode mov_smode = smode == LeaveCC ? LeaveCC : DontCareCC; |
| 1114 ldr(ip, MemOperand(pc, 0), cond); | 1121 mov(ip, x, mov_smode, cond); |
| 1115 } else if (x.must_output_reloc_info(this)) { | 1122 addrmod1(instr, smode, rn, rd, Operand(ip)); |
| 1116 // Otherwise, use most efficient form of fetching from constant pool. | 1123 return; |
| 1117 move_32_bit_immediate(cond, ip, LeaveCC, x); | |
| 1118 } else { | |
| 1119 // If this is not a mov or mvn instruction we may still be able to | |
| 1120 // avoid a constant pool entry by using mvn or movw. | |
| 1121 mov(ip, x, LeaveCC, cond); | |
| 1122 } | |
| 1123 addrmod1(instr, rn, rd, Operand(ip)); | |
| 1124 } | 1124 } |
| 1125 return; | |
| 1126 } | 1125 } |
| 1127 instr |= I | rotate_imm*B8 | immed_8; | |
| 1128 } else if (!x.rs_.is_valid()) { | 1126 } else if (!x.rs_.is_valid()) { |
| 1129 // Immediate shift. | 1127 // Immediate shift. |
| 1130 instr |= x.shift_imm_*B7 | x.shift_op_ | x.rm_.code(); | 1128 instr |= x.shift_imm_*B7 | x.shift_op_ | x.rm_.code(); |
| 1131 } else { | 1129 } else { |
| 1132 // Register shift. | 1130 // Register shift. |
| 1133 ASSERT(!rn.is(pc) && !rd.is(pc) && !x.rm_.is(pc) && !x.rs_.is(pc)); | 1131 ASSERT(!rn.is(pc) && !rd.is(pc) && !x.rm_.is(pc) && !x.rs_.is(pc)); |
| 1134 instr |= x.rs_.code()*B8 | x.shift_op_ | B4 | x.rm_.code(); | 1132 instr |= x.rs_.code()*B8 | x.shift_op_ | B4 | x.rm_.code(); |
| 1135 } | 1133 } |
| 1136 emit(instr | rn.code()*B16 | rd.code()*B12); | 1134 SBit s = sbit_from_mode(smode); |
| | 1135 emit(instr | s | rn.code()*B16 | rd.code()*B12); |
| 1137 if (rn.is(pc) || x.rm_.is(pc)) { | 1136 if (rn.is(pc) || x.rm_.is(pc)) { |
| 1138 // Block constant pool emission for one instruction after reading pc. | 1137 // Block constant pool emission for one instruction after reading pc. |
| 1139 BlockConstPoolFor(1); | 1138 BlockConstPoolFor(1); |
| 1140 } | 1139 } |
| 1141 } | 1140 } |
| 1142 | 1141 |
| 1143 | 1142 |
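To make the immediate path above concrete, here is the expansion one would expect for an add whose constant fits no rotated 8-bit pattern. The sequence is inferred from the code, not quoted from it; `ip` is the scratch register, and the movw/movt pair assumes an ARMv7 target where `use_movw_movt()` holds.

```cpp
// add(r0, r1, Operand(0x12345678), LeaveCC, al)
//
// 0x12345678 fits no rotation, and neither does its negation (so the
// kAddSubFlip trick does not apply). The constant is therefore built in ip
// and the add is re-issued with a register operand:
//
//   movw ip, #0x5678
//   movt ip, #0x1234
//   add  r0, r1, ip
//
// Pre-ARMv7, or when relocation info is required, the first two
// instructions collapse into one constant-pool load: ldr ip, [pc, #...].
```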
| 1144 void Assembler::addrmod2(Instr instr, Register rd, const MemOperand& x) { | 1143 void Assembler::addrmod2(Instr instr, Register rd, const MemOperand& x) { |
| 1145 ASSERT((instr & ~(kCondMask | B | L)) == B26); | 1144 ASSERT((instr & ~(kCondMask | B | L)) == B26); |
| 1146 int am = x.am_; | 1145 int am = x.am_; |
| (...skipping 209 matching lines...) | |
| 1356 emit16(thumb16_mode3(BX_REG) | thumb16_anyreg_encoding(target)); | 1355 emit16(thumb16_mode3(BX_REG) | thumb16_anyreg_encoding(target)); |
| 1357 return; | 1356 return; |
| 1358 } | 1357 } |
| 1359 emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BX | target.code()); | 1358 emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BX | target.code()); |
| 1360 } | 1359 } |
| 1361 | 1360 |
| 1362 | 1361 |
| 1363 // Data-processing instructions. | 1362 // Data-processing instructions. |
| 1364 | 1363 |
| 1365 void Assembler::and_(Register dst, Register src1, const Operand& src2, | 1364 void Assembler::and_(Register dst, Register src1, const Operand& src2, |
| 1366 SBit s, Condition cond) { | 1365 SBitMode smode, Condition cond) { |
| 1367 if (is_thumb_mode()) { | 1366 if (is_thumb_mode()) { |
| 1368 and_thumb(dst, src1, src2, s, cond); | 1367 and_thumb(dst, src1, src2, smode, cond); |
| 1369 return; | 1368 return; |
| 1370 } | 1369 } |
| 1371 addrmod1(cond | AND | s, src1, dst, src2); | 1370 addrmod1(cond | AND, smode, src1, dst, src2); |
| 1372 } | 1371 } |
| 1373 | 1372 |
| 1374 | 1373 |
| 1375 void Assembler::eor(Register dst, Register src1, const Operand& src2, | 1374 void Assembler::eor(Register dst, Register src1, const Operand& src2, |
| 1376 SBit s, Condition cond) { | 1375 SBitMode smode, Condition cond) { |
| 1377 if (is_thumb_mode()) { | 1376 if (is_thumb_mode()) { |
| 1378 eor_thumb(dst, src1, src2, s, cond); | 1377 eor_thumb(dst, src1, src2, smode, cond); |
| 1379 return; | 1378 return; |
| 1380 } | 1379 } |
| 1381 addrmod1(cond | EOR | s, src1, dst, src2); | 1380 addrmod1(cond | EOR, smode, src1, dst, src2); |
| 1382 } | 1381 } |
| 1383 | 1382 |
| 1384 | 1383 |
| 1385 void Assembler::sub(Register dst, Register src1, const Operand& src2, | 1384 void Assembler::sub(Register dst, Register src1, const Operand& src2, |
| 1386 SBit s, Condition cond) { | 1385 SBitMode smode, Condition cond) { |
| 1387 if (is_thumb_mode()) { | 1386 if (is_thumb_mode()) { |
| 1388 sub_thumb(dst, src1, src2, s, cond); | 1387 sub_thumb(dst, src1, src2, smode, cond); |
| 1389 return; | 1388 return; |
| 1390 } | 1389 } |
| 1391 addrmod1(cond | SUB | s, src1, dst, src2); | 1390 addrmod1(cond | SUB, smode, src1, dst, src2); |
| 1392 } | 1391 } |
| 1393 | 1392 |
| 1394 | 1393 |
| 1395 void Assembler::rsb(Register dst, Register src1, const Operand& src2, | 1394 void Assembler::rsb(Register dst, Register src1, const Operand& src2, |
| 1396 SBit s, Condition cond) { | 1395 SBitMode smode, Condition cond) { |
| 1397 if (is_thumb_mode()) { | 1396 if (is_thumb_mode()) { |
| 1398 rsb_thumb(dst, src1, src2, s, cond); | 1397 rsb_thumb(dst, src1, src2, smode, cond); |
| 1399 return; | 1398 return; |
| 1400 } | 1399 } |
| 1401 addrmod1(cond | RSB | s, src1, dst, src2); | 1400 addrmod1(cond | RSB, smode, src1, dst, src2); |
| 1402 } | 1401 } |
| 1403 | 1402 |
| 1404 | 1403 |
| 1405 void Assembler::add(Register dst, Register src1, const Operand& src2, | 1404 void Assembler::add(Register dst, Register src1, const Operand& src2, |
| 1406 SBit s, Condition cond) { | 1405 SBitMode smode, Condition cond) { |
| 1407 if (is_thumb_mode()) { | 1406 if (is_thumb_mode()) { |
| 1408 add_thumb(dst, src1, src2, s, cond); | 1407 add_thumb(dst, src1, src2, smode, cond); |
| 1409 return; | 1408 return; |
| 1410 } | 1409 } |
| 1411 addrmod1(cond | ADD | s, src1, dst, src2); | 1410 addrmod1(cond | ADD, smode, src1, dst, src2); |
| 1412 } | 1411 } |
| 1413 | 1412 |
| 1414 | 1413 |
| 1415 void Assembler::adc(Register dst, Register src1, const Operand& src2, | 1414 void Assembler::adc(Register dst, Register src1, const Operand& src2, |
| 1416 SBit s, Condition cond) { | 1415 SBitMode smode, Condition cond) { |
| 1417 if (is_thumb_mode()) { | 1416 if (is_thumb_mode()) { |
| 1418 adc_thumb(dst, src1, src2, s, cond); | 1417 adc_thumb(dst, src1, src2, smode, cond); |
| 1419 return; | 1418 return; |
| 1420 } | 1419 } |
| 1421 addrmod1(cond | ADC | s, src1, dst, src2); | 1420 addrmod1(cond | ADC, smode, src1, dst, src2); |
| 1422 } | 1421 } |
| 1423 | 1422 |
| 1424 | 1423 |
| 1425 void Assembler::sbc(Register dst, Register src1, const Operand& src2, | 1424 void Assembler::sbc(Register dst, Register src1, const Operand& src2, |
| 1426 SBit s, Condition cond) { | 1425 SBitMode smode, Condition cond) { |
| 1427 if (is_thumb_mode()) { | 1426 if (is_thumb_mode()) { |
| 1428 sbc_thumb(dst, src1, src2, s, cond); | 1427 sbc_thumb(dst, src1, src2, smode, cond); |
| 1429 return; | 1428 return; |
| 1430 } | 1429 } |
| 1431 addrmod1(cond | SBC | s, src1, dst, src2); | 1430 addrmod1(cond | SBC, smode, src1, dst, src2); |
| 1432 } | 1431 } |
| 1433 | 1432 |
| 1434 | 1433 |
| 1435 void Assembler::rsc(Register dst, Register src1, const Operand& src2, | 1434 void Assembler::rsc(Register dst, Register src1, const Operand& src2, |
| 1436 SBit s, Condition cond) { | 1435 SBitMode smode, Condition cond) { |
| 1437 addrmod1(cond | RSC | s, src1, dst, src2); | 1436 addrmod1(cond | RSC, smode, src1, dst, src2); |
| 1438 } | 1437 } |
| 1439 | 1438 |
| 1440 | 1439 |
| 1441 void Assembler::tst(Register src1, const Operand& src2, Condition cond) { | 1440 void Assembler::tst(Register src1, const Operand& src2, Condition cond) { |
| 1442 if (is_thumb_mode()) { | 1441 if (is_thumb_mode()) { |
| 1443 tst_thumb(src1, src2, cond); | 1442 tst_thumb(src1, src2, cond); |
| 1444 return; | 1443 return; |
| 1445 } | 1444 } |
| 1446 addrmod1(cond | TST | S, src1, r0, src2); | 1445 addrmod1(cond | TST, SetCC, src1, r0, src2); |
| 1447 } | 1446 } |
| 1448 | 1447 |
| 1449 | 1448 |
| 1450 void Assembler::teq(Register src1, const Operand& src2, Condition cond) { | 1449 void Assembler::teq(Register src1, const Operand& src2, Condition cond) { |
| 1451 if (is_thumb_mode()) { | 1450 if (is_thumb_mode()) { |
| 1452 teq_thumb(src1, src2, cond); | 1451 teq_thumb(src1, src2, cond); |
| 1453 return; | 1452 return; |
| 1454 } | 1453 } |
| 1455 addrmod1(cond | TEQ | S, src1, r0, src2); | 1454 addrmod1(cond | TEQ, SetCC, src1, r0, src2); |
| 1456 } | 1455 } |
| 1457 | 1456 |
| 1458 | 1457 |
| 1459 void Assembler::cmp(Register src1, const Operand& src2, Condition cond) { | 1458 void Assembler::cmp(Register src1, const Operand& src2, Condition cond) { |
| 1460 if (is_thumb_mode()) { | 1459 if (is_thumb_mode()) { |
| 1461 cmp_thumb(src1, src2, cond); | 1460 cmp_thumb(src1, src2, cond); |
| 1462 return; | 1461 return; |
| 1463 } | 1462 } |
| 1464 addrmod1(cond | CMP | S, src1, r0, src2); | 1463 addrmod1(cond | CMP, SetCC, src1, r0, src2); |
| 1465 } | 1464 } |
| 1466 | 1465 |
| 1467 | 1466 |
| 1468 void Assembler::cmp_raw_immediate( | 1467 void Assembler::cmp_raw_immediate( |
| 1469 Register src, int raw_immediate, Condition cond) { | 1468 Register src, int raw_immediate, Condition cond) { |
| 1470 ASSERT(is_uint12(raw_immediate)); | 1469 ASSERT(is_uint12(raw_immediate)); |
| 1471 emit(cond | I | CMP | S | src.code() << 16 | raw_immediate); | 1470 emit(cond | I | CMP | S | src.code() << 16 | raw_immediate); |
| 1472 } | 1471 } |
| 1473 | 1472 |
| 1474 | 1473 |
| 1475 void Assembler::cmn(Register src1, const Operand& src2, Condition cond) { | 1474 void Assembler::cmn(Register src1, const Operand& src2, Condition cond) { |
| 1476 if (is_thumb_mode()) { | 1475 if (is_thumb_mode()) { |
| 1477 cmn_thumb(src1, src2, cond); | 1476 cmn_thumb(src1, src2, cond); |
| 1478 return; | 1477 return; |
| 1479 } | 1478 } |
| 1480 addrmod1(cond | CMN | S, src1, r0, src2); | 1479 addrmod1(cond | CMN, SetCC, src1, r0, src2); |
| 1481 } | 1480 } |
| 1482 | 1481 |
| 1483 | 1482 |
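An aside on the kCmpCmnFlip path that cmp and cmn share through `fits_shifter()`: comparing against an immediate whose negation is encodable is silently rewritten as the complementary comparison. This is sound because both forms perform the same addition and therefore set identical flags. A hypothetical call site (`__` is the usual masm shorthand):

```cpp
// -1 (0xFFFFFFFF) fits no rotated 8-bit pattern, but its negation 1 does:
__ cmp(r0, Operand(-1));  // emitted as: cmn r0, #1   (r0 - (-1) == r0 + 1)
```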
| 1484 void Assembler::orr(Register dst, Register src1, const Operand& src2, | 1483 void Assembler::orr(Register dst, Register src1, const Operand& src2, |
| 1485 SBit s, Condition cond) { | 1484 SBitMode smode, Condition cond) { |
| 1486 if (is_thumb_mode()) { | 1485 if (is_thumb_mode()) { |
| 1487 orr_thumb(dst, src1, src2, s, cond); | 1486 orr_thumb(dst, src1, src2, smode, cond); |
| 1488 return; | 1487 return; |
| 1489 } | 1488 } |
| 1490 addrmod1(cond | ORR | s, src1, dst, src2); | 1489 addrmod1(cond | ORR, smode, src1, dst, src2); |
| 1491 } | 1490 } |
| 1492 | 1491 |
| 1493 | 1492 |
| 1494 void Assembler::mov(Register dst, const Operand& src, SBit s, Condition cond) { | 1493 void Assembler::mov(Register dst, |
| | 1494 const Operand& src, |
| | 1495 SBitMode smode, |
| | 1496 Condition cond) { |
| 1495 if (dst.is(pc)) { | 1497 if (dst.is(pc)) { |
| 1496 positions_recorder()->WriteRecordedPositions(); | 1498 positions_recorder()->WriteRecordedPositions(); |
| 1497 } | 1499 } |
| 1498 // Don't allow nop instructions in the form mov rn, rn to be generated using | 1500 // Don't allow nop instructions in the form mov rn, rn to be generated using |
| 1499 // the mov instruction. They must be generated using nop(int/NopMarkerTypes) | 1501 // the mov instruction. They must be generated using nop(int/NopMarkerTypes) |
| 1500 // or MarkCode(int/NopMarkerTypes) pseudo instructions. | 1502 // or MarkCode(int/NopMarkerTypes) pseudo instructions. |
| 1501 ASSERT(!(src.is_reg() && src.rm().is(dst) && s == LeaveCC && cond == al)); | 1503 ASSERT(!(src.is_reg() && src.rm().is(dst) && smode != SetCC && cond == al)); |
| 1502 if (is_thumb_mode()) { | 1504 if (is_thumb_mode()) { |
| 1503 mov_thumb(dst, src, s, cond); | 1505 mov_thumb(dst, src, smode, cond); |
| 1504 return; | 1506 return; |
| 1505 } | 1507 } |
| 1506 addrmod1(cond | MOV | s, r0, dst, src); | 1508 if (dst.code() == 15 && smode == DontCareCC) { |
| | 1509 smode = LeaveCC; |
| | 1510 } |
| 1511 addrmod1(cond | MOV, smode, r0, dst, src); |
| 1507 } | 1512 } |
| 1508 | 1513 |
| 1509 | 1514 |
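A note on why the mov path cares about DontCareCC at all (hypothetical call sites; `__` is the usual masm shorthand): in Thumb, the shortest encoding of a small-immediate move is the 16-bit flag-setting `movs`, which a LeaveCC caller may not use. A flag-setting move to pc, on the other hand, is an exception return in ARM mode, which is presumably why the pc case above pins DontCareCC down to LeaveCC.

```cpp
__ mov(r0, Operand(1), DontCareCC);  // Thumb: 2-byte "movs r0, #1";
                                     // ARM: "mov r0, #1" with S clear.
__ mov(r0, Operand(1), LeaveCC);     // Thumb: needs the 4-byte "mov.w".
```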
| 1510 void Assembler::movw(Register reg, uint32_t immediate, Condition cond) { | 1515 void Assembler::movw(Register reg, uint32_t immediate, Condition cond) { |
| 1511 ASSERT(immediate < 0x10000); | 1516 ASSERT(immediate < 0x10000); |
| | 1517 if (is_thumb_mode()) { |
| | 1518 ASSERT(cond == al); |
| | 1519 mov_imm_t3(reg, Operand(immediate), LeaveCCBit, al); |
| | 1520 return; |
| | 1521 } |
| | 1522 |
| 1512 // May use movw if supported, but on unsupported platforms will try to use | 1523 // May use movw if supported, but on unsupported platforms will try to use |
| 1513 // equivalent rotated immed_8 value and other tricks before falling back to a | 1524 // equivalent rotated immed_8 value and other tricks before falling back to a |
| 1514 // constant pool load. | 1525 // constant pool load. |
| 1515 mov(reg, Operand(immediate), LeaveCC, cond); | 1526 mov(reg, Operand(immediate), LeaveCC, cond); |
| 1516 } | 1527 } |
| 1517 | 1528 |
| 1518 | 1529 |
| 1519 void Assembler::movt(Register reg, uint32_t immediate, Condition cond) { | 1530 void Assembler::movt(Register reg, uint32_t immediate, Condition cond) { |
| 1520 if (is_thumb_mode()) { | 1531 if (is_thumb_mode()) { |
| 1521 movt_thumb(reg, immediate, cond); | 1532 movt_thumb(reg, immediate, cond); |
| 1522 return; | 1533 return; |
| 1523 } | 1534 } |
| 1524 emit(cond | 0x34*B20 | reg.code()*B12 | EncodeMovwImmediate(immediate)); | 1535 emit(cond | 0x34*B20 | reg.code()*B12 | EncodeMovwImmediate(immediate)); |
| 1525 } | 1536 } |
| 1526 | 1537 |
| 1527 | 1538 |
| 1528 void Assembler::bic(Register dst, Register src1, const Operand& src2, | 1539 void Assembler::bic(Register dst, Register src1, const Operand& src2, |
| 1529 SBit s, Condition cond) { | 1540 SBitMode smode, Condition cond) { |
| 1530 if (is_thumb_mode()) { | 1541 if (is_thumb_mode()) { |
| 1531 bic_thumb(dst, src1, src2, s, cond); | 1542 bic_thumb(dst, src1, src2, smode, cond); |
| 1532 return; | 1543 return; |
| 1533 } | 1544 } |
| 1534 addrmod1(cond | BIC | s, src1, dst, src2); | 1545 addrmod1(cond | BIC, smode, src1, dst, src2); |
| 1535 } | 1546 } |
| 1536 | 1547 |
| 1537 | 1548 |
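The kAndBicFlip case in `fits_shifter()` is the mask-shaped twin of the cmp/cmn flip: an and whose mask has an encodable complement becomes a bic, and vice versa. A hypothetical example:

```cpp
// 0xFFFFFF00 fits no rotated 8-bit pattern, but ~0xFFFFFF00 == 0xFF does,
// so the instruction is re-encoded without touching ip or the constant pool:
__ and_(r0, r1, Operand(0xFFFFFF00));  // emitted as: bic r0, r1, #0xFF
```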
| 1538 void Assembler::mvn(Register dst, const Operand& src, SBit s, Condition cond) { | 1549 void Assembler::mvn(Register dst, const Operand& src, |
| | 1550 SBitMode smode, Condition cond) { |
| 1539 if (is_thumb_mode()) { | 1551 if (is_thumb_mode()) { |
| 1540 mvn_thumb(dst, src, s, cond); | 1552 mvn_thumb(dst, src, smode, cond); |
| 1541 return; | 1553 return; |
| 1542 } | 1554 } |
| 1543 addrmod1(cond | MVN | s, r0, dst, src); | 1555 addrmod1(cond | MVN, smode, r0, dst, src); |
| 1544 } | 1556 } |
| 1545 | 1557 |
| 1546 | 1558 |
| 1547 // Multiply instructions. | 1559 // Multiply instructions. |
| 1548 void Assembler::mla(Register dst, Register src1, Register src2, Register srcA, | 1560 void Assembler::mla(Register dst, Register src1, Register src2, Register srcA, |
| 1549 SBit s, Condition cond) { | 1561 SBitMode smode, Condition cond) { |
| 1550 if (is_thumb_mode()) { | 1562 if (is_thumb_mode()) { |
| 1551 mla_thumb(dst, src1, src2, srcA, s, cond); | 1563 mla_thumb(dst, src1, src2, srcA, smode, cond); |
| 1552 return; | 1564 return; |
| 1553 } | 1565 } |
| 1554 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc)); | 1566 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc)); |
| | 1567 SBit s = sbit_from_mode(smode); |
| 1555 emit(cond | A | s | dst.code()*B16 | srcA.code()*B12 | | 1568 emit(cond | A | s | dst.code()*B16 | srcA.code()*B12 | |
| 1556 src2.code()*B8 | B7 | B4 | src1.code()); | 1569 src2.code()*B8 | B7 | B4 | src1.code()); |
| 1557 } | 1570 } |
| 1558 | 1571 |
| 1559 | 1572 |
| 1560 void Assembler::mls(Register dst, Register src1, Register src2, Register srcA, | 1573 void Assembler::mls(Register dst, Register src1, Register src2, Register srcA, |
| 1561 Condition cond) { | 1574 Condition cond) { |
| 1562 if (is_thumb_mode()) { | 1575 if (is_thumb_mode()) { |
| 1563 mls_thumb(dst, src1, src2, srcA, cond); | 1576 mls_thumb(dst, src1, src2, srcA, cond); |
| 1564 return; | 1577 return; |
| (...skipping 11 matching lines...) | |
| 1576 return; | 1589 return; |
| 1577 } | 1590 } |
| 1578 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc)); | 1591 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc)); |
| 1579 ASSERT(IsEnabled(SUDIV)); | 1592 ASSERT(IsEnabled(SUDIV)); |
| 1580 emit(cond | B26 | B25| B24 | B20 | dst.code()*B16 | 0xf * B12 | | 1593 emit(cond | B26 | B25| B24 | B20 | dst.code()*B16 | 0xf * B12 | |
| 1581 src2.code()*B8 | B4 | src1.code()); | 1594 src2.code()*B8 | B4 | src1.code()); |
| 1582 } | 1595 } |
| 1583 | 1596 |
| 1584 | 1597 |
| 1585 void Assembler::mul(Register dst, Register src1, Register src2, | 1598 void Assembler::mul(Register dst, Register src1, Register src2, |
| 1586 SBit s, Condition cond) { | 1599 SBitMode smode, Condition cond) { |
| 1587 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc)); | 1600 ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc)); |
| 1588 if (is_thumb_mode()) { | 1601 if (is_thumb_mode()) { |
| 1589 mul_thumb(dst, src1, src2, s, cond); | 1602 mul_thumb(dst, src1, src2, smode, cond); |
| 1590 return; | 1603 return; |
| 1591 } | 1604 } |
| | 1605 SBit s = sbit_from_mode(smode); |
| 1592 // dst goes in bits 16-19 for this instruction! | 1606 // dst goes in bits 16-19 for this instruction! |
| 1593 emit(cond | s | dst.code()*B16 | src2.code()*B8 | B7 | B4 | src1.code()); | 1607 emit(cond | s | dst.code()*B16 | src2.code()*B8 | B7 | B4 | src1.code()); |
| 1594 } | 1608 } |
| 1595 | 1609 |
| 1596 | 1610 |
| 1597 void Assembler::smlal(Register dstL, | 1611 void Assembler::smlal(Register dstL, |
| 1598 Register dstH, | 1612 Register dstH, |
| 1599 Register src1, | 1613 Register src1, |
| 1600 Register src2, | 1614 Register src2, |
| 1601 SBit s, | 1615 SBitMode smode, |
| 1602 Condition cond) { | 1616 Condition cond) { |
| 1603 if (is_thumb_mode()) { | 1617 if (is_thumb_mode()) { |
| 1604 smlal_thumb(dstL, dstH, src1, src2, s, cond); | 1618 smlal_thumb(dstL, dstH, src1, src2, smode, cond); |
| 1605 return; | 1619 return; |
| 1606 } | 1620 } |
| 1607 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); | 1621 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); |
| 1608 ASSERT(!dstL.is(dstH)); | 1622 ASSERT(!dstL.is(dstH)); |
| | 1623 SBit s = smode == SetCC ? SetCCBit : LeaveCCBit; |
| 1609 emit(cond | B23 | B22 | A | s | dstH.code()*B16 | dstL.code()*B12 | | 1624 emit(cond | B23 | B22 | A | s | dstH.code()*B16 | dstL.code()*B12 | |
| 1610 src2.code()*B8 | B7 | B4 | src1.code()); | 1625 src2.code()*B8 | B7 | B4 | src1.code()); |
| 1611 } | 1626 } |
| 1612 | 1627 |
| 1613 | 1628 |
| 1614 void Assembler::smull(Register dstL, | 1629 void Assembler::smull(Register dstL, |
| 1615 Register dstH, | 1630 Register dstH, |
| 1616 Register src1, | 1631 Register src1, |
| 1617 Register src2, | 1632 Register src2, |
| 1618 SBit s, | 1633 SBitMode smode, |
| 1619 Condition cond) { | 1634 Condition cond) { |
| 1620 if (is_thumb_mode()) { | 1635 if (is_thumb_mode()) { |
| 1621 smull_thumb(dstL, dstH, src1, src2, s, cond); | 1636 smull_thumb(dstL, dstH, src1, src2, smode, cond); |
| 1622 return; | 1637 return; |
| 1623 } | 1638 } |
| 1624 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); | 1639 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); |
| 1625 ASSERT(!dstL.is(dstH)); | 1640 ASSERT(!dstL.is(dstH)); |
| | 1641 SBit s = smode == SetCC ? SetCCBit : LeaveCCBit; |
| 1626 emit(cond | B23 | B22 | s | dstH.code()*B16 | dstL.code()*B12 | | 1642 emit(cond | B23 | B22 | s | dstH.code()*B16 | dstL.code()*B12 | |
| 1627 src2.code()*B8 | B7 | B4 | src1.code()); | 1643 src2.code()*B8 | B7 | B4 | src1.code()); |
| 1628 } | 1644 } |
| 1629 | 1645 |
| 1630 | 1646 |
| 1631 void Assembler::umlal(Register dstL, | 1647 void Assembler::umlal(Register dstL, |
| 1632 Register dstH, | 1648 Register dstH, |
| 1633 Register src1, | 1649 Register src1, |
| 1634 Register src2, | 1650 Register src2, |
| 1635 SBit s, | 1651 SBitMode smode, |
| 1636 Condition cond) { | 1652 Condition cond) { |
| 1637 if (is_thumb_mode()) { | 1653 if (is_thumb_mode()) { |
| 1638 umlal_thumb(dstL, dstH, src1, src2, s, cond); | 1654 umlal_thumb(dstL, dstH, src1, src2, smode, cond); |
| 1639 return; | 1655 return; |
| 1640 } | 1656 } |
| 1641 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); | 1657 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); |
| 1642 ASSERT(!dstL.is(dstH)); | 1658 ASSERT(!dstL.is(dstH)); |
| | 1659 SBit s = smode == SetCC ? SetCCBit : LeaveCCBit; |
| 1643 emit(cond | B23 | A | s | dstH.code()*B16 | dstL.code()*B12 | | 1660 emit(cond | B23 | A | s | dstH.code()*B16 | dstL.code()*B12 | |
| 1644 src2.code()*B8 | B7 | B4 | src1.code()); | 1661 src2.code()*B8 | B7 | B4 | src1.code()); |
| 1645 } | 1662 } |
| 1646 | 1663 |
| 1647 | 1664 |
| 1648 void Assembler::umull(Register dstL, | 1665 void Assembler::umull(Register dstL, |
| 1649 Register dstH, | 1666 Register dstH, |
| 1650 Register src1, | 1667 Register src1, |
| 1651 Register src2, | 1668 Register src2, |
| 1652 SBit s, | 1669 SBitMode smode, |
| 1653 Condition cond) { | 1670 Condition cond) { |
| 1654 if (is_thumb_mode()) { | 1671 if (is_thumb_mode()) { |
| 1655 umull_thumb(dstL, dstH, src1, src2, s, cond); | 1672 umull_thumb(dstL, dstH, src1, src2, smode, cond); |
| 1656 return; | 1673 return; |
| 1657 } | 1674 } |
| 1658 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); | 1675 ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc)); |
| 1659 ASSERT(!dstL.is(dstH)); | 1676 ASSERT(!dstL.is(dstH)); |
| | 1677 SBit s = smode == SetCC ? SetCCBit : LeaveCCBit; |
| 1660 emit(cond | B23 | s | dstH.code()*B16 | dstL.code()*B12 | | 1678 emit(cond | B23 | s | dstH.code()*B16 | dstL.code()*B12 | |
| 1661 src2.code()*B8 | B7 | B4 | src1.code()); | 1679 src2.code()*B8 | B7 | B4 | src1.code()); |
| 1662 } | 1680 } |
| 1663 | 1681 |
| 1664 | 1682 |
| 1665 // Miscellaneous arithmetic instructions. | 1683 // Miscellaneous arithmetic instructions. |
| 1666 void Assembler::clz(Register dst, Register src, Condition cond) { | 1684 void Assembler::clz(Register dst, Register src, Condition cond) { |
| 1667 // v5 and above. | 1685 // v5 and above. |
| 1668 ASSERT(!dst.is(pc) && !src.is(pc)); | 1686 ASSERT(!dst.is(pc) && !src.is(pc)); |
| 1669 | 1687 |
| (...skipping 256 matching lines...) | |
| 1926 | 1944 |
| 1927 void Assembler::msr(SRegisterFieldMask fields, const Operand& src, | 1945 void Assembler::msr(SRegisterFieldMask fields, const Operand& src, |
| 1928 Condition cond) { | 1946 Condition cond) { |
| 1929 ASSERT(fields >= B16 && fields < B20); // at least one field set | 1947 ASSERT(fields >= B16 && fields < B20); // at least one field set |
| 1930 Instr instr; | 1948 Instr instr; |
| 1931 if (!src.rm_.is_valid()) { | 1949 if (!src.rm_.is_valid()) { |
| 1932 // Immediate. | 1950 // Immediate. |
| 1933 uint32_t rotate_imm; | 1951 uint32_t rotate_imm; |
| 1934 uint32_t immed_8; | 1952 uint32_t immed_8; |
| 1935 if (src.must_output_reloc_info(this) || | 1953 if (src.must_output_reloc_info(this) || |
| 1936 !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) { | 1954 !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL, NULL)) { |
| 1937 // Immediate operand cannot be encoded, load it first to register ip. | 1955 // Immediate operand cannot be encoded, load it first to register ip. |
| 1938 RecordRelocInfo(src.rmode_, src.imm32_); | 1956 RecordRelocInfo(src.rmode_, src.imm32_); |
| 1939 ldr(ip, MemOperand(pc, 0), cond); | 1957 ldr(ip, MemOperand(pc, 0), cond); |
| 1940 msr(fields, Operand(ip), cond); | 1958 msr(fields, Operand(ip), cond); |
| 1941 return; | 1959 return; |
| 1942 } | 1960 } |
| 1943 instr = I | rotate_imm*B8 | immed_8; | 1961 instr = I | rotate_imm*B8 | immed_8; |
| 1944 } else { | 1962 } else { |
| 1945 ASSERT(!src.rs_.is_valid() && src.shift_imm_ == 0); // only rm allowed | 1963 ASSERT(!src.rs_.is_valid() && src.shift_imm_ == 0); // only rm allowed |
| 1946 instr = src.rm_.code(); | 1964 instr = src.rm_.code(); |
| (...skipping 1333 matching lines...) | |
| 3280 bool Assembler::IsNop(Instr instr, int type) { | 3298 bool Assembler::IsNop(Instr instr, int type) { |
| 3281 ASSERT(0 <= type && type <= 14); // mov pc, pc isn't a nop. | 3299 ASSERT(0 <= type && type <= 14); // mov pc, pc isn't a nop. |
| 3282 // Check for mov rx, rx where x = type. | 3300 // Check for mov rx, rx where x = type. |
| 3283 return instr == (al | 13*B21 | type*B12 | type); | 3301 return instr == (al | 13*B21 | type*B12 | type); |
| 3284 } | 3302 } |
| 3285 | 3303 |
| 3286 | 3304 |
| 3287 bool Assembler::ImmediateFitsAddrMode1Instruction(int32_t imm32) { | 3305 bool Assembler::ImmediateFitsAddrMode1Instruction(int32_t imm32) { |
| 3288 uint32_t dummy1; | 3306 uint32_t dummy1; |
| 3289 uint32_t dummy2; | 3307 uint32_t dummy2; |
| 3290 return fits_shifter(imm32, &dummy1, &dummy2, NULL); | 3308 return fits_shifter(imm32, &dummy1, &dummy2, NULL, NULL); |
| 3291 } | 3309 } |
| 3292 | 3310 |
| 3293 | 3311 |
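A quick illustration, not taken from the patch, of what this predicate accepts under the rotation rule from `fits_shifter()`:

```cpp
ImmediateFitsAddrMode1Instruction(0x000000FF);  // true:  rotation 0
ImmediateFitsAddrMode1Instruction(0x0003FC00);  // true:  0xFF ror 22
ImmediateFitsAddrMode1Instruction(0x00FF0000);  // true:  0xFF ror 16
ImmediateFitsAddrMode1Instruction(0x00000101);  // false: bits span 9 places
ImmediateFitsAddrMode1Instruction(0x000001FE);  // false: needs an odd rotation
```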
| 3294 // Debugging. | 3312 // Debugging. |
| 3295 void Assembler::RecordJSReturn() { | 3313 void Assembler::RecordJSReturn() { |
| 3296 positions_recorder()->WriteRecordedPositions(); | 3314 positions_recorder()->WriteRecordedPositions(); |
| 3297 CheckBuffer(); | 3315 CheckBuffer(); |
| 3298 RecordRelocInfo(RelocInfo::JS_RETURN); | 3316 RecordRelocInfo(RelocInfo::JS_RETURN); |
| 3299 } | 3317 } |
| 3300 | 3318 |
| (...skipping 373 matching lines...) | |
| 3674 | 3692 |
| 3675 // Since a constant pool was just emitted, move the check offset forward by | 3693 // Since a constant pool was just emitted, move the check offset forward by |
| 3676 // the standard interval. | 3694 // the standard interval. |
| 3677 next_buffer_check_ = pc_offset() + kCheckPoolInterval; | 3695 next_buffer_check_ = pc_offset() + kCheckPoolInterval; |
| 3678 } | 3696 } |
| 3679 | 3697 |
| 3680 | 3698 |
| 3681 } } // namespace v8::internal | 3699 } } // namespace v8::internal |
| 3682 | 3700 |
| 3683 #endif // V8_TARGET_ARCH_ARM | 3701 #endif // V8_TARGET_ARCH_ARM |