OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 18 matching lines...) Expand all Loading... |
29 | 29 |
30 #if defined(V8_TARGET_ARCH_ARM) | 30 #if defined(V8_TARGET_ARCH_ARM) |
31 | 31 |
32 #include "arm/assembler-arm-inl.h" | 32 #include "arm/assembler-arm-inl.h" |
33 #include "serialize.h" | 33 #include "serialize.h" |
34 | 34 |
35 namespace v8 { | 35 namespace v8 { |
36 namespace internal { | 36 namespace internal { |
37 | 37 |
38 void Assembler::add_thumb(Register dst, Register src1, const Operand& src2, | 38 void Assembler::add_thumb(Register dst, Register src1, const Operand& src2, |
39 SBit s, Condition cond) { | 39 SBitMode smode, Condition cond) { |
40 ASSERT(cond == al); | 40 bool is_in_it_block = emit_it(cond); |
| 41 SBit s = sbit_from_mode(smode); |
41 if (!src2.rm_.is_valid()) { | 42 if (!src2.rm_.is_valid()) { |
42 // Immediate. | 43 // Immediate. |
43 if (s == LeaveCC) { | 44 if (it_block_smode_check(is_in_it_block, smode)) { |
44 if (is_uint12(src2.imm32_)) { | |
45 add_imm_t4(dst, src1, src2, s, cond); | |
46 return; | |
47 } | |
48 } else { | |
49 if (is_uint3(src2.imm32_) && are_low_reg(dst, src1)) { | 45 if (is_uint3(src2.imm32_) && are_low_reg(dst, src1)) { |
50 emit16(thumb16_mode1(ADD_IMM_1) | | 46 emit16(thumb16_mode1(ADD_IMM_1) | |
51 thumb16_2lowreg_imm3_encoding(dst, src1, src2)); | 47 thumb16_2lowreg_imm3_encoding(dst, src1, src2)); |
52 return; | 48 return; |
53 } else if (is_uint8(src2.imm32_) && dst.code() == src1.code()) { | 49 } else if (is_low_reg(dst) && |
| 50 dst.code() == src1.code() && |
| 51 is_uint8(src2.imm32_)) { |
54 emit16(thumb16_mode1(ADD_IMM_2) | | 52 emit16(thumb16_mode1(ADD_IMM_2) | |
55 thumb16_lowreg_imm8_encoding(dst, src2)); | 53 thumb16_lowreg_imm8_encoding(dst, src2)); |
56 return; | 54 return; |
57 } | 55 } |
58 } | 56 } |
| 57 if (smode != SetCC && is_uint12(src2.imm32_)) { |
| 58 add_imm_t4(dst, src1, src2, s, cond); |
| 59 return; |
| 60 } |
59 uint32_t i, imm3, imm8; | 61 uint32_t i, imm3, imm8; |
60 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { | 62 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { |
61 add_imm_t3(dst, src1, s, cond, i, imm3, imm8); | 63 add_imm_t3(dst, src1, s, cond, i, imm3, imm8); |
62 return; | 64 return; |
63 } | 65 } |
64 // Immediate - too big for 1 thumb instruction | 66 // Immediate - too big for 1 thumb instruction |
65 mov_thumb(ip, src2, LeaveCC, al); | 67 mov_thumb(ip, src2, LeaveCC, al); |
66 add_thumb(dst, src1, Operand(ip), s, al); | 68 add_thumb(dst, src1, Operand(ip), smode, al); |
67 return; | 69 return; |
68 } else if (src2.shift_imm_ == 0) { | 70 } else if (src2.shift_imm_ == 0) { |
69 // Register. | 71 // Register. |
70 if (s == SetCC && are_low_reg(dst, src1) && is_low_reg(src2.rm_)) { | 72 if (it_block_smode_check(is_in_it_block, smode) && |
| 73 are_low_reg(dst, src1) && |
| 74 is_low_reg(src2.rm_)) { |
71 emit16(thumb16_mode1(ADD_REG_1) | | 75 emit16(thumb16_mode1(ADD_REG_1) | |
72 thumb16_3lowreg_encoding(dst, src1, src2)); | 76 thumb16_3lowreg_encoding(dst, src1, src2)); |
73 return; | 77 return; |
74 } else if (s == LeaveCC && dst.code() == src1.code()) { | 78 } else if (smode != SetCC && dst.code() == src1.code()) { |
75 emit16(thumb16_mode3(ADD_REG_2) | | 79 emit16(thumb16_mode3(ADD_REG_2) | |
76 thumb16_2anyreg_encoding(dst, src2)); | 80 thumb16_2anyreg_encoding(dst, src2)); |
77 return; | 81 return; |
78 } | 82 } |
79 } | 83 } |
80 add_reg_t3(dst, src1, src2, s, cond); | 84 add_reg_t3(dst, src1, src2, s, cond); |
81 } | 85 } |
82 | 86 |
83 | 87 |
84 void Assembler::sub_thumb(Register dst, Register src1, const Operand& src2, | 88 void Assembler::sub_thumb(Register dst, Register src1, const Operand& src2, |
85 SBit s, Condition cond) { | 89 SBitMode smode, Condition cond) { |
86 emit_it(cond); | 90 bool is_in_it_block = emit_it(cond); |
| 91 SBit s = sbit_from_mode(smode); |
87 if (!src2.rm_.is_valid()) { | 92 if (!src2.rm_.is_valid()) { |
88 // Immediate. | 93 // Immediate. |
89 if (s == LeaveCC) { | 94 if (it_block_smode_check(is_in_it_block, smode)) { |
90 if (is_uint12(src2.imm32_)) { | |
91 sub_imm_t4(dst, src1, src2, s, cond); | |
92 return; | |
93 } | |
94 } else { | |
95 if (is_uint3(src2.imm32_) && are_low_reg(dst, src1)) { | 95 if (is_uint3(src2.imm32_) && are_low_reg(dst, src1)) { |
96 emit16(thumb16_mode1(SUB_IMM_1) | | 96 emit16(thumb16_mode1(SUB_IMM_1) | |
97 thumb16_2lowreg_imm3_encoding(dst, src1, src2)); | 97 thumb16_2lowreg_imm3_encoding(dst, src1, src2)); |
98 return; | 98 return; |
99 } else if (is_uint8(src2.imm32_) && dst.code() == src1.code()) { | 99 } else if (is_low_reg(dst) && |
| 100 dst.code() == src1.code() && |
| 101 is_uint8(src2.imm32_)) { |
100 emit16(thumb16_mode1(SUB_IMM_2) | | 102 emit16(thumb16_mode1(SUB_IMM_2) | |
101 thumb16_lowreg_imm8_encoding(dst, src2)); | 103 thumb16_lowreg_imm8_encoding(dst, src2)); |
102 return; | 104 return; |
103 } | 105 } |
104 } | 106 } |
| 107 if (smode != SetCC && is_uint12(src2.imm32_)) { |
| 108 sub_imm_t4(dst, src1, src2, s, cond); |
| 109 return; |
| 110 } |
105 uint32_t i, imm3, imm8; | 111 uint32_t i, imm3, imm8; |
106 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { | 112 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { |
107 sub_imm_t3(dst, src1, s, cond, i, imm3, imm8); | 113 sub_imm_t3(dst, src1, s, cond, i, imm3, imm8); |
108 return; | 114 return; |
109 } | 115 } |
110 ASSERT(cond == al); | 116 ASSERT(cond == al); |
111 mov_thumb(ip, src2, LeaveCC, al); | 117 mov_thumb(ip, src2, LeaveCC, al); |
112 sub_thumb(dst, src1, Operand(ip), s, al); | 118 sub_thumb(dst, src1, Operand(ip), smode, al); |
113 return; | 119 return; |
114 } else { | 120 } else { |
115 // Register. | 121 // Register. |
116 if (s == SetCC && are_low_reg(dst, src1) && is_low_reg(src2.rm_)) { | 122 if (it_block_smode_check(is_in_it_block, smode) && |
| 123 are_low_reg(dst, src1) && |
| 124 is_low_reg(src2.rm_)) { |
117 emit16(thumb16_mode1(SUB_REG) | | 125 emit16(thumb16_mode1(SUB_REG) | |
118 thumb16_3lowreg_encoding(dst, src1, src2)); | 126 thumb16_3lowreg_encoding(dst, src1, src2)); |
119 return; | 127 return; |
120 } else { | 128 } else { |
121 sub_reg_t3(dst, src1, src2, s, cond); | 129 sub_reg_t3(dst, src1, src2, s, cond); |
122 return; | 130 return; |
123 } | 131 } |
124 } | 132 } |
125 UNREACHABLE(); | 133 UNREACHABLE(); |
126 } | 134 } |
127 | 135 |
128 | 136 |
129 void Assembler::mov_thumb(Register dst, const Operand& src, SBit s, | 137 void Assembler::mov_thumb(Register dst, const Operand& src, SBitMode smode, |
130 Condition cond) { | 138 Condition cond) { |
131 emit_it(cond); | 139 bool is_in_it_block = emit_it(cond); |
| 140 SBit s = sbit_from_mode(smode); |
132 if (!src.rm_.is_valid()) { | 141 if (!src.rm_.is_valid()) { |
133 // Immediate. | 142 // Immediate. |
134 if (is_uint8(src.imm32_) && is_low_reg(dst) && s == SetCC) { | 143 if (it_block_smode_check(is_in_it_block, smode) && |
| 144 is_uint8(src.imm32_) && |
| 145 is_low_reg(dst)) { |
135 emit16(thumb16_mode1(MOV_IMM) | | 146 emit16(thumb16_mode1(MOV_IMM) | |
136 thumb16_lowreg_imm8_encoding(dst, src)); | 147 thumb16_lowreg_imm8_encoding(dst, src)); |
137 return; | 148 return; |
138 } else { | 149 } else { |
139 if (is_uint16(src.imm32_) && s == LeaveCC) { | 150 if (is_uint16(src.imm32_) && smode != SetCC) { |
140 mov_imm_t3(dst, src, s, cond); | 151 mov_imm_t3(dst, src, s, cond); |
141 return; | 152 return; |
142 } else { | 153 } else { |
143 uint32_t i, imm3, imm8; | 154 uint32_t i, imm3, imm8; |
144 if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8) && | 155 if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8) && |
145 !src.must_output_reloc_info(this)) { | 156 !src.must_output_reloc_info(this)) { |
146 mov_imm_t2(dst, s, cond, i, imm3, imm8); | 157 mov_imm_t2(dst, s, cond, i, imm3, imm8); |
147 return; | 158 return; |
148 } else { | 159 } else { |
149 move_32_bit_immediate_thumb(dst, s, src, cond); | 160 move_32_bit_immediate_thumb(dst, smode, src, cond); |
150 return; | 161 return; |
151 } | 162 } |
152 } | 163 } |
153 } | 164 } |
154 } else { | 165 } else { |
155 // Register. | 166 // Register. |
156 if (src.rs_.is_valid() || (!src.rs_.is_valid() && src.shift_imm_ != 0)) { | 167 if (src.rs_.is_valid() || (!src.rs_.is_valid() && src.shift_imm_ != 0)) { |
157 switch (src.shift_op_) { | 168 switch (src.shift_op_) { |
158 case LSL: lsl_thumb(dst, src, s, cond); | 169 case LSL: lsl_thumb(dst, src, smode, cond); |
159 return; | 170 return; |
160 case LSR: lsr_thumb(dst, src, s, cond); | 171 case LSR: lsr_thumb(dst, src, smode, cond); |
161 return; | 172 return; |
162 case ASR: asr_thumb(dst, src, s, cond); | 173 case ASR: asr_thumb(dst, src, smode, cond, is_in_it_block); |
163 return; | 174 return; |
164 case ROR: ror_thumb(dst, src, s, cond); | 175 case ROR: ror_thumb(dst, src, smode, cond); |
165 return; | 176 return; |
166 case RRX: | 177 case RRX: |
167 default: UNREACHABLE(); | 178 default: UNREACHABLE(); |
168 } | 179 } |
169 return; | 180 return; |
170 } | 181 } |
171 if (s == LeaveCC) { | 182 if (smode != SetCC) { |
172 emit16(thumb16_mode3(MOV_REG_1) | | 183 emit16(thumb16_mode3(MOV_REG_1) | |
173 thumb16_2anyreg_encoding(dst, src)); | 184 thumb16_2anyreg_encoding(dst, src)); |
174 return; | 185 return; |
175 } else if (are_low_reg(dst, src.rm_)) { | 186 } else if (smode != LeaveCC && !is_in_it_block && |
| 187 are_low_reg(dst, src.rm_)) { |
176 // Note: MOV_REG_2 is 0, so call not needed | 188 // Note: MOV_REG_2 is 0, so call not needed |
177 emit16(thumb16_2lowreg_encoding(dst, src)); | 189 emit16(thumb16_2lowreg_encoding(dst, src)); |
178 return; | 190 return; |
179 } else { | 191 } else { |
180 mov_reg_t3(dst, src, s, cond); | 192 mov_reg_t3(dst, src, s, cond); |
181 return; | 193 return; |
182 } | 194 } |
183 } | 195 } |
184 UNREACHABLE(); | 196 UNREACHABLE(); |
185 } | 197 } |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
245 } | 257 } |
246 } else { // (src.shift_imm_ != 0) | 258 } else { // (src.shift_imm_ != 0) |
247 // Register. | 259 // Register. |
248 cmp_reg_t3(dst, src, cond); | 260 cmp_reg_t3(dst, src, cond); |
249 return; | 261 return; |
250 } | 262 } |
251 UNREACHABLE(); | 263 UNREACHABLE(); |
252 } | 264 } |
253 | 265 |
254 | 266 |
255 void Assembler::lsl_thumb(Register dst, const Operand& src, SBit s, | 267 void Assembler::lsl_thumb(Register dst, const Operand& src, SBitMode smode, |
256 Condition cond) { | 268 Condition cond) { |
257 ASSERT(cond == al); | 269 ASSERT(cond == al); |
| 270 SBit s = sbit_from_mode(smode); |
258 if (!src.rs_.is_valid()) { | 271 if (!src.rs_.is_valid()) { |
259 if (is_int5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) { | 272 if (is_int5(src.shift_imm_) && are_low_reg(dst, src.rm_) && |
| 273 smode != LeaveCC) { |
260 emit16(thumb16_mode1(LSL_IMM) | | 274 emit16(thumb16_mode1(LSL_IMM) | |
261 thumb16_2lowreg_imm5_encoding(dst, src.rm_, src)); | 275 thumb16_2lowreg_imm5_encoding(dst, src.rm_, src)); |
262 return; | 276 return; |
263 } else { | 277 } else { |
264 lsl_imm_t2(dst, src, s, cond); | 278 lsl_imm_t2(dst, src, s, cond); |
265 return; | 279 return; |
266 } | 280 } |
267 } else { | 281 } else { |
268 // Register src{rm rs, shift_imm} | 282 // Register src{rm rs, shift_imm} |
269 if (s == SetCC && dst.code() == src.rm_.code() && | 283 if (smode != LeaveCC && dst.code() == src.rm_.code() && |
270 are_low_reg(dst, src.rs_)) { | 284 are_low_reg(dst, src.rs_)) { |
271 // Register 16 | 285 // Register 16 |
272 emit16(thumb16_mode2(LSL_REG) | | 286 emit16(thumb16_mode2(LSL_REG) | |
273 thumb16_2lowreg_encoding(dst, src.rs_)); | 287 thumb16_2lowreg_encoding(dst, src.rs_)); |
274 return; | 288 return; |
275 } else { | 289 } else { |
276 // Register 32 | 290 // Register 32 |
277 lsl_reg_t2(dst, src, s, cond); | 291 lsl_reg_t2(dst, src, s, cond); |
278 return; | 292 return; |
279 } | 293 } |
280 } | 294 } |
281 UNREACHABLE(); | 295 UNREACHABLE(); |
282 } | 296 } |
283 | 297 |
284 | 298 |
285 void Assembler::lsr_thumb(Register dst, const Operand& src, SBit s, | 299 void Assembler::lsr_thumb(Register dst, const Operand& src, SBitMode smode, |
286 Condition cond) { | 300 Condition cond) { |
287 ASSERT(cond == al); | 301 ASSERT(cond == al); |
| 302 SBit s = sbit_from_mode(smode); |
288 if (!src.rs_.is_valid()) { | 303 if (!src.rs_.is_valid()) { |
289 // Immediate | 304 // Immediate |
290 if (is_int5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) { | 305 if (is_int5(src.shift_imm_) && are_low_reg(dst, src.rm_) && |
| 306 smode != LeaveCC) { |
291 // Immediate 16 | 307 // Immediate 16 |
292 emit16(thumb16_mode1(LSR_IMM) | | 308 emit16(thumb16_mode1(LSR_IMM) | |
293 thumb16_2lowreg_imm5_encoding(dst, src.rm_, src)); | 309 thumb16_2lowreg_imm5_encoding(dst, src.rm_, src)); |
294 return; | 310 return; |
295 } else { | 311 } else { |
296 // Immediate 32 | 312 // Immediate 32 |
297 lsr_imm_t2(dst, src, s, cond); | 313 lsr_imm_t2(dst, src, s, cond); |
298 return; | 314 return; |
299 } | 315 } |
300 } else { | 316 } else { |
301 if (s == SetCC && dst.code() == src.rm_.code() && | 317 if (smode != LeaveCC && dst.code() == src.rm_.code() && |
302 are_low_reg(dst, src.rs_)) { | 318 are_low_reg(dst, src.rs_)) { |
303 emit16(thumb16_mode2(LSR_REG) | | 319 emit16(thumb16_mode2(LSR_REG) | |
304 thumb16_2lowreg_encoding(dst, src.rs_)); | 320 thumb16_2lowreg_encoding(dst, src.rs_)); |
305 return; | 321 return; |
306 } else { | 322 } else { |
307 lsr_reg_t2(dst, src, s, cond); | 323 lsr_reg_t2(dst, src, s, cond); |
308 return; | 324 return; |
309 } | 325 } |
310 } | 326 } |
311 UNREACHABLE(); | 327 UNREACHABLE(); |
312 } | 328 } |
313 | 329 |
314 | 330 |
315 void Assembler::ror_thumb(Register dst, const Operand& src, SBit s, | 331 void Assembler::ror_thumb(Register dst, const Operand& src, SBitMode smode, |
316 Condition cond) { | 332 Condition cond) { |
317 ASSERT(cond == al); | 333 ASSERT(cond == al); |
| 334 SBit s = sbit_from_mode(smode); |
318 if (!src.rs_.is_valid()) { | 335 if (!src.rs_.is_valid()) { |
319 // Immediate | 336 // Immediate |
320 if (is_int5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) { | 337 if (is_int5(src.shift_imm_) && are_low_reg(dst, src.rm_) && |
| 338 smode != LeaveCC) { |
321 // Immediate 16 | 339 // Immediate 16 |
322 emit16(thumb16_mode1(ROR_IMM) | | 340 emit16(thumb16_mode1(ROR_IMM) | |
323 thumb16_2lowreg_imm5_encoding(dst, src.rm_, src)); | 341 thumb16_2lowreg_imm5_encoding(dst, src.rm_, src)); |
324 return; | 342 return; |
325 } else { | 343 } else { |
326 // Immediate 32 | 344 // Immediate 32 |
327 ror_imm_t2(dst, src, s, cond); | 345 ror_imm_t2(dst, src, s, cond); |
328 return; | 346 return; |
329 } | 347 } |
330 } else { | 348 } else { |
331 if (s == SetCC && dst.code() == src.rm_.code() && | 349 if (smode != LeaveCC && dst.code() == src.rm_.code() && |
332 are_low_reg(dst, src.rs_)) { | 350 are_low_reg(dst, src.rs_)) { |
333 emit16(thumb16_mode2(ROR_REG) | | 351 emit16(thumb16_mode2(ROR_REG) | |
334 thumb16_2lowreg_encoding(dst, src.rs_)); | 352 thumb16_2lowreg_encoding(dst, src.rs_)); |
335 return; | 353 return; |
336 } else { | 354 } else { |
337 ror_reg_t2(dst, src, s, cond); | 355 ror_reg_t2(dst, src, s, cond); |
338 return; | 356 return; |
339 } | 357 } |
340 } | 358 } |
341 UNREACHABLE(); | 359 UNREACHABLE(); |
342 } | 360 } |
343 | 361 |
344 | 362 |
345 void Assembler::asr_thumb(Register dst, const Operand& src, SBit s, | 363 void Assembler::asr_thumb(Register dst, const Operand& src, SBitMode smode, |
346 Condition cond) { | 364 Condition cond, bool is_in_it_block) { |
| 365 SBit s = sbit_from_mode(smode); |
347 if (!src.rs_.is_valid()) { | 366 if (!src.rs_.is_valid()) { |
348 // Immediate | 367 // Immediate |
349 if (is_int5(src.shift_imm_) && are_low_reg(dst, src.rm_) && s == SetCC) { | 368 if (is_int5(src.shift_imm_) && are_low_reg(dst, src.rm_) && |
| 369 it_block_smode_check(is_in_it_block, smode)) { |
350 // Immediate 16 | 370 // Immediate 16 |
351 emit16(thumb16_mode1(ASR_IMM) | | 371 emit16(thumb16_mode1(ASR_IMM) | |
352 thumb16_2lowreg_imm5_encoding(dst, src.rm_, src)); | 372 thumb16_2lowreg_imm5_encoding(dst, src.rm_, src)); |
353 return; | 373 return; |
354 } else { | 374 } else { |
355 // Immediate 32 | 375 // Immediate 32 |
356 asr_imm_t2(dst, src, s, cond); | 376 asr_imm_t2(dst, src, s, cond); |
357 return; | 377 return; |
358 } | 378 } |
359 } else { | 379 } else { |
360 // Register | 380 // Register |
361 if (s == SetCC && dst.code() == src.rm_.code() && | 381 if (it_block_smode_check(is_in_it_block, smode) && |
362 are_low_reg(dst, src.rs_)) { | 382 dst.code() == src.rm_.code() && are_low_reg(dst, src.rs_)) { |
363 // Register 16 | 383 // Register 16 |
364 emit16(thumb16_mode2(ASR_REG) | | 384 emit16(thumb16_mode2(ASR_REG) | |
365 thumb16_2lowreg_encoding(dst, src.rs_)); | 385 thumb16_2lowreg_encoding(dst, src.rs_)); |
366 return; | 386 return; |
367 } else { | 387 } else { |
368 asr_reg_t2(dst, src, s, cond); | 388 asr_reg_t2(dst, src, s, cond); |
369 return; | 389 return; |
370 } | 390 } |
371 } | 391 } |
372 UNREACHABLE(); | 392 UNREACHABLE(); |
373 } | 393 } |
374 | 394 |
375 | 395 |
376 void Assembler::and_thumb(Register dst, Register src1, const Operand& src2, | 396 void Assembler::and_thumb(Register dst, Register src1, const Operand& src2, |
377 SBit s, Condition cond) { | 397 SBitMode smode, Condition cond) { |
378 ASSERT(cond == al); | 398 ASSERT(cond == al); |
| 399 SBit s = sbit_from_mode(smode); |
379 if (!src2.rm_.is_valid()) { | 400 if (!src2.rm_.is_valid()) { |
380 // Immediate. | 401 // Immediate. |
381 uint32_t i, imm3, imm8; | 402 uint32_t i, imm3, imm8; |
382 if (src2.imm32_ < 0 && thumb_expand_imm(~src2.imm32_, &i, &imm3, &imm8)) { | 403 if (src2.imm32_ < 0 && thumb_expand_imm(~src2.imm32_, &i, &imm3, &imm8)) { |
383 bic_imm_t1(dst, src1, s, cond, i, imm3, imm8); | 404 bic_imm_t1(dst, src1, s, cond, i, imm3, imm8); |
384 return; | 405 return; |
385 } else if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { | 406 } else if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { |
386 and_imm_t1(dst, src1, s, cond, i, imm3, imm8); | 407 and_imm_t1(dst, src1, s, cond, i, imm3, imm8); |
387 return; | 408 return; |
388 } | 409 } |
389 mov_thumb(ip, src2, LeaveCC, al); | 410 mov_thumb(ip, src2, LeaveCC, al); |
390 and_thumb(dst, src1, Operand(ip), s, al); | 411 and_thumb(dst, src1, Operand(ip), smode, al); |
391 return; | 412 return; |
392 } else { | 413 } else { |
393 // Register. | 414 // Register. |
394 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && | 415 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && |
395 s == SetCC && src2.shift_imm_ == 0) { | 416 smode != LeaveCC && src2.shift_imm_ == 0) { |
396 emit16(thumb16_mode2(AND_REG) | | 417 emit16(thumb16_mode2(AND_REG) | |
397 thumb16_2lowreg_encoding(dst, src2)); | 418 thumb16_2lowreg_encoding(dst, src2)); |
398 return; | 419 return; |
399 } else { | 420 } else { |
400 and_reg_t2(dst, src1, src2, s, cond); | 421 and_reg_t2(dst, src1, src2, s, cond); |
401 return; | 422 return; |
402 } | 423 } |
403 } | 424 } |
404 UNREACHABLE(); | 425 UNREACHABLE(); |
405 } | 426 } |
406 | 427 |
407 | 428 |
408 void Assembler::eor_thumb(Register dst, Register src1, const Operand& src2, | 429 void Assembler::eor_thumb(Register dst, Register src1, const Operand& src2, |
409 SBit s, Condition cond) { | 430 SBitMode smode, Condition cond) { |
410 ASSERT(cond == al); | 431 ASSERT(cond == al); |
| 432 SBit s = sbit_from_mode(smode); |
411 if (!src2.rm_.is_valid()) { | 433 if (!src2.rm_.is_valid()) { |
412 // Immediate. | 434 // Immediate. |
413 uint32_t i, imm3, imm8; | 435 uint32_t i, imm3, imm8; |
414 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { | 436 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { |
415 eor_imm_t1(dst, src1, s, cond, i, imm3, imm8); | 437 eor_imm_t1(dst, src1, s, cond, i, imm3, imm8); |
416 return; | 438 return; |
417 } | 439 } |
418 mov_thumb(ip, src2, LeaveCC, al); | 440 mov_thumb(ip, src2, LeaveCC, al); |
419 eor_thumb(dst, src1, Operand(ip), s, al); | 441 eor_thumb(dst, src1, Operand(ip), smode, al); |
420 return; | 442 return; |
421 } else { | 443 } else { |
422 // Register. | 444 // Register. |
423 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && | 445 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && |
424 s == SetCC && src2.shift_imm_ == 0) { | 446 smode != LeaveCC && src2.shift_imm_ == 0) { |
425 emit16(thumb16_mode2(EOR_REG) | | 447 emit16(thumb16_mode2(EOR_REG) | |
426 thumb16_2lowreg_encoding(dst, src2)); | 448 thumb16_2lowreg_encoding(dst, src2)); |
427 return; | 449 return; |
428 } else { | 450 } else { |
429 eor_reg_t2(dst, src1, src2, s, cond); | 451 eor_reg_t2(dst, src1, src2, s, cond); |
430 return; | 452 return; |
431 } | 453 } |
432 } | 454 } |
433 UNREACHABLE(); | 455 UNREACHABLE(); |
434 } | 456 } |
435 | 457 |
436 | 458 |
437 void Assembler::adc_thumb(Register dst, Register src1, const Operand& src2, | 459 void Assembler::adc_thumb(Register dst, Register src1, const Operand& src2, |
438 SBit s, Condition cond) { | 460 SBitMode smode, Condition cond) { |
439 ASSERT(cond == al); | 461 ASSERT(cond == al); |
| 462 SBit s = sbit_from_mode(smode); |
440 if (!src2.rm_.is_valid()) { | 463 if (!src2.rm_.is_valid()) { |
441 // Immediate. | 464 // Immediate. |
442 uint32_t i, imm3, imm8; | 465 uint32_t i, imm3, imm8; |
443 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { | 466 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { |
444 adc_imm_t1(dst, src1, s, cond, i, imm3, imm8); | 467 adc_imm_t1(dst, src1, s, cond, i, imm3, imm8); |
445 return; | 468 return; |
446 } | 469 } |
447 mov_thumb(ip, src2, LeaveCC, al); | 470 mov_thumb(ip, src2, LeaveCC, al); |
448 adc_thumb(dst, src1, Operand(ip), s, al); | 471 adc_thumb(dst, src1, Operand(ip), smode, al); |
449 return; | 472 return; |
450 } else { | 473 } else { |
451 // Register. | 474 // Register. |
452 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && | 475 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && |
453 s == SetCC && src2.shift_imm_ == 0) { | 476 smode != LeaveCC && src2.shift_imm_ == 0) { |
454 emit16(thumb16_mode2(ADC_REG) | | 477 emit16(thumb16_mode2(ADC_REG) | |
455 thumb16_2lowreg_encoding(dst, src2)); | 478 thumb16_2lowreg_encoding(dst, src2)); |
456 return; | 479 return; |
457 } else { | 480 } else { |
458 adc_reg_t2(dst, src1, src2, s, cond); | 481 adc_reg_t2(dst, src1, src2, s, cond); |
459 return; | 482 return; |
460 } | 483 } |
461 } | 484 } |
462 UNREACHABLE(); | 485 UNREACHABLE(); |
463 } | 486 } |
464 | 487 |
465 | 488 |
466 void Assembler::sbc_thumb(Register dst, Register src1, const Operand& src2, | 489 void Assembler::sbc_thumb(Register dst, Register src1, const Operand& src2, |
467 SBit s, Condition cond) { | 490 SBitMode smode, Condition cond) { |
468 ASSERT(cond == al); | 491 ASSERT(cond == al); |
| 492 SBit s = sbit_from_mode(smode); |
469 if (!src2.rm_.is_valid()) { | 493 if (!src2.rm_.is_valid()) { |
470 // Immediate. | 494 // Immediate. |
471 uint32_t i, imm3, imm8; | 495 uint32_t i, imm3, imm8; |
472 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { | 496 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { |
473 sbc_imm_t1(dst, src1, s, cond, i, imm3, imm8); | 497 sbc_imm_t1(dst, src1, s, cond, i, imm3, imm8); |
474 return; | 498 return; |
475 } | 499 } |
476 mov_thumb(ip, src2, LeaveCC, al); | 500 mov_thumb(ip, src2, LeaveCC, al); |
477 sbc_thumb(dst, src1, Operand(ip), s, al); | 501 sbc_thumb(dst, src1, Operand(ip), smode, al); |
478 return; | 502 return; |
479 } else { | 503 } else { |
480 // Register. | 504 // Register. |
481 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && | 505 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && |
482 s == SetCC && src2.shift_imm_ == 0) { | 506 smode != LeaveCC && src2.shift_imm_ == 0) { |
483 emit16(thumb16_mode2(SBC_REG) | | 507 emit16(thumb16_mode2(SBC_REG) | |
484 thumb16_2lowreg_encoding(dst, src2)); | 508 thumb16_2lowreg_encoding(dst, src2)); |
485 return; | 509 return; |
486 } else { | 510 } else { |
487 sbc_reg_t2(dst, src1, src2, s, cond); | 511 sbc_reg_t2(dst, src1, src2, s, cond); |
488 return; | 512 return; |
489 } | 513 } |
490 } | 514 } |
491 UNREACHABLE(); | 515 UNREACHABLE(); |
492 } | 516 } |
493 | 517 |
494 | 518 |
495 void Assembler::rsb_thumb(Register dst, Register src1, const Operand& src2, | 519 void Assembler::rsb_thumb(Register dst, Register src1, const Operand& src2, |
496 SBit s, Condition cond) { | 520 SBitMode smode, Condition cond) { |
497 emit_it(cond); | 521 bool is_in_it_block = emit_it(cond); |
| 522 SBit s = sbit_from_mode(smode); |
498 if (!src2.rm_.is_valid()) { | 523 if (!src2.rm_.is_valid()) { |
499 // Immediate. | 524 // Immediate. |
500 if (src2.imm32_ == 0 && are_low_reg(dst, src1)) { | 525 if (it_block_smode_check(is_in_it_block, smode) && |
| 526 src2.imm32_ == 0 && |
| 527 are_low_reg(dst, src1)) { |
501 emit16(thumb16_mode2(RSB_IMM) | | 528 emit16(thumb16_mode2(RSB_IMM) | |
502 thumb16_2lowreg_encoding(dst, src1)); | 529 thumb16_2lowreg_encoding(dst, src1)); |
503 return; | 530 return; |
504 } else { | 531 } else { |
505 uint32_t i, imm3, imm8; | 532 uint32_t i, imm3, imm8; |
506 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { | 533 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { |
507 rsb_imm_t2(dst, src1, s, cond, i, imm3, imm8); | 534 rsb_imm_t2(dst, src1, s, cond, i, imm3, imm8); |
508 return; | 535 return; |
509 } | 536 } |
510 ASSERT(cond == al); | 537 ASSERT(cond == al); |
511 mov_thumb(ip, src2, LeaveCC, al); | 538 mov_thumb(ip, src2, LeaveCC, al); |
512 rsb_thumb(dst, src1, Operand(ip), s, al); | 539 rsb_thumb(dst, src1, Operand(ip), smode, al); |
513 return; | 540 return; |
514 } | 541 } |
515 } else { | 542 } else { |
516 // Register. | 543 // Register. |
517 rsb_reg_t1(dst, src1, src2, s, cond); | 544 rsb_reg_t1(dst, src1, src2, s, cond); |
518 return; | 545 return; |
519 } | 546 } |
520 UNREACHABLE(); | 547 UNREACHABLE(); |
521 } | 548 } |
522 | 549 |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
570 } else { | 597 } else { |
571 cmn_reg_t2(src1, src2, cond); | 598 cmn_reg_t2(src1, src2, cond); |
572 return; | 599 return; |
573 } | 600 } |
574 } | 601 } |
575 UNREACHABLE(); | 602 UNREACHABLE(); |
576 } | 603 } |
577 | 604 |
578 | 605 |
579 void Assembler::bic_thumb(Register dst, Register src1, const Operand& src2, | 606 void Assembler::bic_thumb(Register dst, Register src1, const Operand& src2, |
580 SBit s, Condition cond) { | 607 SBitMode smode, Condition cond) { |
581 ASSERT(cond == al); | 608 ASSERT(cond == al); |
| 609 SBit s = sbit_from_mode(smode); |
582 if (!src2.rm_.is_valid()) { | 610 if (!src2.rm_.is_valid()) { |
583 // Immediate. | 611 // Immediate. |
584 uint32_t i, imm3, imm8; | 612 uint32_t i, imm3, imm8; |
585 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { | 613 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { |
586 bic_imm_t1(dst, src1, s, cond, i, imm3, imm8); | 614 bic_imm_t1(dst, src1, s, cond, i, imm3, imm8); |
587 return; | 615 return; |
588 } | 616 } |
589 mov_thumb(ip, src2, LeaveCC, al); | 617 mov_thumb(ip, src2, LeaveCC, al); |
590 bic_thumb(dst, src1, Operand(ip), s, al); | 618 bic_thumb(dst, src1, Operand(ip), smode, al); |
591 return; | 619 return; |
592 } else { | 620 } else { |
593 // Register. | 621 // Register. |
594 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && | 622 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && |
595 s == SetCC && src2.shift_imm_ == 0) { | 623 smode != LeaveCC && src2.shift_imm_ == 0) { |
596 emit16(thumb16_mode2(BIC_REG) | | 624 emit16(thumb16_mode2(BIC_REG) | |
597 thumb16_2lowreg_encoding(dst, src2)); | 625 thumb16_2lowreg_encoding(dst, src2)); |
598 return; | 626 return; |
599 } else { | 627 } else { |
600 bic_reg_t2(dst, src1, src2, s, cond); | 628 bic_reg_t2(dst, src1, src2, s, cond); |
601 return; | 629 return; |
602 } | 630 } |
603 } | 631 } |
604 UNREACHABLE(); | 632 UNREACHABLE(); |
605 } | 633 } |
606 | 634 |
607 | 635 |
608 void Assembler::mul_thumb(Register dst, Register src1, Register src2, | 636 void Assembler::mul_thumb(Register dst, Register src1, Register src2, |
609 SBit s, Condition cond) { | 637 SBitMode smode, Condition cond) { |
610 ASSERT(cond == al); | 638 ASSERT(cond == al); |
611 if (dst.code() == src2.code() && are_low_reg(src1, src2) && s == SetCC) { | 639 if (dst.code() == src2.code() && are_low_reg(src1, src2) && |
| 640 smode != LeaveCC) { |
612 emit16(thumb16_mode2(MUL_REG) | | 641 emit16(thumb16_mode2(MUL_REG) | |
613 thumb16_2lowreg_encoding(dst, src1)); | 642 thumb16_2lowreg_encoding(dst, src1)); |
614 return; | 643 return; |
615 } else if (dst.code() == src1.code() && are_low_reg(src1, src2) && | 644 } else if (dst.code() == src1.code() && are_low_reg(src1, src2) && |
616 s == SetCC) { | 645 smode != LeaveCC) { |
617 emit16(thumb16_mode2(MUL_REG) | | 646 emit16(thumb16_mode2(MUL_REG) | |
618 thumb16_2lowreg_encoding(dst, src2)); | 647 thumb16_2lowreg_encoding(dst, src2)); |
619 return; | 648 return; |
620 } else { | 649 } else { |
| 650 SBit s = sbit_from_mode(smode); |
621 mul_t2(dst, src1, src2, s, cond); | 651 mul_t2(dst, src1, src2, s, cond); |
622 return; | 652 return; |
623 } | 653 } |
624 UNREACHABLE(); | 654 UNREACHABLE(); |
625 } | 655 } |
626 | 656 |
627 | 657 |
628 void Assembler::mvn_thumb(Register dst, const Operand& src, SBit s, | 658 void Assembler::mvn_thumb(Register dst, const Operand& src, SBitMode smode, |
629 Condition cond) { | 659 Condition cond) { |
630 ASSERT(cond == al); | 660 ASSERT(cond == al); |
| 661 SBit s = sbit_from_mode(smode); |
631 if (!src.rm_.is_valid()) { | 662 if (!src.rm_.is_valid()) { |
632 // Immediate. | 663 // Immediate. |
633 uint32_t i, imm3, imm8; | 664 uint32_t i, imm3, imm8; |
634 if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8)) { | 665 if (thumb_expand_imm(src.imm32_, &i, &imm3, &imm8)) { |
635 mvn_imm_t1(dst, s, cond, i, imm3, imm8); | 666 mvn_imm_t1(dst, s, cond, i, imm3, imm8); |
636 return; | 667 return; |
637 } | 668 } |
638 mov_thumb(ip, src, LeaveCC, al); | 669 mov_thumb(ip, src, LeaveCC, al); |
639 rsb_thumb(dst, ip, Operand(0), s, al); | 670 rsb_thumb(dst, ip, Operand(0), smode, al); |
640 return; | 671 return; |
641 } else { | 672 } else { |
642 // Register. | 673 // Register. |
643 if (are_low_reg(dst, src.rm_) && s == SetCC && src.shift_imm_ == 0) { | 674 if (are_low_reg(dst, src.rm_) && smode != LeaveCC && src.shift_imm_ == 0) { |
644 emit16(thumb16_mode2(MVN_REG) | | 675 emit16(thumb16_mode2(MVN_REG) | |
645 thumb16_2anyreg_encoding(dst, src)); | 676 thumb16_2anyreg_encoding(dst, src)); |
646 return; | 677 return; |
647 } else { | 678 } else { |
648 mvn_reg_t2(dst, src, s, cond); | 679 mvn_reg_t2(dst, src, s, cond); |
649 return; | 680 return; |
650 } | 681 } |
651 } | 682 } |
652 UNREACHABLE(); | 683 UNREACHABLE(); |
653 } | 684 } |
654 | 685 |
655 | 686 |
656 void Assembler::orr_thumb(Register dst, Register src1, const Operand& src2, | 687 void Assembler::orr_thumb(Register dst, Register src1, const Operand& src2, |
657 SBit s, Condition cond) { | 688 SBitMode smode, Condition cond) { |
658 ASSERT(cond == al); | 689 ASSERT(cond == al); |
| 690 SBit s = sbit_from_mode(smode); |
659 if (!src2.rm_.is_valid()) { | 691 if (!src2.rm_.is_valid()) { |
660 // Immediate. | 692 // Immediate. |
661 uint32_t i, imm3, imm8; | 693 uint32_t i, imm3, imm8; |
662 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { | 694 if (thumb_expand_imm(src2.imm32_, &i, &imm3, &imm8)) { |
663 orr_imm_t1(dst, src1, s, cond, i, imm3, imm8); | 695 orr_imm_t1(dst, src1, s, cond, i, imm3, imm8); |
664 return; | 696 return; |
665 } | 697 } |
666 mov_thumb(ip, src2, LeaveCC, al); | 698 mov_thumb(ip, src2, LeaveCC, al); |
667 orr_thumb(dst, src1, Operand(ip), s, al); | 699 orr_thumb(dst, src1, Operand(ip), smode, al); |
668 return; | 700 return; |
669 } else { | 701 } else { |
670 // Register. | 702 // Register. |
671 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && | 703 if (dst.code() == src1.code() && are_low_reg(src1, src2.rm_) && |
672 s == SetCC && src2.shift_imm_ == 0) { | 704 smode != LeaveCC && src2.shift_imm_ == 0) { |
673 emit16(thumb16_mode2(ORR_REG) | | 705 emit16(thumb16_mode2(ORR_REG) | |
674 thumb16_2lowreg_encoding(dst, src2)); | 706 thumb16_2lowreg_encoding(dst, src2)); |
675 return; | 707 return; |
676 } else if (src2.rs_.is_valid()) { | 708 } else if (src2.rs_.is_valid()) { |
677 ASSERT(src2.shift_op_ == LSL); | 709 ASSERT(src2.shift_op_ == LSL); |
678 lsl_thumb(dst, src2, s, cond); | 710 lsl_thumb(dst, src2, smode, cond); |
679 orr_thumb(dst, src1, Operand(dst), s, cond); | 711 orr_thumb(dst, src1, Operand(dst), smode, cond); |
680 return; | 712 return; |
681 } else { | 713 } else { |
682 orr_reg_t2(dst, src1, src2, s, cond); | 714 orr_reg_t2(dst, src1, src2, s, cond); |
683 return; | 715 return; |
684 } | 716 } |
685 } | 717 } |
686 UNREACHABLE(); | 718 UNREACHABLE(); |
687 } | 719 } |
688 | 720 |
689 | 721 |
(...skipping 554 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1244 } else { | 1276 } else { |
1245 // Encoding T2 (register) - 32 bit | 1277 // Encoding T2 (register) - 32 bit |
1246 emit32(thumb32_mode8(LDRSH_32_REG) | thumb32_3reg_lsl(dst, src)); | 1278 emit32(thumb32_mode8(LDRSH_32_REG) | thumb32_3reg_lsl(dst, src)); |
1247 return; | 1279 return; |
1248 } | 1280 } |
1249 UNREACHABLE(); | 1281 UNREACHABLE(); |
1250 } | 1282 } |
1251 | 1283 |
1252 | 1284 |
1253 void Assembler::mla_thumb(Register dst, Register src1, Register src2, | 1285 void Assembler::mla_thumb(Register dst, Register src1, Register src2, |
1254 Register srcA, SBit s, Condition cond) { | 1286 Register srcA, SBitMode smode, Condition cond) { |
1255 ASSERT(cond == al && s == LeaveCC); | 1287 ASSERT(cond == al && smode != SetCC); |
1256 emit32(thumb32_mode16(MLA_32) | | 1288 emit32(thumb32_mode16(MLA_32) | |
1257 thumb32_4reg(dst, src1, src2, srcA)); | 1289 thumb32_4reg(dst, src1, src2, srcA)); |
1258 } | 1290 } |
1259 | 1291 |
1260 | 1292 |
1261 void Assembler::mls_thumb(Register dst, Register src1, Register src2, | 1293 void Assembler::mls_thumb(Register dst, Register src1, Register src2, |
1262 Register srcA, Condition cond) { | 1294 Register srcA, Condition cond) { |
1263 ASSERT(cond == al); | 1295 ASSERT(cond == al); |
1264 emit32(thumb32_mode16(MLS_32) | | 1296 emit32(thumb32_mode16(MLS_32) | |
1265 thumb32_4reg(dst, src1, src2, srcA)); | 1297 thumb32_4reg(dst, src1, src2, srcA)); |
1266 } | 1298 } |
1267 | 1299 |
1268 | 1300 |
// Emits SDIV (signed divide): dst = src1 / src2. Thumb2 has only a 32-bit
// encoding and it never affects the condition flags.
void Assembler::sdiv_thumb(Register dst, Register src1, Register src2,
                           Condition cond) {
  ASSERT(cond == al);  // Conditional execution is not supported here.
  // thumb32_4reg is reused with pc (0b1111) in the fourth register slot:
  // the SDIV encoding requires that field to read as all-ones. B7|B6|B5|B4
  // likewise fills bits 7:4, fixed to 0b1111 in this encoding (ARMv7 ARM,
  // SDIV encoding T1).
  emit32(thumb32_mode17(SDIV_32) | thumb32_4reg(dst, src1, src2, pc) |
         B7 | B6 | B5 | B4);
}
1275 | 1307 |
1276 | 1308 |
1277 void Assembler::smlal_thumb(Register dstL, Register dstH, Register src1, | 1309 void Assembler::smlal_thumb(Register dstL, Register dstH, Register src1, |
1278 Register src2, SBit s, Condition cond) { | 1310 Register src2, SBitMode smode, Condition cond) { |
1279 ASSERT(cond == al && s == LeaveCC); | 1311 ASSERT(cond == al && smode != SetCC); |
1280 ASSERT(dstL.code() != dstH.code()); | 1312 ASSERT(dstL.code() != dstH.code()); |
1281 emit32(thumb32_mode17(SMLAL_32) | thumb32_4reg(dstH, src1, src2, dstL)); | 1313 emit32(thumb32_mode17(SMLAL_32) | thumb32_4reg(dstH, src1, src2, dstL)); |
1282 } | 1314 } |
1283 | 1315 |
1284 | 1316 |
1285 void Assembler::smull_thumb(Register dstL, Register dstH, Register src1, | 1317 void Assembler::smull_thumb(Register dstL, Register dstH, Register src1, |
1286 Register src2, SBit s, Condition cond) { | 1318 Register src2, SBitMode smode, Condition cond) { |
1287 ASSERT(cond == al && s == LeaveCC); | 1319 ASSERT(cond == al && smode != SetCC); |
1288 ASSERT(dstL.code() != dstH.code()); | 1320 ASSERT(dstL.code() != dstH.code()); |
1289 emit32(thumb32_mode17(SMULL_32) | thumb32_4reg(dstH, src1, src2, dstL)); | 1321 emit32(thumb32_mode17(SMULL_32) | thumb32_4reg(dstH, src1, src2, dstL)); |
1290 } | 1322 } |
1291 | 1323 |
1292 | 1324 |
1293 void Assembler::umlal_thumb(Register dstL, Register dstH, Register src1, | 1325 void Assembler::umlal_thumb(Register dstL, Register dstH, Register src1, |
1294 Register src2, SBit s, Condition cond) { | 1326 Register src2, SBitMode smode, Condition cond) { |
1295 ASSERT(cond == al && s == LeaveCC); | 1327 ASSERT(cond == al && smode != SetCC); |
1296 ASSERT(dstL.code() != dstH.code()); | 1328 ASSERT(dstL.code() != dstH.code()); |
1297 emit32(thumb32_mode17(UMLAL_32) | thumb32_4reg(dstH, src1, src2, dstL)); | 1329 emit32(thumb32_mode17(UMLAL_32) | thumb32_4reg(dstH, src1, src2, dstL)); |
1298 } | 1330 } |
1299 | 1331 |
1300 | 1332 |
1301 void Assembler::umull_thumb(Register dstL, Register dstH, Register src1, | 1333 void Assembler::umull_thumb(Register dstL, Register dstH, Register src1, |
1302 Register src2, SBit s, Condition cond) { | 1334 Register src2, SBitMode smode, Condition cond) { |
1303 ASSERT(cond == al && s == LeaveCC); | 1335 ASSERT(cond == al && smode != SetCC); |
1304 ASSERT(dstL.code() != dstH.code()); | 1336 ASSERT(dstL.code() != dstH.code()); |
1305 emit32(thumb32_mode17(UMULL_32) | thumb32_4reg(dstH, src1, src2, dstL)); | 1337 emit32(thumb32_mode17(UMULL_32) | thumb32_4reg(dstH, src1, src2, dstL)); |
1306 } | 1338 } |
1307 | 1339 |
1308 | 1340 |
1309 void Assembler::ldm_thumb(BlockAddrMode am, | 1341 void Assembler::ldm_thumb(BlockAddrMode am, |
1310 Register base, | 1342 Register base, |
1311 RegList dst, | 1343 RegList dst, |
1312 Condition cond) { | 1344 Condition cond) { |
1313 ASSERT(cond == al); | 1345 ASSERT(cond == al); |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1353 | 1385 |
1354 bool Assembler::IsThumbLdrPcImmediateOffset(Instr instr) { | 1386 bool Assembler::IsThumbLdrPcImmediateOffset(Instr instr) { |
1355 // Check the instruction is indeed a | 1387 // Check the instruction is indeed a |
1356 // ldr<cond> <Rd>, [pc +/- offset_12]. | 1388 // ldr<cond> <Rd>, [pc +/- offset_12]. |
1357 return (instr & kThumbLdrPCMask) == kThumbLdrPCPattern; | 1389 return (instr & kThumbLdrPCMask) == kThumbLdrPCPattern; |
1358 } | 1390 } |
1359 | 1391 |
1360 } } // namespace v8::internal | 1392 } } // namespace v8::internal |
1361 | 1393 |
1362 #endif // V8_TARGET_ARCH_ARM | 1394 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |