OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 79 matching lines...)
90 BlockConstPoolScope block_const_pool(this); | 90 BlockConstPoolScope block_const_pool(this); |
91 Label start; | 91 Label start; |
92 bind(&start); | 92 bind(&start); |
93 blx(target, cond); | 93 blx(target, cond); |
94 } | 94 } |
95 | 95 |
96 | 96 |
97 int MacroAssembler::CallSize( | 97 int MacroAssembler::CallSize( |
98 Address target, RelocInfo::Mode rmode, Condition cond) { | 98 Address target, RelocInfo::Mode rmode, Condition cond) { |
99 int size = 2 * kInstrSize; | 99 int size = 2 * kInstrSize; |
100 Instr mov_instr = cond | MOV | LeaveCC; | 100 Instr mov_instr = cond | MOV | LeaveCCBit; |
101 intptr_t immediate = reinterpret_cast<intptr_t>(target); | 101 intptr_t immediate = reinterpret_cast<intptr_t>(target); |
102 if (!Operand(immediate, rmode).is_single_instruction(this, mov_instr)) { | 102 if (!Operand(immediate, rmode).is_single_instruction(this, mov_instr)) { |
103 size += kInstrSize; | 103 size += kInstrSize; |
104 } | 104 } |
105 return size; | 105 return size; |
106 } | 106 } |
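CallSize() is sizing the two-instruction sequence that Call() emits below (a mov of the address into ip, then blx ip). A rough sketch of the arithmetic, assuming kInstrSize == 4 as on ARM:

    // Illustrative size arithmetic only (not part of the patch):
    //
    //   address loadable in a single instruction (mov or constant-pool ldr):
    //     mov/ldr ip, <target>    @ 4 bytes
    //     blx ip                  @ 4 bytes   -> 2 * kInstrSize = 8
    //
    //   address needs a movw/movt pair (the ARMv7 non-encodable case):
    //     movw ip, #:lower16:target
    //     movt ip, #:upper16:target
    //     blx  ip                             -> 3 * kInstrSize = 12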
107 | 107 |
108 | 108 |
109 int MacroAssembler::CallSizeNotPredictableCodeSize( | 109 int MacroAssembler::CallSizeNotPredictableCodeSize( |
110 Address target, RelocInfo::Mode rmode, Condition cond) { | 110 Address target, RelocInfo::Mode rmode, Condition cond) { |
111 int size = 2 * kInstrSize; | 111 int size = 2 * kInstrSize; |
112 Instr mov_instr = cond | MOV | LeaveCC; | 112 Instr mov_instr = cond | MOV | LeaveCCBit; |
113 intptr_t immediate = reinterpret_cast<intptr_t>(target); | 113 intptr_t immediate = reinterpret_cast<intptr_t>(target); |
114 if (!Operand(immediate, rmode).is_single_instruction(NULL, mov_instr)) { | 114 if (!Operand(immediate, rmode).is_single_instruction(NULL, mov_instr)) { |
115 size += kInstrSize; | 115 size += kInstrSize; |
116 } | 116 } |
117 return size; | 117 return size; |
118 } | 118 } |
119 | 119 |
120 | 120 |
121 void MacroAssembler::Call(Address target, | 121 void MacroAssembler::Call(Address target, |
122 RelocInfo::Mode rmode, | 122 RelocInfo::Mode rmode, |
(...skipping 19 matching lines...)
142 // ldr ip, [pc, #...] @ call address | 142 // ldr ip, [pc, #...] @ call address |
143 // blx ip | 143 // blx ip |
144 // @ return address | 144 // @ return address |
145 | 145 |
146 // Statement positions are expected to be recorded when the target | 146 // Statement positions are expected to be recorded when the target |
147 // address is loaded. The mov method will automatically record | 147 // address is loaded. The mov method will automatically record |
148 // positions when pc is the target. Since this is not the case | 148 // positions when pc is the target. Since this is not the case |
149 // here, we have to do it explicitly. | 149 // here, we have to do it explicitly. |
150 positions_recorder()->WriteRecordedPositions(); | 150 positions_recorder()->WriteRecordedPositions(); |
151 | 151 |
152 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); | 152 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC); |
153 blx(ip, cond); | 153 blx(ip, cond); |
154 | 154 |
155 if (mode == NEVER_INLINE_TARGET_ADDRESS) { | 155 if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
156 set_predictable_code_size(old_predictable_code_size); | 156 set_predictable_code_size(old_predictable_code_size); |
157 } | 157 } |
158 } | 158 } |
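The size computed by CallSize() above must match what Call() actually emits. A minimal usage sketch, assuming the usual Label / SizeOfCodeGeneratedSince() pattern from Assembler (the relevant lines are elided in this diff):

    // Hypothetical check that Call() and CallSize() agree:
    Label start;
    bind(&start);
    Call(target, rmode, cond);
    ASSERT_EQ(CallSize(target, rmode, cond),
              SizeOfCodeGeneratedSince(&start));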
159 | 159 |
160 | 160 |
161 int MacroAssembler::CallSize(Handle<Code> code, | 161 int MacroAssembler::CallSize(Handle<Code> code, |
162 RelocInfo::Mode rmode, | 162 RelocInfo::Mode rmode, |
(...skipping 38 matching lines...)
201 Drop(drop, cond); | 201 Drop(drop, cond); |
202 Ret(cond); | 202 Ret(cond); |
203 } | 203 } |
204 | 204 |
205 | 205 |
206 void MacroAssembler::Swap(Register reg1, | 206 void MacroAssembler::Swap(Register reg1, |
207 Register reg2, | 207 Register reg2, |
208 Register scratch, | 208 Register scratch, |
209 Condition cond) { | 209 Condition cond) { |
210 if (scratch.is(no_reg)) { | 210 if (scratch.is(no_reg)) { |
211 eor(reg1, reg1, Operand(reg2), LeaveCC, cond); | 211 eor(reg1, reg1, Operand(reg2), DontCareCC, cond); |
212 eor(reg2, reg2, Operand(reg1), LeaveCC, cond); | 212 eor(reg2, reg2, Operand(reg1), DontCareCC, cond); |
213 eor(reg1, reg1, Operand(reg2), LeaveCC, cond); | 213 eor(reg1, reg1, Operand(reg2), DontCareCC, cond); |
214 } else { | 214 } else { |
215 mov(scratch, reg1, LeaveCC, cond); | 215 mov(scratch, reg1, DontCareCC, cond); |
216 mov(reg1, reg2, LeaveCC, cond); | 216 mov(reg1, reg2, DontCareCC, cond); |
217 mov(reg2, scratch, LeaveCC, cond); | 217 mov(reg2, scratch, DontCareCC, cond); |
218 } | 218 } |
219 } | 219 } |
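The no-scratch branch above is the classic three-EOR (XOR) swap. A minimal standalone C++ sketch of the same trick (illustrative, not part of the patch):

    #include <cstdint>

    // Swap two values without a temporary, mirroring the eor sequence above.
    void XorSwap(uint32_t& a, uint32_t& b) {
      a ^= b;  // a = a0 ^ b0
      b ^= a;  // b = b0 ^ (a0 ^ b0) = a0
      a ^= b;  // a = (a0 ^ b0) ^ a0 = b0
    }

As with the register version, the trick assumes the two operands are distinct: swapping a value with itself zeroes it.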
220 | 220 |
221 | 221 |
222 void MacroAssembler::Call(Label* target) { | 222 void MacroAssembler::Call(Label* target) { |
223 bl(target); | 223 bl(target); |
224 } | 224 } |
225 | 225 |
226 | 226 |
227 void MacroAssembler::Push(Handle<Object> handle) { | 227 void MacroAssembler::Push(Handle<Object> handle) { |
228 mov(ip, Operand(handle)); | 228 mov(ip, Operand(handle)); |
229 push(ip); | 229 push(ip); |
230 } | 230 } |
231 | 231 |
232 | 232 |
233 void MacroAssembler::Move(Register dst, Handle<Object> value) { | 233 void MacroAssembler::Move(Register dst, Handle<Object> value) { |
234 mov(dst, Operand(value)); | 234 mov(dst, Operand(value), LeaveCC); |
235 } | 235 } |
236 | 236 |
237 | 237 |
238 void MacroAssembler::Move(Register dst, Register src, Condition cond) { | 238 void MacroAssembler::Move(Register dst, Register src, Condition cond) { |
239 if (!dst.is(src)) { | 239 if (!dst.is(src)) { |
240 mov(dst, src, LeaveCC, cond); | 240 mov(dst, src, LeaveCC, cond); |
241 } | 241 } |
242 } | 242 } |
243 | 243 |
244 | 244 |
245 void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) { | 245 void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) { |
246 if (!dst.is(src)) { | 246 if (!dst.is(src)) { |
247 vmov(dst, src); | 247 vmov(dst, src); |
248 } | 248 } |
249 } | 249 } |
250 | 250 |
251 | 251 |
252 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, | 252 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, |
253 Condition cond) { | 253 Condition cond) { |
254 if (!src2.is_reg() && | 254 if (!src2.is_reg() && |
255 !src2.must_output_reloc_info(this) && | 255 !src2.must_output_reloc_info(this) && |
256 src2.immediate() == 0) { | 256 src2.immediate() == 0) { |
257 mov(dst, Operand::Zero(), LeaveCC, cond); | 257 mov(dst, Operand::Zero(), DontCareCC, cond); |
258 } else if (!src2.is_single_instruction(this) && | 258 } else if (!src2.is_single_instruction(this) && |
259 !src2.must_output_reloc_info(this) && | 259 !src2.must_output_reloc_info(this) && |
260 CpuFeatures::IsSupported(ARMv7) && | 260 CpuFeatures::IsSupported(ARMv7) && |
261 IsPowerOf2(src2.immediate() + 1)) { | 261 IsPowerOf2(src2.immediate() + 1)) { |
262 ubfx(dst, src1, 0, | 262 ubfx(dst, src1, 0, |
263 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); | 263 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); |
264 } else { | 264 } else { |
265 and_(dst, src1, src2, LeaveCC, cond); | 265 and_(dst, src1, src2, DontCareCC, cond); |
266 } | 266 } |
267 } | 267 } |
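The middle branch of And() relies on a small bit-math fact: an immediate imm is a contiguous mask of the low n bits exactly when imm + 1 is a power of two, and masking with it is the same as extracting those n bits, which ubfx does in one instruction. A standalone sketch of the predicate (illustrative only):

    #include <cstdint>

    // True when imm looks like 0b0...011...1, i.e. imm + 1 is a power of two.
    bool IsLowBitMask(uint32_t imm) {
      uint32_t n = imm + 1;
      return n != 0 && (n & (n - 1)) == 0;
    }

    // Example: imm = 0xFF -> imm + 1 = 0x100 = 2^8, so
    // (x & 0xFF) is the same as ubfx(x, lsb = 0, width = 8).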
268 | 268 |
269 | 269 |
270 void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width, | 270 void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width, |
271 Condition cond) { | 271 Condition cond) { |
272 ASSERT(lsb < 32); | 272 ASSERT(lsb < 32); |
273 if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) { | 273 if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) { |
274 int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1); | 274 int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1); |
275 and_(dst, src1, Operand(mask), LeaveCC, cond); | 275 and_(dst, src1, Operand(mask), DontCareCC, cond); |
276 if (lsb != 0) { | 276 if (lsb != 0) { |
277 mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond); | 277 mov(dst, Operand(dst, LSR, lsb), DontCareCC, cond); |
278 } | 278 } |
279 } else { | 279 } else { |
280 ubfx(dst, src1, lsb, width, cond); | 280 ubfx(dst, src1, lsb, width, cond); |
281 } | 281 } |
282 } | 282 } |
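A worked example of the pre-ARMv7 fallback, for lsb = 4 and width = 8 (illustrative):

    // mask = (1 << (8 + 4)) - 1 - ((1 << 4) - 1)
    //      = 0xFFF - 0xF
    //      = 0xFF0                   @ selects bits 4..11
    // and_(dst, src1, 0xFF0)         @ clear everything outside the field
    // mov(dst, dst LSR 4)            @ slide the field down to bits 0..7

The and_/LSR pair computes exactly what ubfx(dst, src1, 4, 8) does in a single ARMv7 instruction.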
283 | 283 |
284 | 284 |
285 void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width, | 285 void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width, |
286 Condition cond) { | 286 Condition cond) { |
287 ASSERT(lsb < 32); | 287 ASSERT(lsb < 32); |
288 if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) { | 288 if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) { |
289 int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1); | 289 int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1); |
290 and_(dst, src1, Operand(mask), LeaveCC, cond); | 290 and_(dst, src1, Operand(mask), DontCareCC, cond); |
291 int shift_up = 32 - lsb - width; | 291 int shift_up = 32 - lsb - width; |
292 int shift_down = lsb + shift_up; | 292 int shift_down = lsb + shift_up; |
293 if (shift_up != 0) { | 293 if (shift_up != 0) { |
294 mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond); | 294 mov(dst, Operand(dst, LSL, shift_up), DontCareCC, cond); |
295 } | 295 } |
296 if (shift_down != 0) { | 296 if (shift_down != 0) { |
297 mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond); | 297 mov(dst, Operand(dst, ASR, shift_down), DontCareCC, cond); |
298 } | 298 } |
299 } else { | 299 } else { |
300 sbfx(dst, src1, lsb, width, cond); | 300 sbfx(dst, src1, lsb, width, cond); |
301 } | 301 } |
302 } | 302 } |
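The signed variant needs two shifts so that the field's top bit becomes the register's sign bit before the arithmetic shift back down. Worked example for lsb = 4, width = 8 (illustrative):

    // mask       = 0xFF0            @ bits 4..11, as in Ubfx above
    // shift_up   = 32 - 4 - 8 = 20
    // shift_down = 4 + 20     = 24
    // LSL 20 moves bit 11 (the field's sign bit) up to bit 31;
    // ASR 24 brings the field down to bits 0..7, replicating the sign bit
    // through bits 8..31 -- the same result as sbfx(dst, src1, 4, 8).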
303 | 303 |
304 | 304 |
305 void MacroAssembler::Bfi(Register dst, | 305 void MacroAssembler::Bfi(Register dst, |
306 Register src, | 306 Register src, |
307 Register scratch, | 307 Register scratch, |
(...skipping 3599 matching lines...)
3907 void CodePatcher::EmitCondition(Condition cond) { | 3907 void CodePatcher::EmitCondition(Condition cond) { |
3908 Instr instr = Assembler::instr_at(masm_.pc_); | 3908 Instr instr = Assembler::instr_at(masm_.pc_); |
3909 instr = (instr & ~kCondMask) | cond; | 3909 instr = (instr & ~kCondMask) | cond; |
3910 masm_.emit(instr); | 3910 masm_.emit(instr); |
3911 } | 3911 } |
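EmitCondition() rewrites only the 4-bit condition field of the instruction under the patcher's cursor. A minimal standalone sketch of the bit surgery, assuming the standard ARM encoding where the condition occupies bits 28..31 (so kCondMask == 0xF0000000):

    #include <cstdint>

    // Replace an instruction's condition field, leaving all other bits alone.
    uint32_t SetCondition(uint32_t instr, uint32_t cond_bits) {
      const uint32_t kCondMask = 0xF0000000u;
      return (instr & ~kCondMask) | cond_bits;
    }

    // Example: patch "mov r0, r0" (al, 0xE1A00000) into "moveq r0, r0":
    //   SetCondition(0xE1A00000u, 0x0u) == 0x01A00000u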
3912 | 3912 |
3913 | 3913 |
3914 } } // namespace v8::internal | 3914 } } // namespace v8::internal |
3915 | 3915 |
3916 #endif // V8_TARGET_ARCH_ARM | 3916 #endif // V8_TARGET_ARCH_ARM |