Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 90 matching lines...) | |
| 101 | 101 |
| 102 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode, | 102 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode, |
| 103 Condition cond) { | 103 Condition cond) { |
| 104 ASSERT(RelocInfo::IsCodeTarget(rmode)); | 104 ASSERT(RelocInfo::IsCodeTarget(rmode)); |
| 105 // 'code' is always generated ARM code, never THUMB code | 105 // 'code' is always generated ARM code, never THUMB code |
| 106 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | 106 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond); |
| 107 } | 107 } |
| 108 | 108 |
| 109 | 109 |
| 110 int MacroAssembler::CallSize(Register target, Condition cond) { | 110 int MacroAssembler::CallSize(Register target, Condition cond) { |
| 111 #if USE_BLX | 111 #ifdef USE_BLX |
| 112 return kInstrSize; | 112 return kInstrSize; |
| 113 #else | 113 #else |
| 114 return 2 * kInstrSize; | 114 return 2 * kInstrSize; |
| 115 #endif | 115 #endif |
| 116 } | 116 } |
| 117 | 117 |
| 118 | 118 |
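
The change from `#if USE_BLX` to `#ifdef USE_BLX` in `CallSize` (and in the other hunks below) matters because the two directives test different things: `#ifdef` asks only whether the macro is defined, while `#if` evaluates it as an expression — a compile error if the macro expands to nothing, and silently 0 if it is undefined. A standalone sketch of the difference (not V8 code; the macro names here are made up):

```cpp
// Standalone sketch of why "#ifdef" is the safer test for a flag macro.
#define USE_BLX_LIKE            // defined, but expands to nothing

#ifdef USE_BLX_LIKE
static const bool kTaken = true;    // taken: #ifdef only asks "is it defined?"
#else
static const bool kTaken = false;
#endif

// #if USE_BLX_LIKE             // would be a compile error: after expansion,
//                              // "#if" is left with an empty expression

#if UNDEFINED_MACRO             // undefined names silently evaluate to 0 here,
static const bool kWrong = true;    // so this branch is quietly skipped
#endif

int main() { return kTaken ? 0 : 1; }
```
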
| 119 void MacroAssembler::Call(Register target, Condition cond) { | 119 void MacroAssembler::Call(Register target, Condition cond) { |
| 120 // Block constant pool for the call instruction sequence. | 120 // Block constant pool for the call instruction sequence. |
| 121 BlockConstPoolScope block_const_pool(this); | 121 BlockConstPoolScope block_const_pool(this); |
| 122 Label start; | 122 Label start; |
| 123 bind(&start); | 123 bind(&start); |
| 124 #if USE_BLX | 124 #ifdef USE_BLX |
| 125 blx(target, cond); | 125 blx(target, cond); |
| 126 #else | 126 #else |
| 127 // set lr for return at current pc + 8 | 127 // set lr for return at current pc + 8 |
| 128 mov(lr, Operand(pc), LeaveCC, cond); | 128 mov(lr, Operand(pc), LeaveCC, cond); |
| 129 mov(pc, Operand(target), LeaveCC, cond); | 129 mov(pc, Operand(target), LeaveCC, cond); |
| 130 #endif | 130 #endif |
| 131 ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start)); | 131 ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start)); |
| 132 } | 132 } |
| 133 | 133 |
| 134 | 134 |
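
On the non-BLX path of `Call(Register target, ...)` above, `mov lr, pc` works because reading `pc` in ARM state yields the address of the current instruction plus 8 (a pipeline artifact), so `lr` ends up pointing just past the following `mov pc, target` — exactly the return address, and consistent with `CallSize` returning `2 * kInstrSize` on this path. A toy arithmetic check of that invariant, with a made-up address and assuming 4-byte instructions:

```cpp
// Toy check of the "mov lr, pc" return-address trick; the address is
// invented for illustration.
#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kInstrSize = 4;
  uint32_t call_seq = 0x1000;          // address of "mov lr, pc"
  uint32_t pc_as_read = call_seq + 8;  // ARM: pc reads as current instr + 8
  uint32_t lr = pc_as_read;            // what "mov lr, pc" stores
  // The sequence is two instructions; control returns right after it.
  assert(lr == call_seq + 2 * kInstrSize);
  return 0;
}
```
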
| (...skipping 21 matching lines...) | |
| 156 } | 156 } |
| 157 | 157 |
| 158 | 158 |
| 159 void MacroAssembler::Call(Address target, | 159 void MacroAssembler::Call(Address target, |
| 160 RelocInfo::Mode rmode, | 160 RelocInfo::Mode rmode, |
| 161 Condition cond) { | 161 Condition cond) { |
| 162 // Block constant pool for the call instruction sequence. | 162 // Block constant pool for the call instruction sequence. |
| 163 BlockConstPoolScope block_const_pool(this); | 163 BlockConstPoolScope block_const_pool(this); |
| 164 Label start; | 164 Label start; |
| 165 bind(&start); | 165 bind(&start); |
| 166 #if USE_BLX | 166 #ifdef USE_BLX |
| 167 // On ARMv5 and after the recommended call sequence is: | 167 // Call sequence on V7 or later is : |
| 168 // ldr ip, [pc, #...] | 168 // movw ip, #... @ call address low 16 |
| 169 // blx ip | 169 // movt ip, #... @ call address high 16 |
| | 170 // blx ip |
| | 171 // @ return address |
| | 172 // Or pre-V7: |
| | 173 // ldr ip, [pc, #...] @ call address |
| | 174 // blx ip |
| | 175 // @ return address |
| 170 | 176 |
| 171 // Statement positions are expected to be recorded when the target | 177 // Statement positions are expected to be recorded when the target |
| 172 // address is loaded. The mov method will automatically record | 178 // address is loaded. The mov method will automatically record |
| 173 // positions when pc is the target, since this is not the case here | 179 // positions when pc is the target, since this is not the case here |
| 174 // we have to do it explicitly. | 180 // we have to do it explicitly. |
| 175 positions_recorder()->WriteRecordedPositions(); | 181 positions_recorder()->WriteRecordedPositions(); |
| 176 | 182 |
| 177 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); | 183 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); |
| 178 blx(ip, cond); | 184 blx(ip, cond); |
| 179 | |
| 180 ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); | |
| 181 #else | 185 #else |
| 182 // Set lr for return at current pc + 8. | 186 // Set lr for return at current pc + 8. |
| 183 mov(lr, Operand(pc), LeaveCC, cond); | 187 mov(lr, Operand(pc), LeaveCC, cond); |
| 184 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. | 188 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. |
| 185 mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond); | 189 mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond); |
| 186 ASSERT(kCallTargetAddressOffset == kInstrSize); | |
| 187 #endif | 190 #endif |
| 188 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); | 191 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); |
| 189 } | 192 } |
| 190 | 193 |
| 191 | 194 |
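
Both `Call` overloads bind a label at the start of the sequence and then assert `CallSize(...) == SizeOfCodeGeneratedSince(&start)`, which keeps the size predictor and the emitter from drifting apart as the call sequences change. A toy version of that pattern with a hypothetical `Emitter` type (not the V8 API; the instruction encodings are only illustrative):

```cpp
// Label-and-size pattern in miniature: the predictor is checked against
// the bytes actually emitted.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

constexpr int kInstrSize = 4;

struct Emitter {
  std::vector<uint32_t> buf;
  size_t pc() const { return buf.size() * kInstrSize; }
  void emit(uint32_t instr) { buf.push_back(instr); }
};

int PredictedCallSize(bool use_blx) {
  return use_blx ? kInstrSize : 2 * kInstrSize;
}

void EmitCall(Emitter& e, bool use_blx) {
  size_t start = e.pc();              // analogue of bind(&start)
  if (use_blx) {
    e.emit(0xE12FFF3C);               // blx ip
  } else {
    e.emit(0xE1A0E00F);               // mov lr, pc
    e.emit(0xE1A0F00C);               // mov pc, ip
  }
  // Analogue of ASSERT_EQ(CallSize(...), SizeOfCodeGeneratedSince(&start)).
  assert(PredictedCallSize(use_blx) == static_cast<int>(e.pc() - start));
}

int main() {
  Emitter e;
  EmitCall(e, true);
  EmitCall(e, false);
  return 0;
}
```
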
| 192 int MacroAssembler::CallSize(Handle<Code> code, | 195 int MacroAssembler::CallSize(Handle<Code> code, |
| 193 RelocInfo::Mode rmode, | 196 RelocInfo::Mode rmode, |
| 194 TypeFeedbackId ast_id, | 197 TypeFeedbackId ast_id, |
| 195 Condition cond) { | 198 Condition cond) { |
| 196 return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond); | 199 return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond); |
| (...skipping 84 matching lines...) | |
| 281 CpuFeatures::Scope scope(VFP2); | 284 CpuFeatures::Scope scope(VFP2); |
| 282 if (!dst.is(src)) { | 285 if (!dst.is(src)) { |
| 283 vmov(dst, src); | 286 vmov(dst, src); |
| 284 } | 287 } |
| 285 } | 288 } |
| 286 | 289 |
| 287 | 290 |
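
The `Move` fragment above guards the VFP register move so that a move-to-self emits nothing: a redundant `vmov` would only cost code space and cycles. The same guard in miniature, with stand-in types rather than V8's:

```cpp
// Self-move guard sketch; Reg and the counter are stand-ins, not V8 types.
#include <cassert>

struct Reg {
  int code;
  bool is(const Reg& other) const { return code == other.code; }
};

int vmov_count = 0;
void Move(Reg dst, Reg src) {
  if (!dst.is(src)) ++vmov_count;  // "emit" vmov only when registers differ
}

int main() {
  Reg d0{0}, d1{1};
  Move(d0, d0);                    // no-op: nothing emitted
  Move(d0, d1);                    // one vmov
  assert(vmov_count == 1);
  return 0;
}
```
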
| 288 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, | 291 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, |
| 289 Condition cond) { | 292 Condition cond) { |
| 290 if (!src2.is_reg() && | 293 if (!src2.is_reg() && |
| 291 !src2.must_use_constant_pool(this) && | 294 !src2.must_output_reloc_info(this) && |
| 292 src2.immediate() == 0) { | 295 src2.immediate() == 0) { |
| 293 mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, cond); | 296 mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, cond); |
| 294 | |
| 295 } else if (!src2.is_single_instruction(this) && | 297 } else if (!src2.is_single_instruction(this) && |
| 296 !src2.must_use_constant_pool(this) && | 298 !src2.must_output_reloc_info(this) && |
| 297 CpuFeatures::IsSupported(ARMv7) && | 299 CpuFeatures::IsSupported(ARMv7) && |
| 298 IsPowerOf2(src2.immediate() + 1)) { | 300 IsPowerOf2(src2.immediate() + 1)) { |

jfb - chromium.org, 2012/10/10 13:56:52:
It would be kind of silly, but if src2 == 0xffffff

danno, 2012/10/17 10:04:44:
Possible, but that's for a separate CL. On 2012/10

| 299 ubfx(dst, src1, 0, | 301 ubfx(dst, src1, 0, |
| 300 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); | 302 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); |
| 301 | |
| 302 } else { | 303 } else { |
| 303 and_(dst, src1, src2, LeaveCC, cond); | 304 and_(dst, src1, src2, LeaveCC, cond); |
| 304 } | 305 } |
| 305 } | 306 } |
| 306 | 307 |
| 307 | 308 |
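
The second branch of `And` above rewrites `and dst, src1, #imm` as a `ubfx` when the immediate is a contiguous run of low bits: `imm + 1` being a power of two is exactly that condition, and `WhichPowerOf2(imm + 1)` is the field width. (This also sidesteps the all-ones case raised in the review thread — presumably `0xffffffff` — since `0xffffffff + 1` wraps to 0, which is not a power of two.) A self-contained check, with local stand-ins for V8's helpers:

```cpp
// Check that "x & imm" equals a width-n low-bit extract when imm = 2^n - 1.
// IsPowerOf2/WhichPowerOf2 are local stand-ins for V8's helpers.
#include <cassert>
#include <cstdint>

static bool IsPowerOf2(uint32_t x) { return x != 0 && (x & (x - 1)) == 0; }
static int WhichPowerOf2(uint32_t x) { int n = 0; while (x >>= 1) ++n; return n; }

int main() {
  uint32_t imm = 0xFF;                  // low 8 bits set
  assert(IsPowerOf2(imm + 1));          // 0x100 is a power of two
  int width = WhichPowerOf2(imm + 1);   // 8
  uint32_t x = 0x12345678;
  uint32_t via_and  = x & imm;
  uint32_t via_ubfx = (x >> 0) & ((1u << width) - 1);  // ubfx dst, src, 0, width
  assert(via_and == via_ubfx);
  assert(!IsPowerOf2(0xFFFFFFFFu + 1)); // all-ones mask: wraps to 0, rejected
  return 0;
}
```
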
| 308 void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width, | 309 void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width, |
| 309 Condition cond) { | 310 Condition cond) { |
| 310 ASSERT(lsb < 32); | 311 ASSERT(lsb < 32); |
| 311 if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) { | 312 if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) { |
| (...skipping 3527 matching lines...) | |
| 3839 void CodePatcher::EmitCondition(Condition cond) { | 3840 void CodePatcher::EmitCondition(Condition cond) { |
| 3840 Instr instr = Assembler::instr_at(masm_.pc_); | 3841 Instr instr = Assembler::instr_at(masm_.pc_); |
| 3841 instr = (instr & ~kCondMask) | cond; | 3842 instr = (instr & ~kCondMask) | cond; |
| 3842 masm_.emit(instr); | 3843 masm_.emit(instr); |
| 3843 } | 3844 } |
| 3844 | 3845 |
| 3845 | 3846 |
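
`CodePatcher::EmitCondition` rewrites only the condition field of an already-emitted instruction: on ARM the condition code occupies the top four bits, so masking with `~kCondMask` and OR-ing in the new condition leaves the rest of the encoding intact. A minimal model, assuming `kCondMask` is `0xF0000000` (the ARM layout):

```cpp
// Minimal model of patching an ARM instruction's condition field.
#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kCondMask = 0xF0000000;  // cond lives in bits 31:28
  const uint32_t eq = 0x00000000;         // cond field for EQ
  uint32_t instr = 0xE1A00001;            // mov r0, r1 (AL condition)
  instr = (instr & ~kCondMask) | eq;      // same instruction, now conditional
  assert(instr == 0x01A00001);            // moveq r0, r1
  return 0;
}
```
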
| 3846 } } // namespace v8::internal | 3847 } } // namespace v8::internal |
| 3847 | 3848 |
| 3848 #endif // V8_TARGET_ARCH_ARM | 3849 #endif // V8_TARGET_ARCH_ARM |