OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 130 matching lines...)
141 intptr_t target, RelocInfo::Mode rmode, Condition cond) { | 141 intptr_t target, RelocInfo::Mode rmode, Condition cond) { |
142 int size = 2 * kInstrSize; | 142 int size = 2 * kInstrSize; |
143 Instr mov_instr = cond | MOV | LeaveCC; | 143 Instr mov_instr = cond | MOV | LeaveCC; |
144 if (!Operand(target, rmode).is_single_instruction(mov_instr)) { | 144 if (!Operand(target, rmode).is_single_instruction(mov_instr)) { |
145 size += kInstrSize; | 145 size += kInstrSize; |
146 } | 146 } |
147 return size; | 147 return size; |
148 } | 148 } |
149 | 149 |
150 | 150 |
151 void MacroAssembler::Call( | 151 void MacroAssembler::Call(intptr_t target, |
152 intptr_t target, RelocInfo::Mode rmode, Condition cond) { | 152 RelocInfo::Mode rmode, |
153 Condition cond) { | |
153 // Block constant pool for the call instruction sequence. | 154 // Block constant pool for the call instruction sequence. |
154 BlockConstPoolScope block_const_pool(this); | 155 BlockConstPoolScope block_const_pool(this); |
155 #ifdef DEBUG | 156 #ifdef DEBUG |
156 int pre_position = pc_offset(); | 157 int pre_position = pc_offset(); |
157 #endif | 158 #endif |
158 | 159 |
159 #if USE_BLX | 160 #if USE_BLX |
160 // On ARMv5 and after the recommended call sequence is: | 161 // On ARMv5 and after the recommended call sequence is: |
161 // ldr ip, [pc, #...] | 162 // ldr ip, [pc, #...] |
162 // blx ip | 163 // blx ip |
163 | 164 |
164 // Statement positions are expected to be recorded when the target | 165 // Statement positions are expected to be recorded when the target |
165 // address is loaded. The mov method will automatically record | 166 // address is loaded. The mov method will automatically record |
166 // positions when pc is the target, since this is not the case here | 167 // positions when pc is the target, since this is not the case here |
167 // we have to do it explicitly. | 168 // we have to do it explicitly. |
168 positions_recorder()->WriteRecordedPositions(); | 169 positions_recorder()->WriteRecordedPositions(); |
169 | 170 |
170 mov(ip, Operand(target, rmode), LeaveCC, cond); | 171 mov(ip, Operand(target, rmode), LeaveCC, cond); |
Vitaly Repeshko
2011/04/15 01:24:43
What if USE_BLX is on?
William Hesse
2011/04/15 11:51:27
The ast_id is carried through Assembler::ast_id_for_reloc_info_.
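To make the exchange above concrete, here is a standalone sketch of the stash-and-consume pattern the new CallWithAstId relies on. This is not V8 code: ModelAssembler, Entry, Call, and Dump are made-up names; only ast_id_for_reloc_info_, kNoASTId, and CallWithAstId echo identifiers from the patch. Because the id is parked on the assembler rather than threaded through the operand, the later call emission can presumably pick it up the same way on either the USE_BLX or the non-BLX path, which is what the reply above is pointing at.

```cpp
// Standalone model (not V8 code) of the stash-and-consume pattern used by
// CallWithAstId: the ast id is parked in a member, the next emitted call
// picks it up when recording its reloc entry, and the member is reset so a
// stale id can never leak into a later call. All names are illustrative.
#include <cassert>
#include <cstdio>
#include <vector>

const unsigned kNoASTId = 0xffffffffu;  // Sentinel, mirroring the patch's kNoASTId.

class ModelAssembler {
 public:
  ModelAssembler() : ast_id_for_reloc_info_(kNoASTId) {}

  void CallWithAstId(int code_target, unsigned ast_id) {
    assert(ast_id != kNoASTId);
    assert(ast_id_for_reloc_info_ == kNoASTId);  // No pending id from an earlier call.
    ast_id_for_reloc_info_ = ast_id;             // Stash for the reloc recording below.
    Call(code_target);
  }

  void Call(int code_target) {
    // Emitting the target records a reloc entry; the pending ast id (if any)
    // rides along and is consumed exactly once.
    unsigned id = ast_id_for_reloc_info_;
    ast_id_for_reloc_info_ = kNoASTId;
    reloc_.push_back({code_target, id});
  }

  void Dump() const {
    for (const Entry& r : reloc_) {
      if (r.ast_id == kNoASTId)
        std::printf("target=%d (no ast id)\n", r.target);
      else
        std::printf("target=%d ast_id=%u\n", r.target, r.ast_id);
    }
  }

 private:
  struct Entry { int target; unsigned ast_id; };
  unsigned ast_id_for_reloc_info_;
  std::vector<Entry> reloc_;
};

int main() {
  ModelAssembler masm;
  masm.CallWithAstId(100, 7);  // Call carrying AST id 7.
  masm.Call(200);              // Plain call, no id attached.
  masm.Dump();
  return 0;
}
```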
171 blx(ip, cond); | 172 blx(ip, cond); |
172 | 173 |
173 ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); | 174 ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); |
174 #else | 175 #else |
175 // Set lr for return at current pc + 8. | 176 // Set lr for return at current pc + 8. |
176 mov(lr, Operand(pc), LeaveCC, cond); | 177 mov(lr, Operand(pc), LeaveCC, cond); |
177 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. | 178 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. |
178 mov(pc, Operand(target, rmode), LeaveCC, cond); | 179 mov(pc, Operand(target, rmode, ast_id), LeaveCC, cond); |
179 ASSERT(kCallTargetAddressOffset == kInstrSize); | 180 ASSERT(kCallTargetAddressOffset == kInstrSize); |
180 #endif | 181 #endif |
181 | 182 |
182 #ifdef DEBUG | 183 #ifdef DEBUG |
183 int post_position = pc_offset(); | 184 int post_position = pc_offset(); |
184 CHECK_EQ(pre_position + CallSize(target, rmode, cond), post_position); | 185 CHECK_EQ(pre_position + CallSize(target, rmode, cond), post_position); |
185 #endif | 186 #endif |
186 } | 187 } |
187 | 188 |
188 | 189 |
(...skipping 18 matching lines...)
207 #endif | 208 #endif |
208 } | 209 } |
209 | 210 |
210 | 211 |
211 int MacroAssembler::CallSize( | 212 int MacroAssembler::CallSize( |
212 Handle<Code> code, RelocInfo::Mode rmode, Condition cond) { | 213 Handle<Code> code, RelocInfo::Mode rmode, Condition cond) { |
213 return CallSize(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | 214 return CallSize(reinterpret_cast<intptr_t>(code.location()), rmode, cond); |
214 } | 215 } |
215 | 216 |
216 | 217 |
217 void MacroAssembler::Call( | 218 void MacroAssembler::CallWithAstId(Handle<Code> code, |
218 Handle<Code> code, RelocInfo::Mode rmode, Condition cond) { | 219 RelocInfo::Mode rmode, |
Vitaly Repeshko
2011/04/15 01:24:43
nit: Fix indentation.
220 unsigned ast_id, | |
221 Condition cond) { | |
219 #ifdef DEBUG | 222 #ifdef DEBUG |
220 int pre_position = pc_offset(); | 223 int pre_position = pc_offset(); |
221 #endif | 224 #endif |
225 | |
226 ASSERT(rmode == RelocInfo::CODE_TARGET_WITH_ID); | |
227 ASSERT(ast_id != kNoASTId); | |
228 ASSERT(ast_id_for_reloc_info_ == kNoASTId); | |
229 ast_id_for_reloc_info_ = ast_id; | |
230 // 'code' is always generated ARM code, never THUMB code | |
231 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | |
232 | |
233 #ifdef DEBUG | |
234 int post_position = pc_offset(); | |
235 CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position); | |
236 #endif | |
237 } | |
238 | |
239 | |
240 void MacroAssembler::Call(Handle<Code> code, | |
241 RelocInfo::Mode rmode, | |
242 Condition cond) { | |
243 #ifdef DEBUG | |
244 int pre_position = pc_offset(); | |
245 #endif | |
222 | 246 |
223 ASSERT(RelocInfo::IsCodeTarget(rmode)); | 247 ASSERT(RelocInfo::IsCodeTarget(rmode)); |
224 // 'code' is always generated ARM code, never THUMB code | 248 // 'code' is always generated ARM code, never THUMB code |
225 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | 249 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); |
226 | 250 |
227 #ifdef DEBUG | 251 #ifdef DEBUG |
228 int post_position = pc_offset(); | 252 int post_position = pc_offset(); |
229 CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position); | 253 CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position); |
230 #endif | 254 #endif |
231 } | 255 } |
(...skipping 2694 matching lines...)
2926 void CodePatcher::EmitCondition(Condition cond) { | 2950 void CodePatcher::EmitCondition(Condition cond) { |
2927 Instr instr = Assembler::instr_at(masm_.pc_); | 2951 Instr instr = Assembler::instr_at(masm_.pc_); |
2928 instr = (instr & ~kCondMask) | cond; | 2952 instr = (instr & ~kCondMask) | cond; |
2929 masm_.emit(instr); | 2953 masm_.emit(instr); |
2930 } | 2954 } |
2931 | 2955 |
2932 | 2956 |
2933 } } // namespace v8::internal | 2957 } } // namespace v8::internal |
2934 | 2958 |
2935 #endif // V8_TARGET_ARCH_ARM | 2959 #endif // V8_TARGET_ARCH_ARM |
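For context, a hypothetical call site for the new helper might look like the sketch below. The surrounding caller, the stub handle, and expr->id() are illustrative assumptions, not part of this patch; only CallWithAstId, RelocInfo::CODE_TARGET_WITH_ID, and the argument order come from the code above.

```cpp
// Hypothetical call site (illustrative only, not from this patch): a
// full-codegen-style caller attaches the AST node's id to a code-target
// call so the return address can later be mapped back to that node.
Handle<Code> stub = SomeCallStub();  // assumed helper returning generated code
__ CallWithAstId(stub, RelocInfo::CODE_TARGET_WITH_ID, expr->id(), al);
```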