| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 130 matching lines...) |
| 141 intptr_t target, RelocInfo::Mode rmode, Condition cond) { | 141 intptr_t target, RelocInfo::Mode rmode, Condition cond) { |
| 142 int size = 2 * kInstrSize; | 142 int size = 2 * kInstrSize; |
| 143 Instr mov_instr = cond | MOV | LeaveCC; | 143 Instr mov_instr = cond | MOV | LeaveCC; |
| 144 if (!Operand(target, rmode).is_single_instruction(mov_instr)) { | 144 if (!Operand(target, rmode).is_single_instruction(mov_instr)) { |
| 145 size += kInstrSize; | 145 size += kInstrSize; |
| 146 } | 146 } |
| 147 return size; | 147 return size; |
| 148 } | 148 } |
| 149 | 149 |
| 150 | 150 |
| 151 void MacroAssembler::Call(intptr_t target, | 151 void MacroAssembler::Call( |
| 152 RelocInfo::Mode rmode, | 152 intptr_t target, RelocInfo::Mode rmode, Condition cond) { |
| 153 Condition cond) { | |
| 154 // Block constant pool for the call instruction sequence. | 153 // Block constant pool for the call instruction sequence. |
| 155 BlockConstPoolScope block_const_pool(this); | 154 BlockConstPoolScope block_const_pool(this); |
| 156 #ifdef DEBUG | 155 #ifdef DEBUG |
| 157 int pre_position = pc_offset(); | 156 int pre_position = pc_offset(); |
| 158 #endif | 157 #endif |
| 159 | 158 |
| 160 #if USE_BLX | 159 #if USE_BLX |
| 161 // On ARMv5 and after the recommended call sequence is: | 160 // On ARMv5 and after the recommended call sequence is: |
| 162 // ldr ip, [pc, #...] | 161 // ldr ip, [pc, #...] |
| 163 // blx ip | 162 // blx ip |
| (...skipping 44 matching lines...) |
| 208 #endif | 207 #endif |
| 209 } | 208 } |
| 210 | 209 |
| 211 | 210 |
| 212 int MacroAssembler::CallSize( | 211 int MacroAssembler::CallSize( |
| 213 Handle<Code> code, RelocInfo::Mode rmode, Condition cond) { | 212 Handle<Code> code, RelocInfo::Mode rmode, Condition cond) { |
| 214 return CallSize(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | 213 return CallSize(reinterpret_cast<intptr_t>(code.location()), rmode, cond); |
| 215 } | 214 } |
| 216 | 215 |
| 217 | 216 |
| 218 void MacroAssembler::CallWithAstId(Handle<Code> code, | 217 void MacroAssembler::Call( |
| 219 RelocInfo::Mode rmode, | 218 Handle<Code> code, RelocInfo::Mode rmode, Condition cond) { |
| 220 unsigned ast_id, | |
| 221 Condition cond) { | |
| 222 #ifdef DEBUG | |
| 223 int pre_position = pc_offset(); | |
| 224 #endif | |
| 225 | |
| 226 ASSERT(rmode == RelocInfo::CODE_TARGET_WITH_ID); | |
| 227 ASSERT(ast_id != kNoASTId); | |
| 228 ASSERT(ast_id_for_reloc_info_ == kNoASTId); | |
| 229 ast_id_for_reloc_info_ = ast_id; | |
| 230 // 'code' is always generated ARM code, never THUMB code | |
| 231 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | |
| 232 | |
| 233 #ifdef DEBUG | |
| 234 int post_position = pc_offset(); | |
| 235 CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position); | |
| 236 #endif | |
| 237 } | |
| 238 | |
| 239 | |
| 240 void MacroAssembler::Call(Handle<Code> code, | |
| 241 RelocInfo::Mode rmode, | |
| 242 Condition cond) { | |
| 243 #ifdef DEBUG | 219 #ifdef DEBUG |
| 244 int pre_position = pc_offset(); | 220 int pre_position = pc_offset(); |
| 245 #endif | 221 #endif |
| 246 | 222 |
| 247 ASSERT(RelocInfo::IsCodeTarget(rmode)); | 223 ASSERT(RelocInfo::IsCodeTarget(rmode)); |
| 248 // 'code' is always generated ARM code, never THUMB code | 224 // 'code' is always generated ARM code, never THUMB code |
| 249 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | 225 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); |
| 250 | 226 |
| 251 #ifdef DEBUG | 227 #ifdef DEBUG |
| 252 int post_position = pc_offset(); | 228 int post_position = pc_offset(); |
| (...skipping 2697 matching lines...) |
| 2950 void CodePatcher::EmitCondition(Condition cond) { | 2926 void CodePatcher::EmitCondition(Condition cond) { |
| 2951 Instr instr = Assembler::instr_at(masm_.pc_); | 2927 Instr instr = Assembler::instr_at(masm_.pc_); |
| 2952 instr = (instr & ~kCondMask) | cond; | 2928 instr = (instr & ~kCondMask) | cond; |
| 2953 masm_.emit(instr); | 2929 masm_.emit(instr); |
| 2954 } | 2930 } |
| 2955 | 2931 |
| 2956 | 2932 |
| 2957 } } // namespace v8::internal | 2933 } } // namespace v8::internal |
| 2958 | 2934 |
| 2959 #endif // V8_TARGET_ARCH_ARM | 2935 #endif // V8_TARGET_ARCH_ARM |
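
The pattern this diff relies on is that every `Call` overload is paired with a matching `CallSize` overload, and the `#ifdef DEBUG` blocks check that the bytes actually emitted equal the predicted size (`CHECK_EQ(pre_position + CallSize(...), post_position)`). Below is a minimal, self-contained sketch of that size-prediction pattern, not V8's real API: the class, the `needs_extra_instr` helper, and the placeholder byte emission are all hypothetical, standing in for `MacroAssembler`, `Operand::is_single_instruction`, and the actual `ldr ip, [pc, #...]` / `blx ip` sequence referenced in the diff.

```cpp
// Sketch of the "emit + size predictor + debug check" pattern from the diff.
// All names here are illustrative stand-ins, not V8 internals.
#include <cassert>
#include <cstdint>
#include <vector>

namespace sketch {

constexpr int kInstrSize = 4;  // ARM instructions are 4 bytes wide.

class FakeAssembler {
 public:
  int pc_offset() const { return static_cast<int>(buffer_.size()); }

  // Predict the size of the call sequence without emitting anything,
  // mirroring CallSize(intptr_t, RelocInfo::Mode, Condition) in the diff.
  static int CallSize(intptr_t target) {
    int size = 2 * kInstrSize;               // ldr ip, [pc, #...]; blx ip
    if (needs_extra_instr(target)) size += kInstrSize;
    return size;
  }

  // Emit the (fake) call sequence and verify the prediction, as the
  // DEBUG blocks in the diff do.
  void Call(intptr_t target) {
    int pre_position = pc_offset();
    emit();                                   // stands in for ldr ip, [pc, #...]
    if (needs_extra_instr(target)) emit();    // extra instruction for wide targets
    emit();                                   // stands in for blx ip
    assert(pre_position + CallSize(target) == pc_offset());
    (void)pre_position;                       // silence unused warning under NDEBUG
  }

 private:
  // Hypothetical stand-in for Operand::is_single_instruction(): pretend
  // small targets fit in one instruction and large ones need a second.
  static bool needs_extra_instr(intptr_t target) { return target > 0xFFFF; }

  void emit() { buffer_.insert(buffer_.end(), kInstrSize, 0); }

  std::vector<uint8_t> buffer_;
};

}  // namespace sketch

int main() {
  sketch::FakeAssembler masm;
  masm.Call(0x1234);      // the "single instruction" case: 2 * kInstrSize
  masm.Call(0x12345678);  // needs the extra instruction: 3 * kInstrSize
  return 0;
}
```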