| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 73 matching lines...) |
| 84 Condition cond) { | 84 Condition cond) { |
| 85 #if USE_BX | 85 #if USE_BX |
| 86 mov(ip, Operand(target, rmode)); | 86 mov(ip, Operand(target, rmode)); |
| 87 bx(ip, cond); | 87 bx(ip, cond); |
| 88 #else | 88 #else |
| 89 mov(pc, Operand(target, rmode), LeaveCC, cond); | 89 mov(pc, Operand(target, rmode), LeaveCC, cond); |
| 90 #endif | 90 #endif |
| 91 } | 91 } |
| 92 | 92 |
| 93 | 93 |
| 94 void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode, | 94 void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode, |
| 95 Condition cond) { | 95 Condition cond) { |
| 96 ASSERT(!RelocInfo::IsCodeTarget(rmode)); | 96 ASSERT(!RelocInfo::IsCodeTarget(rmode)); |
| 97 Jump(reinterpret_cast<intptr_t>(target), rmode, cond); | 97 Jump(reinterpret_cast<intptr_t>(target), rmode, cond); |
| 98 } | 98 } |
| 99 | 99 |
| 100 | 100 |
| 101 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode, | 101 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode, |
| 102 Condition cond) { | 102 Condition cond) { |
| 103 ASSERT(RelocInfo::IsCodeTarget(rmode)); | 103 ASSERT(RelocInfo::IsCodeTarget(rmode)); |
| 104 // 'code' is always generated ARM code, never THUMB code | 104 // 'code' is always generated ARM code, never THUMB code |
| 105 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | 105 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond); |
| 106 } | 106 } |
| 107 | 107 |
| 108 | 108 |
| 109 int MacroAssembler::CallSize(Register target, Condition cond) { | 109 int MacroAssembler::CallSize(Register target, Condition cond) { |
| 110 #if USE_BLX | 110 #if USE_BLX |
| 111 return kInstrSize; | 111 return kInstrSize; |
| 112 #else | 112 #else |
| 113 return 2 * kInstrSize; | 113 return 2 * kInstrSize; |
| 114 #endif | 114 #endif |
| 115 } | 115 } |
| 116 | 116 |
| 117 | 117 |
| 118 void MacroAssembler::Call(Register target, Condition cond) { | 118 void MacroAssembler::Call(Register target, Condition cond) { |
| 119 // Block constant pool for the call instruction sequence. | 119 // Block constant pool for the call instruction sequence. |
| 120 BlockConstPoolScope block_const_pool(this); | 120 BlockConstPoolScope block_const_pool(this); |
| 121 #ifdef DEBUG | 121 Label start; |
| 122 int pre_position = pc_offset(); | 122 bind(&start); |
| 123 #endif | |
| 124 | |
| 125 #if USE_BLX | 123 #if USE_BLX |
| 126 blx(target, cond); | 124 blx(target, cond); |
| 127 #else | 125 #else |
| 128 // set lr for return at current pc + 8 | 126 // set lr for return at current pc + 8 |
| 129 mov(lr, Operand(pc), LeaveCC, cond); | 127 mov(lr, Operand(pc), LeaveCC, cond); |
| 130 mov(pc, Operand(target), LeaveCC, cond); | 128 mov(pc, Operand(target), LeaveCC, cond); |
| 131 #endif | 129 #endif |
| 132 | 130 ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start)); |
| 133 #ifdef DEBUG | |
| 134 int post_position = pc_offset(); | |
| 135 CHECK_EQ(pre_position + CallSize(target, cond), post_position); | |
| 136 #endif | |
| 137 } | 131 } |
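
The change above replaces the `#ifdef DEBUG` pc-offset bookkeeping (`pre_position`/`post_position` plus `CHECK_EQ`) with a `Label` bound at the start of the sequence and a single `ASSERT_EQ` against `SizeOfCodeGeneratedSince(&start)`. A minimal sketch of the idiom with a toy assembler (names and the 4-byte instruction size are stand-ins for illustration):

```cpp
#include <cassert>

struct Label { int pos = -1; };

struct ToyMasm {
  int pc_offset = 0;
  void bind(Label* l) { l->pos = pc_offset; }
  void emit() { pc_offset += 4; }  // one 4-byte ARM instruction
  int SizeOfCodeGeneratedSince(Label* l) { return pc_offset - l->pos; }
};

int main() {
  ToyMasm m;
  Label start;
  m.bind(&start);
  m.emit();  // stands in for blx(target, cond): one instruction
  assert(m.SizeOfCodeGeneratedSince(&start) == 4);  // == CallSize(target, cond)
  return 0;
}
```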
| 138 | 132 |
| 139 | 133 |
| 140 int MacroAssembler::CallSize( | 134 int MacroAssembler::CallSize( |
| 141 intptr_t target, RelocInfo::Mode rmode, Condition cond) { | 135 Address target, RelocInfo::Mode rmode, Condition cond) { |
| 142 int size = 2 * kInstrSize; | 136 int size = 2 * kInstrSize; |
| 143 Instr mov_instr = cond | MOV | LeaveCC; | 137 Instr mov_instr = cond | MOV | LeaveCC; |
| 144 if (!Operand(target, rmode).is_single_instruction(mov_instr)) { | 138 intptr_t immediate = reinterpret_cast<intptr_t>(target); |
| 139 if (!Operand(immediate, rmode).is_single_instruction(mov_instr)) { |
| 145 size += kInstrSize; | 140 size += kInstrSize; |
| 146 } | 141 } |
| 147 return size; | 142 return size; |
| 148 } | 143 } |
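
`CallSize` starts from two instructions (the `mov lr`/`mov pc` pair, or the `ldr ip`/`blx ip` pair) and adds one more when the target immediate cannot be materialized in a single instruction. For a plain immediate, "single instruction" on ARM means an 8-bit value rotated right by an even amount (the Operand2 encoding). A hedged sketch of that check, approximating what `is_single_instruction()` decides for ordinary immediates (the real test also accounts for constant-pool loads and movw/movt on newer cores):

```cpp
#include <cstdint>
#include <cassert>

// True if imm is an 8-bit value rotated right by an even amount, i.e.
// encodable as a single ARM data-processing immediate (Operand2).
bool FitsArmImmediate(uint32_t imm) {
  for (int rot = 0; rot < 32; rot += 2) {
    // Rotating left by rot undoes a rotate-right-by-rot encoding.
    uint32_t v = rot == 0 ? imm : (imm << rot) | (imm >> (32 - rot));
    if (v <= 0xFFu) return true;
  }
  return false;
}

int main() {
  assert(FitsArmImmediate(0xFFu));         // fits directly
  assert(FitsArmImmediate(0xFFu << 12));   // fits via rotation
  assert(!FitsArmImmediate(0x12345678u));  // typical address: needs extra work
  return 0;
}
```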
| 149 | 144 |
| 150 | 145 |
| 151 void MacroAssembler::Call(intptr_t target, | 146 void MacroAssembler::Call(Address target, |
| 152 RelocInfo::Mode rmode, | 147 RelocInfo::Mode rmode, |
| 153 Condition cond) { | 148 Condition cond) { |
| 154 // Block constant pool for the call instruction sequence. | 149 // Block constant pool for the call instruction sequence. |
| 155 BlockConstPoolScope block_const_pool(this); | 150 BlockConstPoolScope block_const_pool(this); |
| 156 #ifdef DEBUG | 151 Label start; |
| 157 int pre_position = pc_offset(); | 152 bind(&start); |
| 158 #endif | |
| 159 | |
| 160 #if USE_BLX | 153 #if USE_BLX |
| 161 // On ARMv5 and after the recommended call sequence is: | 154 // On ARMv5 and after the recommended call sequence is: |
| 162 // ldr ip, [pc, #...] | 155 // ldr ip, [pc, #...] |
| 163 // blx ip | 156 // blx ip |
| 164 | 157 |
| 165 // Statement positions are expected to be recorded when the target | 158 // Statement positions are expected to be recorded when the target |
| 166 // address is loaded. The mov method will automatically record | 159 // address is loaded. The mov method will automatically record |
| 167 // positions when pc is the target, since this is not the case here | 160 // positions when pc is the target, since this is not the case here |
| 168 // we have to do it explicitly. | 161 // we have to do it explicitly. |
| 169 positions_recorder()->WriteRecordedPositions(); | 162 positions_recorder()->WriteRecordedPositions(); |
| 170 | 163 |
| 171 mov(ip, Operand(target, rmode)); | 164 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); |
| 172 blx(ip, cond); | 165 blx(ip, cond); |
| 173 | 166 |
| 174 ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); | 167 ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); |
| 175 #else | 168 #else |
| 176 // Set lr for return at current pc + 8. | 169 // Set lr for return at current pc + 8. |
| 177 mov(lr, Operand(pc), LeaveCC, cond); | 170 mov(lr, Operand(pc), LeaveCC, cond); |
| 178 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. | 171 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. |
| 179 mov(pc, Operand(target, rmode), LeaveCC, cond); | 172 mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond); |
| 180 ASSERT(kCallTargetAddressOffset == kInstrSize); | 173 ASSERT(kCallTargetAddressOffset == kInstrSize); |
| 181 #endif | 174 #endif |
| 182 | 175 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); |
| 183 #ifdef DEBUG | |
| 184 int post_position = pc_offset(); | |
| 185 CHECK_EQ(pre_position + CallSize(target, rmode, cond), post_position); | |
| 186 #endif | |
| 187 } | 176 } |
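
The two asserts pin down `kCallTargetAddressOffset`: the distance from the return address back to the instruction that carries (or loads) the call target, which later code patching relies on to locate and rewrite the target. With the `ldr ip`/`blx ip` pair that is two instructions; with a single `mov pc` it is one. A toy illustration of the arithmetic (addresses are fabricated for the example):

```cpp
#include <cstdint>
#include <cassert>

const int kInstrSize = 4;

int main() {
  // USE_BLX sequence:  0x1000: ldr ip, [pc, #...]  ; loads the call target
  //                    0x1004: blx ip
  //                    0x1008: <return address>
  uintptr_t return_address = 0x1008;
  uintptr_t target_instr = return_address - 2 * kInstrSize;  // kCallTargetAddressOffset
  assert(target_instr == 0x1000);
  return 0;
}
```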
| 188 | 177 |
| 189 | 178 |
| 190 int MacroAssembler::CallSize( | 179 int MacroAssembler::CallSize(Handle<Code> code, |
| 191 byte* target, RelocInfo::Mode rmode, Condition cond) { | 180 RelocInfo::Mode rmode, |
| 192 return CallSize(reinterpret_cast<intptr_t>(target), rmode); | 181 unsigned ast_id, |
| 193 } | 182 Condition cond) { |
| 194 | 183 return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond); |
| 195 | |
| 196 void MacroAssembler::Call( | |
| 197 byte* target, RelocInfo::Mode rmode, Condition cond) { | |
| 198 #ifdef DEBUG | |
| 199 int pre_position = pc_offset(); | |
| 200 #endif | |
| 201 | |
| 202 ASSERT(!RelocInfo::IsCodeTarget(rmode)); | |
| 203 Call(reinterpret_cast<intptr_t>(target), rmode, cond); | |
| 204 | |
| 205 #ifdef DEBUG | |
| 206 int post_position = pc_offset(); | |
| 207 CHECK_EQ(pre_position + CallSize(target, rmode, cond), post_position); | |
| 208 #endif | |
| 209 } | |
| 210 | |
| 211 | |
| 212 int MacroAssembler::CallSize( | |
| 213 Handle<Code> code, RelocInfo::Mode rmode, Condition cond) { | |
| 214 return CallSize(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | |
| 215 } | |
| 216 | |
| 217 | |
| 218 void MacroAssembler::CallWithAstId(Handle<Code> code, | |
| 219 RelocInfo::Mode rmode, | |
| 220 unsigned ast_id, | |
| 221 Condition cond) { | |
| 222 #ifdef DEBUG | |
| 223 int pre_position = pc_offset(); | |
| 224 #endif | |
| 225 | |
| 226 ASSERT(rmode == RelocInfo::CODE_TARGET_WITH_ID); | |
| 227 ASSERT(ast_id != kNoASTId); | |
| 228 ASSERT(ast_id_for_reloc_info_ == kNoASTId); | |
| 229 ast_id_for_reloc_info_ = ast_id; | |
| 230 // 'code' is always generated ARM code, never THUMB code | |
| 231 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | |
| 232 | |
| 233 #ifdef DEBUG | |
| 234 int post_position = pc_offset(); | |
| 235 CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position); | |
| 236 #endif | |
| 237 } | 184 } |
| 238 | 185 |
| 239 | 186 |
| 240 void MacroAssembler::Call(Handle<Code> code, | 187 void MacroAssembler::Call(Handle<Code> code, |
| 241 RelocInfo::Mode rmode, | 188 RelocInfo::Mode rmode, |
| 189 unsigned ast_id, |
| 242 Condition cond) { | 190 Condition cond) { |
| 243 #ifdef DEBUG | 191 Label start; |
| 244 int pre_position = pc_offset(); | 192 bind(&start); |
| 245 #endif | |
| 246 | |
| 247 ASSERT(RelocInfo::IsCodeTarget(rmode)); | 193 ASSERT(RelocInfo::IsCodeTarget(rmode)); |
| 194 if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) { |
| 195 ASSERT(ast_id_for_reloc_info_ == kNoASTId); |
| 196 ast_id_for_reloc_info_ = ast_id; |
| 197 rmode = RelocInfo::CODE_TARGET_WITH_ID; |
| 198 } |
| 248 // 'code' is always generated ARM code, never THUMB code | 199 // 'code' is always generated ARM code, never THUMB code |
| 249 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | 200 Call(reinterpret_cast<Address>(code.location()), rmode, cond); |
| 250 | 201 ASSERT_EQ(CallSize(code, rmode, cond), SizeOfCodeGeneratedSince(&start)); |
| 251 #ifdef DEBUG | |
| 252 int post_position = pc_offset(); | |
| 253 CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position); | |
| 254 #endif | |
| 255 } | 202 } |
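
With this change the separate `CallWithAstId` entry point disappears: `Call(Handle<Code>, ...)` takes an optional `ast_id` and promotes the reloc mode to `CODE_TARGET_WITH_ID` itself when an id is supplied. A minimal sketch of just that folding logic (the enum values and `kNoASTId` sentinel below are stand-ins):

```cpp
#include <cassert>

enum Mode { CODE_TARGET, CODE_TARGET_WITH_ID };
const unsigned kNoASTId = 0xFFFFFFFFu;  // stand-in sentinel

Mode FoldAstId(Mode rmode, unsigned ast_id) {
  if (rmode == CODE_TARGET && ast_id != kNoASTId) {
    return CODE_TARGET_WITH_ID;  // ast_id gets recorded in the reloc info
  }
  return rmode;
}

int main() {
  assert(FoldAstId(CODE_TARGET, 7) == CODE_TARGET_WITH_ID);
  assert(FoldAstId(CODE_TARGET, kNoASTId) == CODE_TARGET);
  return 0;
}
```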
| 256 | 203 |
| 257 | 204 |
| 258 void MacroAssembler::Ret(Condition cond) { | 205 void MacroAssembler::Ret(Condition cond) { |
| 259 #if USE_BX | 206 #if USE_BX |
| 260 bx(lr, cond); | 207 bx(lr, cond); |
| 261 #else | 208 #else |
| 262 mov(pc, Operand(lr), LeaveCC, cond); | 209 mov(pc, Operand(lr), LeaveCC, cond); |
| 263 #endif | 210 #endif |
| 264 } | 211 } |
| (...skipping 722 matching lines...) |
| 987 | 934 |
| 988 if (!definitely_matches) { | 935 if (!definitely_matches) { |
| 989 if (!code_constant.is_null()) { | 936 if (!code_constant.is_null()) { |
| 990 mov(r3, Operand(code_constant)); | 937 mov(r3, Operand(code_constant)); |
| 991 add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); | 938 add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 992 } | 939 } |
| 993 | 940 |
| 994 Handle<Code> adaptor = | 941 Handle<Code> adaptor = |
| 995 isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 942 isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 996 if (flag == CALL_FUNCTION) { | 943 if (flag == CALL_FUNCTION) { |
| 997 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET)); | 944 call_wrapper.BeforeCall(CallSize(adaptor)); |
| 998 SetCallKind(r5, call_kind); | 945 SetCallKind(r5, call_kind); |
| 999 Call(adaptor, RelocInfo::CODE_TARGET); | 946 Call(adaptor); |
| 1000 call_wrapper.AfterCall(); | 947 call_wrapper.AfterCall(); |
| 1001 b(done); | 948 b(done); |
| 1002 } else { | 949 } else { |
| 1003 SetCallKind(r5, call_kind); | 950 SetCallKind(r5, call_kind); |
| 1004 Jump(adaptor, RelocInfo::CODE_TARGET); | 951 Jump(adaptor, RelocInfo::CODE_TARGET); |
| 1005 } | 952 } |
| 1006 bind(®ular_invoke); | 953 bind(®ular_invoke); |
| 1007 } | 954 } |
| 1008 } | 955 } |
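
The call sites above shrink from `Call(adaptor, RelocInfo::CODE_TARGET)` to a bare `Call(adaptor)`, which only compiles if the new `Call`/`CallSize` overloads declare default arguments. A sketch of the presumed pattern (the actual defaults live in macro-assembler-arm.h; the types and names here are stand-ins):

```cpp
#include <cstdio>

enum Mode { CODE_TARGET, CODE_TARGET_WITH_ID };
enum Condition { al };
const unsigned kNoASTId = 0xFFFFFFFFu;  // stand-in sentinel

struct Masm {
  // Presumed defaults mirroring the shortened call sites in the patch.
  void Call(int code, Mode rmode = CODE_TARGET,
            unsigned ast_id = kNoASTId, Condition cond = al) {
    std::printf("call %d rmode=%d ast_id=%u cond=%d\n",
                code, rmode, ast_id, cond);
  }
};

int main() {
  Masm m;
  m.Call(42);  // equivalent to m.Call(42, CODE_TARGET, kNoASTId, al)
  return 0;
}
```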
| 1009 | 956 |
| (...skipping 2180 matching lines...) |
| 3190 void CodePatcher::EmitCondition(Condition cond) { | 3137 void CodePatcher::EmitCondition(Condition cond) { |
| 3191 Instr instr = Assembler::instr_at(masm_.pc_); | 3138 Instr instr = Assembler::instr_at(masm_.pc_); |
| 3192 instr = (instr & ~kCondMask) | cond; | 3139 instr = (instr & ~kCondMask) | cond; |
| 3193 masm_.emit(instr); | 3140 masm_.emit(instr); |
| 3194 } | 3141 } |
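
`EmitCondition` rewrites only the 4-bit condition field (bits 31:28 of every ARM instruction) of the instruction at the current patch position, leaving the rest of the encoding intact. A self-contained sketch of the masking (the encodings below are standard ARM; the surrounding patcher machinery is omitted):

```cpp
#include <cstdint>
#include <cassert>

const uint32_t kCondMask = 0xF0000000u;  // condition field, bits 31:28
const uint32_t al = 0xE0000000u;         // condition "always"
const uint32_t eq = 0x00000000u;         // condition "equal"

uint32_t PatchCondition(uint32_t instr, uint32_t cond) {
  return (instr & ~kCondMask) | cond;
}

int main() {
  uint32_t add_r0_r1_r2 = 0xE0810002u;                       // add r0, r1, r2
  assert(PatchCondition(add_r0_r1_r2, eq) == 0x00810002u);   // addeq r0, r1, r2
  assert(PatchCondition(add_r0_r1_r2, al) == add_r0_r1_r2);  // no-op
  return 0;
}
```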
| 3195 | 3142 |
| 3196 | 3143 |
| 3197 } } // namespace v8::internal | 3144 } } // namespace v8::internal |
| 3198 | 3145 |
| 3199 #endif // V8_TARGET_ARCH_ARM | 3146 #endif // V8_TARGET_ARCH_ARM |