| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 65 matching lines...) |
| 76 bx(target, cond); | 76 bx(target, cond); |
| 77 #else | 77 #else |
| 78 mov(pc, Operand(target), LeaveCC, cond); | 78 mov(pc, Operand(target), LeaveCC, cond); |
| 79 #endif | 79 #endif |
| 80 } | 80 } |
| 81 | 81 |
| 82 | 82 |
| 83 void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode, | 83 void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode, |
| 84 Condition cond) { | 84 Condition cond) { |
| 85 #if USE_BX | 85 #if USE_BX |
| 86 mov(ip, Operand(target, rmode), LeaveCC, cond); | 86 mov(ip, Operand(target, rmode)); |
| 87 bx(ip, cond); | 87 bx(ip, cond); |
| 88 #else | 88 #else |
| 89 mov(pc, Operand(target, rmode), LeaveCC, cond); | 89 mov(pc, Operand(target, rmode), LeaveCC, cond); |
| 90 #endif | 90 #endif |
| 91 } | 91 } |
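The change above makes the load of the target into ip unconditional; only the bx keeps the condition. A minimal standalone sketch of the resulting behavior (not V8 code; it assumes, as V8's ARM port does, that ip is a scratch register that may be clobbered freely):

    // Models the two-instruction pattern "mov ip, #target; bx<cond> ip".
    // The load into the scratch register happens regardless of the
    // condition; only the branch itself is predicated.
    #include <cstdint>
    #include <iostream>

    struct CpuState {
      uint32_t ip;   // scratch register (r12 on ARM)
      uint32_t pc;   // program counter
    };

    void JumpViaIp(CpuState* cpu, uint32_t target, bool cond_passes) {
      cpu->ip = target;     // mov ip, #target  -- now always executed
      if (cond_passes) {
        cpu->pc = target;   // bx<cond> ip      -- predicated branch
      }
    }

    int main() {
      CpuState cpu = {0, 0x1000};
      JumpViaIp(&cpu, 0x2000, false);
      // ip is clobbered even though the branch was not taken, which is
      // harmless for a scratch register.
      std::cout << std::hex << "ip=0x" << cpu.ip << " pc=0x" << cpu.pc << "\n";
      return 0;
    }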
| 92 | 92 |
| 93 | 93 |
| 94 void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode, | 94 void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode, |
| 95 Condition cond) { | 95 Condition cond) { |
| 96 ASSERT(!RelocInfo::IsCodeTarget(rmode)); | 96 ASSERT(!RelocInfo::IsCodeTarget(rmode)); |
| (...skipping 63 matching lines...) |
| 160 // On ARMv5 and after, the recommended call sequence is: | 160 // On ARMv5 and after, the recommended call sequence is: |
| 161 // ldr ip, [pc, #...] | 161 // ldr ip, [pc, #...] |
| 162 // blx ip | 162 // blx ip |
| 163 | 163 |
| 164 // Statement positions are expected to be recorded when the target | 164 // Statement positions are expected to be recorded when the target |
| 165 // address is loaded. The mov method will automatically record | 165 // address is loaded. The mov method will automatically record |
| 166 // positions when pc is the target. Since this is not the case here, | 166 // positions when pc is the target. Since this is not the case here, |
| 167 // we have to do it explicitly. | 167 // we have to do it explicitly. |
| 168 positions_recorder()->WriteRecordedPositions(); | 168 positions_recorder()->WriteRecordedPositions(); |
| 169 | 169 |
| 170 mov(ip, Operand(target, rmode), LeaveCC, cond); | 170 mov(ip, Operand(target, rmode)); |
| 171 blx(ip, cond); | 171 blx(ip, cond); |
| 172 | 172 |
| 173 ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); | 173 ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); |
| 174 #else | 174 #else |
| 175 // Set lr for return at current pc + 8. | 175 // Set lr for return at current pc + 8. |
| 176 mov(lr, Operand(pc), LeaveCC, cond); | 176 mov(lr, Operand(pc), LeaveCC, cond); |
| 177 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. | 177 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. |
| 178 mov(pc, Operand(target, rmode), LeaveCC, cond); | 178 mov(pc, Operand(target, rmode), LeaveCC, cond); |
| 179 ASSERT(kCallTargetAddressOffset == kInstrSize); | 179 ASSERT(kCallTargetAddressOffset == kInstrSize); |
| 180 #endif | 180 #endif |
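The non-BX fallback relies on the ARM convention that reading pc yields the address of the current instruction plus 8, which is why "mov lr, Operand(pc)" sets the return address to the instruction after the following load. A small self-contained sketch of that arithmetic (illustrative only; the addresses are made up):

    // Why "mov lr, pc" followed by "ldr pc, [...]" returns to the right
    // place: reading pc gives the current instruction address + 8, which
    // is exactly the instruction after the load.
    #include <cstdint>
    #include <cassert>

    int main() {
      const uint32_t kInstrSize = 4;
      uint32_t A = 0x8000;                        // address of "mov lr, pc"
      uint32_t lr = A + 8;                        // ARM pc read = current + 8
      uint32_t after_load = A + 2 * kInstrSize;   // instruction after the ldr
      assert(lr == after_load);                   // lr points past the call
      return 0;
    }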
| (...skipping 2675 matching lines...) |
| 2856 // Just call directly. The function called cannot cause a GC, or | 2856 // Just call directly. The function called cannot cause a GC, or |
| 2857 // allow preemption, so the return address in the link register | 2857 // allow preemption, so the return address in the link register |
| 2858 // stays correct. | 2858 // stays correct. |
| 2859 if (function.is(no_reg)) { | 2859 if (function.is(no_reg)) { |
| 2860 mov(scratch, Operand(function_reference)); | 2860 mov(scratch, Operand(function_reference)); |
| 2861 function = scratch; | 2861 function = scratch; |
| 2862 } | 2862 } |
| 2863 Call(function); | 2863 Call(function); |
| 2864 int stack_passed_arguments = (num_arguments <= kRegisterPassedArguments) ? | 2864 int stack_passed_arguments = (num_arguments <= kRegisterPassedArguments) ? |
| 2865 0 : num_arguments - kRegisterPassedArguments; | 2865 0 : num_arguments - kRegisterPassedArguments; |
| 2866 if (OS::ActivationFrameAlignment() > kPointerSize) { | 2866 if (ActivationFrameAlignment() > kPointerSize) { |
| 2867 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 2867 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
| 2868 } else { | 2868 } else { |
| 2869 add(sp, sp, Operand(stack_passed_arguments * kPointerSize)); | 2869 add(sp, sp, Operand(stack_passed_arguments * kPointerSize)); |
| 2870 } | 2870 } |
| 2871 } | 2871 } |
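After the call, the space used for stack-passed arguments is released: when ActivationFrameAlignment() exceeds kPointerSize, the stack was presumably realigned beforehand and the original sp saved on it (by the matching PrepareCallCFunction), so sp is reloaded; otherwise the arguments are simply popped. A small sketch of the argument-count arithmetic, assuming the AAPCS count of four register-passed arguments (r0-r3):

    // How many C arguments spill to the stack when the first few go in
    // registers (illustrative; kRegisterPassedArguments is 4 on ARM AAPCS).
    #include <cassert>

    int StackPassedArguments(int num_arguments, int register_passed) {
      return (num_arguments <= register_passed)
                 ? 0
                 : num_arguments - register_passed;
    }

    int main() {
      const int kRegisterPassedArguments = 4;  // r0-r3
      assert(StackPassedArguments(3, kRegisterPassedArguments) == 0);
      assert(StackPassedArguments(6, kRegisterPassedArguments) == 2);
      return 0;
    }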
| 2872 | 2872 |
| 2873 | 2873 |
| 2874 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, | 2874 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, |
| 2875 Register result) { | 2875 Register result) { |
| 2876 const uint32_t kLdrOffsetMask = (1 << 12) - 1; | 2876 const uint32_t kLdrOffsetMask = (1 << 12) - 1; |
| (...skipping 49 matching lines...) |
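The kLdrOffsetMask constant above selects the 12-bit immediate-offset field of an ARM ldr with register-plus-immediate addressing; the elided body uses it to locate the constant-pool slot that a pc-relative ldr refers to. A standalone sketch of the extraction (not V8 code; the encoding is the standard ARM single-data-transfer format):

    // Extract the 12-bit unsigned byte offset from "ldr Rt, [Rn, #imm12]".
    #include <cstdint>
    #include <cassert>

    uint32_t LdrImmediateOffset(uint32_t instr) {
      const uint32_t kLdrOffsetMask = (1u << 12) - 1;  // low 12 bits
      return instr & kLdrOffsetMask;
    }

    int main() {
      // "ldr r0, [pc, #36]" encodes as 0xE59F0024 (cond=AL, Rn=pc, Rt=r0).
      assert(LdrImmediateOffset(0xE59F0024u) == 36);
      return 0;
    }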
| 2926 void CodePatcher::EmitCondition(Condition cond) { | 2926 void CodePatcher::EmitCondition(Condition cond) { |
| 2927 Instr instr = Assembler::instr_at(masm_.pc_); | 2927 Instr instr = Assembler::instr_at(masm_.pc_); |
| 2928 instr = (instr & ~kCondMask) | cond; | 2928 instr = (instr & ~kCondMask) | cond; |
| 2929 masm_.emit(instr); | 2929 masm_.emit(instr); |
| 2930 } | 2930 } |
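EmitCondition overwrites only the condition field of the instruction at the patch position; on ARM the condition occupies the top four bits of every classic instruction, which is what kCondMask selects. The Condition enum values in V8 appear to already carry the <<28 shift, hence the plain `| cond` in the source; the sketch below (not V8 code, with kCondMask assumed to be 0xF0000000) shifts explicitly for clarity:

    // Replace the 4-bit condition field (bits 31-28) of an ARM instruction.
    #include <cstdint>
    #include <cassert>

    const uint32_t kCondMask = 0xF0000000u;  // assumption: matches V8's constant

    uint32_t WithCondition(uint32_t instr, uint32_t cond_bits) {
      return (instr & ~kCondMask) | (cond_bits << 28);
    }

    int main() {
      // Turn "mov r0, r1" (AL = 0xE) into "moveq r0, r1" (EQ = 0x0).
      uint32_t mov_al = 0xE1A00001u;
      assert(WithCondition(mov_al, 0x0u) == 0x01A00001u);
      return 0;
    }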
| 2931 | 2931 |
| 2932 | 2932 |
| 2933 } } // namespace v8::internal | 2933 } } // namespace v8::internal |
| 2934 | 2934 |
| 2935 #endif // V8_TARGET_ARCH_ARM | 2935 #endif // V8_TARGET_ARCH_ARM |