| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 73 matching lines...) |
| 84 return kInstrSize; | 84 return kInstrSize; |
| 85 } | 85 } |
| 86 | 86 |
| 87 | 87 |
| 88 void MacroAssembler::Call(Register target, Condition cond) { | 88 void MacroAssembler::Call(Register target, Condition cond) { |
| 89 // Block constant pool for the call instruction sequence. | 89 // Block constant pool for the call instruction sequence. |
| 90 BlockConstPoolScope block_const_pool(this); | 90 BlockConstPoolScope block_const_pool(this); |
| 91 Label start; | 91 Label start; |
| 92 bind(&start); | 92 bind(&start); |
| 93 blx(target, cond); | 93 blx(target, cond); |
| 94 ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start)); | |
| 95 } | 94 } |
| 96 | 95 |
| 97 | 96 |
| 98 int MacroAssembler::CallSize( | 97 int MacroAssembler::CallSize( |
| 99 Address target, RelocInfo::Mode rmode, Condition cond) { | 98 Address target, RelocInfo::Mode rmode, Condition cond) { |
| 100 int size = 2 * kInstrSize; | 99 int size = 2 * kInstrSize; |
| 101 Instr mov_instr = cond | MOV | LeaveCC; | 100 Instr mov_instr = cond | MOV | LeaveCC; |
| 102 intptr_t immediate = reinterpret_cast<intptr_t>(target); | 101 intptr_t immediate = reinterpret_cast<intptr_t>(target); |
| 103 if (!Operand(immediate, rmode).is_single_instruction(this, mov_instr)) { | 102 if (!Operand(immediate, rmode).is_single_instruction(this, mov_instr)) { |
| 104 size += kInstrSize; | 103 size += kInstrSize; |
| (...skipping 41 matching lines...) |
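A note on the CallSize hunk above: the call sequence is sized as a mov of the target address into ip followed by blx, with one extra instruction reserved when the immediate cannot be materialized by a single mov. The sketch below mirrors that sizing logic in plain C++; kInstrSize and the immediate-encoding test are stand-ins for the assembler's own helpers, not the real V8 API.

```cpp
#include <cstdint>

// Illustrative only: approximates the sizing logic of CallSize above.
const int kInstrSize = 4;  // ARM instructions are 4 bytes

// ARM data-processing immediates are an 8-bit value rotated right by an
// even amount; anything else needs an extra movt or a literal-pool load.
bool FitsArmImmediate(uint32_t imm) {
  for (int rot = 0; rot < 32; rot += 2) {
    uint32_t rotated = rot == 0 ? imm : (imm << rot) | (imm >> (32 - rot));
    if (rotated <= 0xFF) return true;
  }
  return false;
}

int CallSequenceSize(uint32_t target_address) {
  int size = 2 * kInstrSize;               // mov ip, #target ; blx ip
  if (!FitsArmImmediate(target_address)) {
    size += kInstrSize;                     // room for the extra instruction
  }
  return size;
}
```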
| 146 | 145 |
| 147 // Statement positions are expected to be recorded when the target | 146 // Statement positions are expected to be recorded when the target |
| 148 // address is loaded. The mov method will automatically record | 147 // address is loaded. The mov method will automatically record |
| 149 // positions when pc is the target, since this is not the case here | 148 // positions when pc is the target, since this is not the case here |
| 150 // we have to do it explicitly. | 149 // we have to do it explicitly. |
| 151 positions_recorder()->WriteRecordedPositions(); | 150 positions_recorder()->WriteRecordedPositions(); |
| 152 | 151 |
| 153 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); | 152 mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); |
| 154 blx(ip, cond); | 153 blx(ip, cond); |
| 155 | 154 |
| 156 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); | |
| 157 if (mode == NEVER_INLINE_TARGET_ADDRESS) { | 155 if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
| 158 set_predictable_code_size(old_predictable_code_size); | 156 set_predictable_code_size(old_predictable_code_size); |
| 159 } | 157 } |
| 160 } | 158 } |
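For the Call(Address) hunk above: a plain bl reaches only ±32 MB (a signed 24-bit word offset), so calling an arbitrary absolute address uses the long form shown here, loading the address into ip and calling through blx. A quick sketch of the range check that makes the long form necessary; the helper and its name are illustrative, not V8 API.

```cpp
#include <cstdint>

// BL/B on ARM encode a signed 24-bit word offset, giving a +/-32MB reach.
// Targets outside that range need the long form above: mov ip, #addr ; blx ip.
bool FitsInBlRange(int64_t from_pc, int64_t to_target) {
  int64_t offset = to_target - (from_pc + 8);   // PC reads as instr + 8
  return offset >= -(1 << 25) && offset < (1 << 25) && (offset & 3) == 0;
}
```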
| 161 | 159 |
| 162 | 160 |
| 163 int MacroAssembler::CallSize(Handle<Code> code, | 161 int MacroAssembler::CallSize(Handle<Code> code, |
| 164 RelocInfo::Mode rmode, | 162 RelocInfo::Mode rmode, |
| 165 TypeFeedbackId ast_id, | 163 TypeFeedbackId ast_id, |
| 166 Condition cond) { | 164 Condition cond) { |
| (...skipping 209 matching lines...) |
| 376 Condition cond) { | 374 Condition cond) { |
| 377 if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && | 375 if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && |
| 378 isolate()->heap()->RootCanBeTreatedAsConstant(index) && | 376 isolate()->heap()->RootCanBeTreatedAsConstant(index) && |
| 379 !predictable_code_size()) { | 377 !predictable_code_size()) { |
| 380 // The CPU supports fast immediate values, and this root will never | 378 // The CPU supports fast immediate values, and this root will never |
| 381 // change. We will load it as a relocatable immediate value. | 379 // change. We will load it as a relocatable immediate value. |
| 382 Handle<Object> root(&isolate()->heap()->roots_array_start()[index]); | 380 Handle<Object> root(&isolate()->heap()->roots_array_start()[index]); |
| 383 mov(destination, Operand(root), LeaveCC, cond); | 381 mov(destination, Operand(root), LeaveCC, cond); |
| 384 return; | 382 return; |
| 385 } | 383 } |
| 384 if (is_thumb_mode()) { |
| 385 emit_it(cond); |
| 386 } |
| 386 ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond); | 387 ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond); |
| 387 } | 388 } |
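The new is_thumb_mode() branch in LoadRoot reflects a Thumb-2 constraint: most Thumb instructions carry no condition field, so a conditional ldr has to be preceded by an IT (If-Then) instruction that supplies the condition for the following instruction. The patch's emit_it(cond) helper is assumed to emit that; below is a rough sketch of the single-instruction IT encoding from the ARMv7-A manual, shown only to make the idea concrete.

```cpp
#include <cstdint>

// IT <cond> covering exactly one following instruction:
// 16-bit encoding 0xBF00 | firstcond << 4 | mask, where mask 0b1000
// means "one instruction, no else branch". Illustrative, not V8 API.
uint16_t EncodeSingleInstructionIT(uint8_t cond /* ARM cond code 0..14 */) {
  return static_cast<uint16_t>(0xBF00 | (cond << 4) | 0x8);
}
```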
| 388 | 389 |
| 389 | 390 |
| 390 void MacroAssembler::StoreRoot(Register source, | 391 void MacroAssembler::StoreRoot(Register source, |
| 391 Heap::RootListIndex index, | 392 Heap::RootListIndex index, |
| 392 Condition cond) { | 393 Condition cond) { |
| 393 str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond); | 394 str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond); |
| 394 } | 395 } |
| 395 | 396 |
| (...skipping 750 matching lines...) |
| 1146 mov(r0, Operand(actual.immediate())); | 1147 mov(r0, Operand(actual.immediate())); |
| 1147 } else { | 1148 } else { |
| 1148 cmp(expected.reg(), Operand(actual.reg())); | 1149 cmp(expected.reg(), Operand(actual.reg())); |
| 1149 b(eq, ®ular_invoke); | 1150 b(eq, ®ular_invoke); |
| 1150 } | 1151 } |
| 1151 } | 1152 } |
| 1152 | 1153 |
| 1153 if (!definitely_matches) { | 1154 if (!definitely_matches) { |
| 1154 if (!code_constant.is_null()) { | 1155 if (!code_constant.is_null()) { |
| 1155 mov(r3, Operand(code_constant)); | 1156 mov(r3, Operand(code_constant)); |
| 1157 CheckModeBit(r3); |
| 1156 add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1158 add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 1157 } | 1159 } |
| 1158 | 1160 |
| 1159 Handle<Code> adaptor = | 1161 Handle<Code> adaptor = |
| 1160 isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 1162 isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 1161 if (flag == CALL_FUNCTION) { | 1163 if (flag == CALL_FUNCTION) { |
| 1162 call_wrapper.BeforeCall(CallSize(adaptor)); | 1164 call_wrapper.BeforeCall(CallSize(adaptor)); |
| 1163 SetCallKind(r5, call_kind); | 1165 SetCallKind(r5, call_kind); |
| 1164 Call(adaptor); | 1166 Call(adaptor); |
| 1165 call_wrapper.AfterCall(); | 1167 call_wrapper.AfterCall(); |
| (...skipping 2319 matching lines...) |
| 3485 num_reg_arguments, num_double_arguments); | 3487 num_reg_arguments, num_double_arguments); |
| 3486 if (ActivationFrameAlignment() > kPointerSize) { | 3488 if (ActivationFrameAlignment() > kPointerSize) { |
| 3487 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 3489 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
| 3488 } else { | 3490 } else { |
| 3489 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); | 3491 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); |
| 3490 } | 3492 } |
| 3491 } | 3493 } |
| 3492 | 3494 |
| 3493 | 3495 |
| 3494 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, | 3496 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, |
| 3495 Register result) { | 3497 Register result) { |
| 3496 const uint32_t kLdrOffsetMask = (1 << 12) - 1; | 3498 const uint32_t kLdrOffsetMask = (1 << 12) - 1; |
| 3497 const int32_t kPCRegOffset = 2 * kPointerSize; | 3499 const int32_t kPCRegOffset = 2 * kPointerSize; |
| 3500 const int32_t kThumbPCRegOffset = kPointerSize; |
| 3501 Label ldr_pc_arm, ldr_pc_thumb; |
| 3498 ldr(result, MemOperand(ldr_location)); | 3502 ldr(result, MemOperand(ldr_location)); |
| 3499 if (emit_debug_code()) { | 3503 if (FLAG_enable_thumb2_crankshaft || emit_debug_code()) { |
| 3500 // Check that the instruction is a ldr reg, [pc + offset] . | 3504 // Check that the instruction is a ldr reg, [pc + offset] . |
| 3501 and_(result, result, Operand(kLdrPCPattern)); | 3505 mov(ip, Operand(kLdrPCPattern)); |
| 3502 cmp(result, Operand(kLdrPCPattern)); | 3506 and_(result, result, Operand(ip)); |
| 3507 cmp(result, ip); |
| 3508 b(eq, &ldr_pc_arm); |
| 3509 // Result was clobbered. Restore it. |
| 3510 ldr(result, MemOperand(ldr_location, -3)); |
| 3511 mov(ip, Operand(kThumbLdrPCPattern)); |
| 3512 and_(result, result, Operand(ip)); |
| 3513 cmp(result, ip); |
| 3503 Check(eq, kTheInstructionToPatchShouldBeALoadFromPc); | 3514 Check(eq, kTheInstructionToPatchShouldBeALoadFromPc); |
| 3504 // Result was clobbered. Restore it. | 3515 ldr(result, MemOperand(ldr_location, 1)); |
| 3505 ldr(result, MemOperand(ldr_location)); | 3516 and_(result, result, Operand(kLdrOffsetMask)); |
| 3517 bic(ip, ldr_location, Operand(3)); |
| 3518 add(result, ip, Operand(result)); |
| 3519 add(result, result, Operand(kThumbPCRegOffset)); |
| 3520 b(&ldr_pc_thumb); |
| 3506 } | 3521 } |
| 3522 bind(&ldr_pc_arm); |
| 3523 ldr(result, MemOperand(ldr_location)); |
| 3524 |
| 3507 // Get the address of the constant. | 3525 // Get the address of the constant. |
| 3508 and_(result, result, Operand(kLdrOffsetMask)); | 3526 and_(result, result, Operand(kLdrOffsetMask)); |
| 3509 add(result, ldr_location, Operand(result)); | 3527 add(result, ldr_location, Operand(result)); |
| 3510 add(result, result, Operand(kPCRegOffset)); | 3528 add(result, result, Operand(kPCRegOffset)); |
| 3529 bind(&ldr_pc_thumb); |
| 3511 } | 3530 } |
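The rewritten GetRelocatedValueLocation decodes a pc-relative load to find the constant it reads, and the arithmetic differs by instruction set: in ARM state the PC reads as the instruction address plus 8 (kPCRegOffset), while in Thumb state it reads as the address plus 4 and is aligned down to a word boundary before the 12-bit offset is applied, which is what the bic by 3 plus kThumbPCRegOffset express. A standalone sketch of that address computation, with the offsets taken from the ARM architecture rather than from this file:

```cpp
#include <cstdint>

// Address read by "ldr rX, [pc, #offset]" given the instruction's own
// address. Sketch only; offset is the 12-bit immediate from the encoding.
uint32_t ArmLiteralAddress(uint32_t ldr_address, uint32_t offset) {
  return ldr_address + 8 + offset;            // ARM: PC reads as instr + 8
}

uint32_t ThumbLiteralAddress(uint32_t ldr_address, uint32_t offset) {
  uint32_t base = (ldr_address + 4) & ~3u;    // Thumb: instr + 4, word-aligned
  return base + offset;
}
```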
| 3512 | 3531 |
| 3513 | 3532 |
| 3514 void MacroAssembler::CheckPageFlag( | 3533 void MacroAssembler::CheckPageFlag( |
| 3515 Register object, | 3534 Register object, |
| 3516 Register scratch, | 3535 Register scratch, |
| 3517 int mask, | 3536 int mask, |
| 3518 Condition cc, | 3537 Condition cc, |
| 3519 Label* condition_met) { | 3538 Label* condition_met) { |
| 3520 Bfc(scratch, object, 0, kPageSizeBits); | 3539 Bfc(scratch, object, 0, kPageSizeBits); |
| (...skipping 296 matching lines...) |
| 3817 ldr(ip, MemOperand(ip)); | 3836 ldr(ip, MemOperand(ip)); |
| 3818 cmp(scratch_reg, ip); | 3837 cmp(scratch_reg, ip); |
| 3819 b(gt, &no_memento_available); | 3838 b(gt, &no_memento_available); |
| 3820 ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize)); | 3839 ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize)); |
| 3821 cmp(scratch_reg, | 3840 cmp(scratch_reg, |
| 3822 Operand(Handle<Map>(isolate()->heap()->allocation_memento_map()))); | 3841 Operand(Handle<Map>(isolate()->heap()->allocation_memento_map()))); |
| 3823 bind(&no_memento_available); | 3842 bind(&no_memento_available); |
| 3824 } | 3843 } |
| 3825 | 3844 |
| 3826 | 3845 |
| 3846 void MacroAssembler::CheckModeBit(Register code) { |
| 3847 ldr(ip, FieldMemOperand(code, Code::kFlagsOffset)); |
| 3848 tst(ip, Operand(0x1000)); |
| 3849 add(code, code, Operand(1), LeaveCC, ne); |
| 3850 } |
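CheckModeBit is the helper the InvokePrologue hunk now calls before adding the header offset: it reads the Code object's flags and, when the flag bit 0x1000 marks the code as Thumb, adds 1 to the code pointer so a later interworking BLX/BX switches into Thumb state (the register forms of those branches take bit 0 of the target as the state bit). A hedged standalone restatement of that idea; the flag bit is taken from the diff, everything else is illustrative.

```cpp
#include <cstdint>

// If the code object's flags mark it as Thumb code (bit 0x1000 in this
// patch), set bit 0 of the entry address so BLX/BX will switch to Thumb.
uint32_t EntryForCall(uint32_t code_flags, uint32_t entry_address) {
  const uint32_t kThumbCodeFlag = 0x1000;     // assumption from the diff
  return (code_flags & kThumbCodeFlag) ? (entry_address | 1u) : entry_address;
}
```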
| 3851 |
| 3852 |
| 3827 #ifdef DEBUG | 3853 #ifdef DEBUG |
| 3828 bool AreAliased(Register reg1, | 3854 bool AreAliased(Register reg1, |
| 3829 Register reg2, | 3855 Register reg2, |
| 3830 Register reg3, | 3856 Register reg3, |
| 3831 Register reg4, | 3857 Register reg4, |
| 3832 Register reg5, | 3858 Register reg5, |
| 3833 Register reg6) { | 3859 Register reg6) { |
| 3834 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + | 3860 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + |
| 3835 reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid(); | 3861 reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid(); |
| 3836 | 3862 |
| (...skipping 20 matching lines...) |
| 3857 // bytes of instructions without failing with buffer size constraints. | 3883 // bytes of instructions without failing with buffer size constraints. |
| 3858 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 3884 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 3859 } | 3885 } |
| 3860 | 3886 |
| 3861 | 3887 |
| 3862 CodePatcher::~CodePatcher() { | 3888 CodePatcher::~CodePatcher() { |
| 3863 // Indicate that code has changed. | 3889 // Indicate that code has changed. |
| 3864 CPU::FlushICache(address_, size_); | 3890 CPU::FlushICache(address_, size_); |
| 3865 | 3891 |
| 3866 // Check that the code was patched as expected. | 3892 // Check that the code was patched as expected. |
| 3867 ASSERT(masm_.pc_ == address_ + size_); | |
| 3868 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 3893 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 3869 } | 3894 } |
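CodePatcher wraps a temporary MacroAssembler over an existing code region so individual instructions can be rewritten in place; its destructor flushes the instruction cache for the patched range, as shown above. A hedged usage sketch follows; the patched address, instruction count, and emitted instruction are illustrative, not taken from this file.

```cpp
// Usage sketch only; CodePatcher's constructor is assumed to take the
// start address and the number of instructions to patch.
void PatchWithNop(byte* instruction_address) {
  CodePatcher patcher(instruction_address, 1);   // room for one instruction
  patcher.masm()->nop();                         // overwrite it with a nop
}  // ~CodePatcher flushes the icache for the patched range
```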
| 3870 | 3895 |
| 3871 | 3896 |
| 3872 void CodePatcher::Emit(Instr instr) { | 3897 void CodePatcher::Emit(Instr instr) { |
| 3873 masm()->emit(instr); | 3898 masm()->emit(instr); |
| 3874 } | 3899 } |
| 3875 | 3900 |
| 3876 | 3901 |
| 3877 void CodePatcher::Emit(Address addr) { | 3902 void CodePatcher::Emit(Address addr) { |
| 3878 masm()->emit(reinterpret_cast<Instr>(addr)); | 3903 masm()->emit(reinterpret_cast<Instr>(addr)); |
| 3879 } | 3904 } |
| 3880 | 3905 |
| 3881 | 3906 |
| 3882 void CodePatcher::EmitCondition(Condition cond) { | 3907 void CodePatcher::EmitCondition(Condition cond) { |
| 3883 Instr instr = Assembler::instr_at(masm_.pc_); | 3908 Instr instr = Assembler::instr_at(masm_.pc_); |
| 3884 instr = (instr & ~kCondMask) | cond; | 3909 instr = (instr & ~kCondMask) | cond; |
| 3885 masm_.emit(instr); | 3910 masm_.emit(instr); |
| 3886 } | 3911 } |
| 3887 | 3912 |
| 3888 | 3913 |
| 3889 } } // namespace v8::internal | 3914 } } // namespace v8::internal |
| 3890 | 3915 |
| 3891 #endif // V8_TARGET_ARCH_ARM | 3916 #endif // V8_TARGET_ARCH_ARM |