OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
6 | 6 |
7 #include "src/v8.h" | 7 #include "src/v8.h" |
8 | 8 |
9 #if V8_TARGET_ARCH_ARM | 9 #if V8_TARGET_ARCH_ARM |
10 | 10 |
(...skipping 56 matching lines...)
67 BlockConstPoolScope block_const_pool(this); | 67 BlockConstPoolScope block_const_pool(this); |
68 Label start; | 68 Label start; |
69 bind(&start); | 69 bind(&start); |
70 blx(target, cond); | 70 blx(target, cond); |
71 ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start)); | 71 ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start)); |
72 } | 72 } |
73 | 73 |
74 | 74 |
75 int MacroAssembler::CallSize( | 75 int MacroAssembler::CallSize( |
76 Address target, RelocInfo::Mode rmode, Condition cond) { | 76 Address target, RelocInfo::Mode rmode, Condition cond) { |
77 int size = 2 * kInstrSize; | |
78 Instr mov_instr = cond | MOV | LeaveCC; | 77 Instr mov_instr = cond | MOV | LeaveCC; |
79 intptr_t immediate = reinterpret_cast<intptr_t>(target); | 78 Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode); |
80 if (!Operand(immediate, rmode).is_single_instruction(this, mov_instr)) { | 79 return kInstrSize + |
81 size += kInstrSize; | 80 mov_operand.instructions_required(this, mov_instr) * kInstrSize; |
82 } | |
83 return size; | |
84 } | 81 } |
85 | 82 |
86 | 83 |
87 int MacroAssembler::CallStubSize( | 84 int MacroAssembler::CallStubSize( |
88 CodeStub* stub, TypeFeedbackId ast_id, Condition cond) { | 85 CodeStub* stub, TypeFeedbackId ast_id, Condition cond) { |
89 return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond); | 86 return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond); |
90 } | 87 } |
91 | 88 |
92 | 89 |
93 int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate, | 90 int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate, |
94 Address target, | 91 Address target, |
95 RelocInfo::Mode rmode, | 92 RelocInfo::Mode rmode, |
96 Condition cond) { | 93 Condition cond) { |
97 int size = 2 * kInstrSize; | |
98 Instr mov_instr = cond | MOV | LeaveCC; | 94 Instr mov_instr = cond | MOV | LeaveCC; |
99 intptr_t immediate = reinterpret_cast<intptr_t>(target); | 95 Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode); |
100 if (!Operand(immediate, rmode).is_single_instruction(NULL, mov_instr)) { | 96 return kInstrSize + |
101 size += kInstrSize; | 97 mov_operand.instructions_required(NULL, mov_instr) * kInstrSize; |
102 } | |
103 return size; | |
104 } | 98 } |
105 | 99 |
106 | 100 |
107 void MacroAssembler::Call(Address target, | 101 void MacroAssembler::Call(Address target, |
108 RelocInfo::Mode rmode, | 102 RelocInfo::Mode rmode, |
109 Condition cond, | 103 Condition cond, |
110 TargetAddressStorageMode mode) { | 104 TargetAddressStorageMode mode) { |
111 // Block constant pool for the call instruction sequence. | 105 // Block constant pool for the call instruction sequence. |
112 BlockConstPoolScope block_const_pool(this); | 106 BlockConstPoolScope block_const_pool(this); |
113 Label start; | 107 Label start; |
(...skipping 152 matching lines...)
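The CallSize / CallSizeNotPredictableCodeSize rewrite above drops the hard-coded assumption that the mov of the call target is one or two instructions and instead asks the operand via Operand::instructions_required(), which also covers longer sequences (e.g. when the target has to come from an out-of-line constant pool). A minimal stand-alone sketch of the resulting size computation, not V8 code, with illustrative names only:

// Sketch: byte size of a "mov ip, #target; blx ip" call sequence, given how
// many instructions the mov expands to (the role instructions_required()
// plays in the real code).
#include <cstdio>

constexpr int kInstrSize = 4;  // ARM instructions are 4 bytes each.

int CallSequenceSize(int mov_instruction_count) {
  return kInstrSize /* blx */ + mov_instruction_count * kInstrSize;
}

int main() {
  for (int n = 1; n <= 3; ++n)
    std::printf("mov in %d instruction(s) -> %d-byte call sequence\n", n,
                CallSequenceSize(n));
  return 0;
}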
266 } | 260 } |
267 } | 261 } |
268 | 262 |
269 | 263 |
270 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, | 264 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, |
271 Condition cond) { | 265 Condition cond) { |
272 if (!src2.is_reg() && | 266 if (!src2.is_reg() && |
273 !src2.must_output_reloc_info(this) && | 267 !src2.must_output_reloc_info(this) && |
274 src2.immediate() == 0) { | 268 src2.immediate() == 0) { |
275 mov(dst, Operand::Zero(), LeaveCC, cond); | 269 mov(dst, Operand::Zero(), LeaveCC, cond); |
276 } else if (!src2.is_single_instruction(this) && | 270 } else if (!(src2.instructions_required(this) == 1) && |
277 !src2.must_output_reloc_info(this) && | 271 !src2.must_output_reloc_info(this) && |
278 CpuFeatures::IsSupported(ARMv7) && | 272 CpuFeatures::IsSupported(ARMv7) && |
279 IsPowerOf2(src2.immediate() + 1)) { | 273 IsPowerOf2(src2.immediate() + 1)) { |
280 ubfx(dst, src1, 0, | 274 ubfx(dst, src1, 0, |
281 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); | 275 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); |
282 } else { | 276 } else { |
283 and_(dst, src1, src2, LeaveCC, cond); | 277 and_(dst, src1, src2, LeaveCC, cond); |
284 } | 278 } |
285 } | 279 } |
286 | 280 |
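The And() change above keeps an existing peephole under the renamed predicate: when the immediate mask has the form 2^n - 1 and cannot be materialized in a single instruction, the and is emitted as a ubfx of the low n bits. A small stand-alone check of that equivalence (plain C++ sketch, not V8 code; widths limited to 1..31 so the shifts stay well defined):

// Sketch: "x & (2^n - 1)" equals an unsigned bitfield extract of the low
// n bits, which is what "ubfx dst, src, #0, #n" computes on ARM.
#include <cassert>
#include <cstdint>

uint32_t and_with_low_mask(uint32_t x, int n) { return x & ((1u << n) - 1u); }

uint32_t ubfx_low_bits(uint32_t x, int n) {
  // Take n bits starting at bit 0 and zero-extend the result.
  return (x << (32 - n)) >> (32 - n);
}

int main() {
  for (int n = 1; n <= 31; ++n)
    assert(and_with_low_mask(0xDEADBEEFu, n) == ubfx_low_bits(0xDEADBEEFu, n));
  return 0;
}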
(...skipping 1557 matching lines...)
1844 Condition cond = al; | 1838 Condition cond = al; |
1845 int shift = 0; | 1839 int shift = 0; |
1846 while (object_size != 0) { | 1840 while (object_size != 0) { |
1847 if (((object_size >> shift) & 0x03) == 0) { | 1841 if (((object_size >> shift) & 0x03) == 0) { |
1848 shift += 2; | 1842 shift += 2; |
1849 } else { | 1843 } else { |
1850 int bits = object_size & (0xff << shift); | 1844 int bits = object_size & (0xff << shift); |
1851 object_size -= bits; | 1845 object_size -= bits; |
1852 shift += 8; | 1846 shift += 8; |
1853 Operand bits_operand(bits); | 1847 Operand bits_operand(bits); |
1854 ASSERT(bits_operand.is_single_instruction(this)); | 1848 ASSERT(bits_operand.instructions_required(this) == 1); |
1855 add(scratch2, source, bits_operand, SetCC, cond); | 1849 add(scratch2, source, bits_operand, SetCC, cond); |
1856 source = scratch2; | 1850 source = scratch2; |
1857 cond = cc; | 1851 cond = cc; |
1858 } | 1852 } |
1859 } | 1853 } |
1860 b(cs, gc_required); | 1854 b(cs, gc_required); |
1861 cmp(scratch2, Operand(ip)); | 1855 cmp(scratch2, Operand(ip)); |
1862 b(hi, gc_required); | 1856 b(hi, gc_required); |
1863 str(scratch2, MemOperand(topaddr)); | 1857 str(scratch2, MemOperand(topaddr)); |
1864 | 1858 |
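The ASSERT renamed above sits in a loop that adds object_size to the allocation top in chunks, each chunk chosen so that it fits a single ARM data-processing immediate (an 8-bit value rotated right by an even amount), with the adds chained through the carry flag so that any overflow branches to gc_required. The same decomposition in stand-alone form (plain C++ sketch, not V8 code):

// Sketch: split a constant into chunks the way the Allocate() loop above
// does: skip zero 2-bit groups, then peel off the next 8 bits. Each chunk is
// an 8-bit value at an even bit position, so it encodes as a single rotated
// immediate and each add needs exactly one instruction.
#include <cstdint>
#include <cstdio>
#include <vector>

std::vector<uint32_t> DecomposeForSingleInstructionAdds(uint32_t object_size) {
  std::vector<uint32_t> chunks;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      uint32_t bits = object_size & (0xffu << shift);
      object_size -= bits;
      shift += 8;
      chunks.push_back(bits);
    }
  }
  return chunks;
}

int main() {
  for (uint32_t chunk : DecomposeForSingleInstructionAdds(0x123456u))
    std::printf("add ..., #0x%x\n", static_cast<unsigned>(chunk));
  return 0;
}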
(...skipping 1758 matching lines...)
3623 num_reg_arguments, num_double_arguments); | 3617 num_reg_arguments, num_double_arguments); |
3624 if (ActivationFrameAlignment() > kPointerSize) { | 3618 if (ActivationFrameAlignment() > kPointerSize) { |
3625 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 3619 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
3626 } else { | 3620 } else { |
3627 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); | 3621 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); |
3628 } | 3622 } |
3629 } | 3623 } |
3630 | 3624 |
3631 | 3625 |
3632 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, | 3626 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, |
3633 Register result) { | 3627 Register result, |
3634 const uint32_t kLdrOffsetMask = (1 << 12) - 1; | 3628 Register scratch) { |
| 3629 Label small_constant_pool_load, load_result; |
3635 ldr(result, MemOperand(ldr_location)); | 3630 ldr(result, MemOperand(ldr_location)); |
| 3631 |
| 3632 if (FLAG_enable_ool_constant_pool) { |
| 3633 // Check if this is an extended constant pool load. |
| 3634 and_(scratch, result, Operand(GetConsantPoolLoadMask())); |
| 3635 teq(scratch, Operand(GetConsantPoolLoadPattern())); |
| 3636 b(eq, &small_constant_pool_load); |
| 3637 if (emit_debug_code()) { |
| 3638 // Check that the instruction sequence is: |
| 3639 // movw reg, #offset_low |
| 3640 // movt reg, #offset_high |
| 3641 // ldr reg, [pp, reg] |
| 3642 Instr patterns[] = {GetMovWPattern(), GetMovTPattern(), |
| 3643 GetLdrPpRegOffsetPattern()}; |
| 3644 for (int i = 0; i < 3; i++) { |
| 3645 ldr(result, MemOperand(ldr_location, i * kInstrSize)); |
| 3646 and_(result, result, Operand(patterns[i])); |
| 3647 cmp(result, Operand(patterns[i])); |
| 3648 Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool); |
| 3649 } |
| 3650 // Result was clobbered. Restore it. |
| 3651 ldr(result, MemOperand(ldr_location)); |
| 3652 } |
| 3653 |
| 3654 // Get the offset into the constant pool. First extract movw immediate into |
| 3655 // result. |
| 3656 and_(scratch, result, Operand(0xfff)); |
| 3657 mov(ip, Operand(result, LSR, 4)); |
| 3658 and_(ip, ip, Operand(0xf000)); |
| 3659 orr(result, scratch, Operand(ip)); |
| 3660 // Then extract movt immediate and or into result. |
| 3661 ldr(scratch, MemOperand(ldr_location, kInstrSize)); |
| 3662 and_(ip, scratch, Operand(0xf0000)); |
| 3663 orr(result, result, Operand(ip, LSL, 12)); |
| 3664 and_(scratch, scratch, Operand(0xfff)); |
| 3665 orr(result, result, Operand(scratch, LSL, 16)); |
| 3666 |
| 3667 b(&load_result); |
| 3668 } |
| 3669 |
| 3670 bind(&small_constant_pool_load); |
3636 if (emit_debug_code()) { | 3671 if (emit_debug_code()) { |
3637 // Check that the instruction is a ldr reg, [<pc or pp> + offset] . | 3672 // Check that the instruction is a ldr reg, [<pc or pp> + offset] . |
3638 and_(result, result, Operand(GetConsantPoolLoadPattern())); | 3673 and_(result, result, Operand(GetConsantPoolLoadPattern())); |
3639 cmp(result, Operand(GetConsantPoolLoadPattern())); | 3674 cmp(result, Operand(GetConsantPoolLoadPattern())); |
3640 Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool); | 3675 Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool); |
3641 // Result was clobbered. Restore it. | 3676 // Result was clobbered. Restore it. |
3642 ldr(result, MemOperand(ldr_location)); | 3677 ldr(result, MemOperand(ldr_location)); |
3643 } | 3678 } |
| 3679 |
| 3680 // Get the offset into the constant pool. |
| 3681 const uint32_t kLdrOffsetMask = (1 << 12) - 1; |
| 3682 and_(result, result, Operand(kLdrOffsetMask)); |
| 3683 |
| 3684 bind(&load_result); |
3644 // Get the address of the constant. | 3685 // Get the address of the constant. |
3645 and_(result, result, Operand(kLdrOffsetMask)); | |
3646 if (FLAG_enable_ool_constant_pool) { | 3686 if (FLAG_enable_ool_constant_pool) { |
3647 add(result, pp, Operand(result)); | 3687 add(result, pp, Operand(result)); |
3648 } else { | 3688 } else { |
3649 add(result, ldr_location, Operand(result)); | 3689 add(result, ldr_location, Operand(result)); |
3650 add(result, result, Operand(Instruction::kPCReadOffset)); | 3690 add(result, result, Operand(Instruction::kPCReadOffset)); |
3651 } | 3691 } |
3652 } | 3692 } |
3653 | 3693 |
3654 | 3694 |
3655 void MacroAssembler::CheckPageFlag( | 3695 void MacroAssembler::CheckPageFlag( |
(...skipping 431 matching lines...)
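The extended-constant-pool path added to GetRelocatedValueLocation above reassembles a 32-bit pool offset from the immediates of a movw/movt pair; in both encodings the 16-bit immediate is split into imm4 (bits 19:16) and imm12 (bits 11:0). A stand-alone sketch of the same reconstruction (plain C++, not V8 code; the instruction words in main are hypothetical and carry only the immediate fields):

// Sketch: rebuild the 32-bit value that "movw reg, #lo16; movt reg, #hi16"
// materializes, given the raw instruction words, mirroring the and/orr
// sequence in the hunk above.
#include <cassert>
#include <cstdint>

uint32_t ImmediateOf(uint32_t instr) {
  // imm16 = imm4:imm12, with imm4 at bits 19:16 and imm12 at bits 11:0.
  return ((instr >> 4) & 0xf000u) | (instr & 0x0fffu);
}

uint32_t ReconstructOffset(uint32_t movw_instr, uint32_t movt_instr) {
  return ImmediateOf(movw_instr) | (ImmediateOf(movt_instr) << 16);
}

int main() {
  uint32_t movw = (0x1u << 16) | 0x234u;  // hypothetical word encoding lo16 = 0x1234
  uint32_t movt = (0x0u << 16) | 0x056u;  // hypothetical word encoding hi16 = 0x0056
  assert(ReconstructOffset(movw, movt) == 0x00561234u);
  return 0;
}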
4087 sub(result, result, Operand(dividend)); | 4127 sub(result, result, Operand(dividend)); |
4088 } | 4128 } |
4089 if (ms.shift() > 0) mov(result, Operand(result, ASR, ms.shift())); | 4129 if (ms.shift() > 0) mov(result, Operand(result, ASR, ms.shift())); |
4090 add(result, result, Operand(dividend, LSR, 31)); | 4130 add(result, result, Operand(dividend, LSR, 31)); |
4091 } | 4131 } |
4092 | 4132 |
4093 | 4133 |
4094 } } // namespace v8::internal | 4134 } } // namespace v8::internal |
4095 | 4135 |
4096 #endif // V8_TARGET_ARCH_ARM | 4136 #endif // V8_TARGET_ARCH_ARM |