OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
6 | 6 |
7 #include "v8.h" | 7 #include "v8.h" |
8 | 8 |
9 #if V8_TARGET_ARCH_ARM | 9 #if V8_TARGET_ARCH_ARM |
10 | 10 |
(...skipping 59 matching lines...) |
70 blx(target, cond); | 70 blx(target, cond); |
71 ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start)); | 71 ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start)); |
72 } | 72 } |
73 | 73 |
74 | 74 |
75 int MacroAssembler::CallSize( | 75 int MacroAssembler::CallSize( |
76 Address target, RelocInfo::Mode rmode, Condition cond) { | 76 Address target, RelocInfo::Mode rmode, Condition cond) { |
77 int size = 2 * kInstrSize; | 77 int size = 2 * kInstrSize; |
78 Instr mov_instr = cond | MOV | LeaveCC; | 78 Instr mov_instr = cond | MOV | LeaveCC; |
79 intptr_t immediate = reinterpret_cast<intptr_t>(target); | 79 intptr_t immediate = reinterpret_cast<intptr_t>(target); |
80 if (!Operand(immediate, rmode).is_single_instruction(this, mov_instr)) { | 80 if (!Operand(immediate, rmode).is_single_instruction(isolate(), |
| 81 this, |
| 82 mov_instr)) { |
81 size += kInstrSize; | 83 size += kInstrSize; |
82 } | 84 } |
83 return size; | 85 return size; |
84 } | 86 } |
85 | 87 |
86 | 88 |
87 int MacroAssembler::CallStubSize( | 89 int MacroAssembler::CallStubSize( |
88 CodeStub* stub, TypeFeedbackId ast_id, Condition cond) { | 90 CodeStub* stub, TypeFeedbackId ast_id, Condition cond) { |
89 return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond); | 91 return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond); |
90 } | 92 } |
91 | 93 |
92 | 94 |
93 int MacroAssembler::CallSizeNotPredictableCodeSize( | 95 int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate, |
94 Address target, RelocInfo::Mode rmode, Condition cond) { | 96 Address target, |
| 97 RelocInfo::Mode rmode, |
| 98 Condition cond) { |
95 int size = 2 * kInstrSize; | 99 int size = 2 * kInstrSize; |
96 Instr mov_instr = cond | MOV | LeaveCC; | 100 Instr mov_instr = cond | MOV | LeaveCC; |
97 intptr_t immediate = reinterpret_cast<intptr_t>(target); | 101 intptr_t immediate = reinterpret_cast<intptr_t>(target); |
98 if (!Operand(immediate, rmode).is_single_instruction(NULL, mov_instr)) { | 102 if (!Operand(immediate, rmode).is_single_instruction(isolate, |
| 103 NULL, |
| 104 mov_instr)) { |
99 size += kInstrSize; | 105 size += kInstrSize; |
100 } | 106 } |
101 return size; | 107 return size; |
102 } | 108 } |
103 | 109 |
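Note on the two CallSize changes above: the call sequence is at least two instructions (materialize the target address, then blx), and a third kInstrSize is counted whenever Operand::is_single_instruction() reports that the immediate needs an extra instruction; the only functional change here is that an Isolate now reaches that query (via isolate() in the member overload, and as an explicit first parameter in the NotPredictableCodeSize variant). A minimal sketch of the size rule, with fits_in_single_mov standing in for the Operand query (the helper name is an assumption, not V8 API):

    // Sketch only: fits_in_single_mov stands in for
    // Operand(immediate, rmode).is_single_instruction(isolate, assembler, mov_instr).
    const int kInstrSize = 4;  // ARM instructions are 4 bytes

    int CallSizeSketch(bool fits_in_single_mov) {
      int size = 2 * kInstrSize;    // mov of the target + blx
      if (!fits_in_single_mov) {
        size += kInstrSize;         // extra instruction to build the 32-bit address
      }
      return size;
    }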
104 | 110 |
105 void MacroAssembler::Call(Address target, | 111 void MacroAssembler::Call(Address target, |
106 RelocInfo::Mode rmode, | 112 RelocInfo::Mode rmode, |
107 Condition cond, | 113 Condition cond, |
108 TargetAddressStorageMode mode) { | 114 TargetAddressStorageMode mode) { |
(...skipping 139 matching lines...) |
248 void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) { | 254 void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) { |
249 if (!dst.is(src)) { | 255 if (!dst.is(src)) { |
250 vmov(dst, src); | 256 vmov(dst, src); |
251 } | 257 } |
252 } | 258 } |
253 | 259 |
254 | 260 |
255 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, | 261 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, |
256 Condition cond) { | 262 Condition cond) { |
257 if (!src2.is_reg() && | 263 if (!src2.is_reg() && |
258 !src2.must_output_reloc_info(this) && | 264 !src2.must_output_reloc_info(isolate(), this) && |
259 src2.immediate() == 0) { | 265 src2.immediate() == 0) { |
260 mov(dst, Operand::Zero(), LeaveCC, cond); | 266 mov(dst, Operand::Zero(), LeaveCC, cond); |
261 } else if (!src2.is_single_instruction(this) && | 267 } else if (!src2.is_single_instruction(isolate(), this) && |
262 !src2.must_output_reloc_info(this) && | 268 !src2.must_output_reloc_info(isolate(), this) && |
263 CpuFeatures::IsSupported(ARMv7) && | 269 CpuFeatures::IsSupported(ARMv7) && |
264 IsPowerOf2(src2.immediate() + 1)) { | 270 IsPowerOf2(src2.immediate() + 1)) { |
265 ubfx(dst, src1, 0, | 271 ubfx(dst, src1, 0, |
266 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); | 272 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); |
267 } else { | 273 } else { |
268 and_(dst, src1, src2, LeaveCC, cond); | 274 and_(dst, src1, src2, LeaveCC, cond); |
269 } | 275 } |
270 } | 276 } |
271 | 277 |
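The middle branch of And() above is the interesting one: when src2.immediate() + 1 is a power of two, the immediate is a contiguous mask of low bits, so the and can be done with a single ubfx (unsigned bit field extract, ARMv7 and later, hence the CpuFeatures guard) even when the mask itself would not encode as a single-instruction operand. A small plain-C++ check of that identity; the helper below is illustrative only, with __builtin_ctz playing the role of WhichPowerOf2:

    #include <cassert>
    #include <cstdint>

    // x & mask, where mask == 2^k - 1, equals keeping the low k bits of x --
    // exactly what ubfx dst, src, #0, #k does.
    uint32_t and_via_extract(uint32_t x, uint32_t mask) {
      assert(mask + 1 != 0 && ((mask + 1) & mask) == 0);  // mask is 2^k - 1
      unsigned k = __builtin_ctz(mask + 1);               // k = WhichPowerOf2(mask + 1)
      return x & ((1u << k) - 1u);                        // low-k-bit extract
    }

    int main() {
      for (uint32_t x : {0u, 1u, 0xdeadbeefu, 0xffffffffu}) {
        assert(and_via_extract(x, 0xff) == (x & 0xff));
        assert(and_via_extract(x, 0x3ff) == (x & 0x3ff));
      }
    }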
272 | 278 |
(...skipping 354 matching lines...) |
627 | 633 |
628 void MacroAssembler::PopSafepointRegisters() { | 634 void MacroAssembler::PopSafepointRegisters() { |
629 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 635 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |
630 ldm(ia_w, sp, kSafepointSavedRegisters); | 636 ldm(ia_w, sp, kSafepointSavedRegisters); |
631 add(sp, sp, Operand(num_unsaved * kPointerSize)); | 637 add(sp, sp, Operand(num_unsaved * kPointerSize)); |
632 } | 638 } |
633 | 639 |
634 | 640 |
635 void MacroAssembler::PushSafepointRegistersAndDoubles() { | 641 void MacroAssembler::PushSafepointRegistersAndDoubles() { |
636 // Number of d-regs not known at snapshot time. | 642 // Number of d-regs not known at snapshot time. |
637 ASSERT(!Serializer::enabled()); | 643 ASSERT(!Serializer::enabled(isolate())); |
638 PushSafepointRegisters(); | 644 PushSafepointRegisters(); |
639 // Only save allocatable registers. | 645 // Only save allocatable registers. |
640 ASSERT(kScratchDoubleReg.is(d15) && kDoubleRegZero.is(d14)); | 646 ASSERT(kScratchDoubleReg.is(d15) && kDoubleRegZero.is(d14)); |
641 ASSERT(DwVfpRegister::NumReservedRegisters() == 2); | 647 ASSERT(DwVfpRegister::NumReservedRegisters() == 2); |
642 if (CpuFeatures::IsSupported(VFP32DREGS)) { | 648 if (CpuFeatures::IsSupported(VFP32DREGS)) { |
643 vstm(db_w, sp, d16, d31); | 649 vstm(db_w, sp, d16, d31); |
644 } | 650 } |
645 vstm(db_w, sp, d0, d13); | 651 vstm(db_w, sp, d0, d13); |
646 } | 652 } |
647 | 653 |
648 | 654 |
649 void MacroAssembler::PopSafepointRegistersAndDoubles() { | 655 void MacroAssembler::PopSafepointRegistersAndDoubles() { |
650 // Number of d-regs not known at snapshot time. | 656 // Number of d-regs not known at snapshot time. |
651 ASSERT(!Serializer::enabled()); | 657 ASSERT(!Serializer::enabled(isolate())); |
652 // Only save allocatable registers. | 658 // Only save allocatable registers. |
653 ASSERT(kScratchDoubleReg.is(d15) && kDoubleRegZero.is(d14)); | 659 ASSERT(kScratchDoubleReg.is(d15) && kDoubleRegZero.is(d14)); |
654 ASSERT(DwVfpRegister::NumReservedRegisters() == 2); | 660 ASSERT(DwVfpRegister::NumReservedRegisters() == 2); |
655 vldm(ia_w, sp, d0, d13); | 661 vldm(ia_w, sp, d0, d13); |
656 if (CpuFeatures::IsSupported(VFP32DREGS)) { | 662 if (CpuFeatures::IsSupported(VFP32DREGS)) { |
657 vldm(ia_w, sp, d16, d31); | 663 vldm(ia_w, sp, d16, d31); |
658 } | 664 } |
659 PopSafepointRegisters(); | 665 PopSafepointRegisters(); |
660 } | 666 } |
661 | 667 |
(...skipping 21 matching lines...) |
683 } | 689 } |
684 | 690 |
685 | 691 |
686 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { | 692 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { |
687 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); | 693 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); |
688 } | 694 } |
689 | 695 |
690 | 696 |
691 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { | 697 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { |
692 // Number of d-regs not known at snapshot time. | 698 // Number of d-regs not known at snapshot time. |
693 ASSERT(!Serializer::enabled()); | 699 ASSERT(!Serializer::enabled(isolate())); |
694 // General purpose registers are pushed last on the stack. | 700 // General purpose registers are pushed last on the stack. |
695 int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize; | 701 int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize; |
696 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; | 702 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; |
697 return MemOperand(sp, doubles_size + register_offset); | 703 return MemOperand(sp, doubles_size + register_offset); |
698 } | 704 } |
699 | 705 |
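The offset arithmetic here follows the push order established in PushSafepointRegistersAndDoubles() above: the doubles block ends up closest to sp, so a general-purpose register's slot is its safepoint stack index scaled by kPointerSize, displaced past the entire doubles block. A tiny standalone version of the same computation (the constants are the usual 32-bit ARM sizes, hard-coded here as an assumption; the double count stays a parameter since, per the comment, it is not known at snapshot time):

    // Slot offset of a general-purpose safepoint register, measured from sp.
    int SafepointGpSlotOffset(int gp_stack_index, int num_allocatable_doubles) {
      const int kPointerSize = 4;   // 32-bit ARM
      const int kDoubleSize = 8;
      int doubles_size = num_allocatable_doubles * kDoubleSize;  // skip the doubles block
      int register_offset = gp_stack_index * kPointerSize;       // index into the GP block
      return doubles_size + register_offset;
    }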
700 | 706 |
701 void MacroAssembler::Ldrd(Register dst1, Register dst2, | 707 void MacroAssembler::Ldrd(Register dst1, Register dst2, |
702 const MemOperand& src, Condition cond) { | 708 const MemOperand& src, Condition cond) { |
703 ASSERT(src.rm().is(no_reg)); | 709 ASSERT(src.rm().is(no_reg)); |
(...skipping 1033 matching lines...) |
1737 Condition cond = al; | 1743 Condition cond = al; |
1738 int shift = 0; | 1744 int shift = 0; |
1739 while (object_size != 0) { | 1745 while (object_size != 0) { |
1740 if (((object_size >> shift) & 0x03) == 0) { | 1746 if (((object_size >> shift) & 0x03) == 0) { |
1741 shift += 2; | 1747 shift += 2; |
1742 } else { | 1748 } else { |
1743 int bits = object_size & (0xff << shift); | 1749 int bits = object_size & (0xff << shift); |
1744 object_size -= bits; | 1750 object_size -= bits; |
1745 shift += 8; | 1751 shift += 8; |
1746 Operand bits_operand(bits); | 1752 Operand bits_operand(bits); |
1747 ASSERT(bits_operand.is_single_instruction(this)); | 1753 ASSERT(bits_operand.is_single_instruction(isolate(), this)); |
1748 add(scratch2, source, bits_operand, SetCC, cond); | 1754 add(scratch2, source, bits_operand, SetCC, cond); |
1749 source = scratch2; | 1755 source = scratch2; |
1750 cond = cc; | 1756 cond = cc; |
1751 } | 1757 } |
1752 } | 1758 } |
1753 b(cs, gc_required); | 1759 b(cs, gc_required); |
1754 cmp(scratch2, Operand(ip)); | 1760 cmp(scratch2, Operand(ip)); |
1755 b(hi, gc_required); | 1761 b(hi, gc_required); |
1756 str(scratch2, MemOperand(topaddr)); | 1762 str(scratch2, MemOperand(topaddr)); |
1757 | 1763 |
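Background for the loop above: an ARM data-processing immediate is an 8-bit value rotated right by an even amount, so an arbitrary statically-known object_size may not fit a single add. The loop peels the size into byte-wide chunks at even bit offsets and emits one single-instruction add per chunk; the SetCC/cc chaining means a carry in any step skips the remaining adds and is then caught by the b(cs, gc_required) that follows. A standalone copy of just the splitting step (illustrative, not V8 code), so the chunking is easy to check by hand:

    #include <cstdio>
    #include <vector>

    // Split `size` into addends of the form (8-bit chunk << even shift),
    // mirroring the loop in Allocate(): skip zero 2-bit groups, then peel a byte.
    std::vector<int> SplitIntoArmImmediates(int size) {
      std::vector<int> addends;
      int shift = 0;
      while (size != 0) {
        if (((size >> shift) & 0x03) == 0) {
          shift += 2;                          // rotations come in even amounts
        } else {
          int bits = size & (0xff << shift);   // next byte-sized chunk
          size -= bits;
          shift += 8;
          addends.push_back(bits);             // each one encodes as a single immediate
        }
      }
      return addends;
    }

    int main() {
      // 0x12345 -> 0x45, 0x2300, 0x10000: three single-instruction adds.
      for (int a : SplitIntoArmImmediates(0x12345)) std::printf("0x%x\n", a);
    }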
(...skipping 2304 matching lines...) |
4062 sub(result, result, Operand(dividend)); | 4068 sub(result, result, Operand(dividend)); |
4063 } | 4069 } |
4064 if (ms.shift() > 0) mov(result, Operand(result, ASR, ms.shift())); | 4070 if (ms.shift() > 0) mov(result, Operand(result, ASR, ms.shift())); |
4065 add(result, result, Operand(dividend, LSR, 31)); | 4071 add(result, result, Operand(dividend, LSR, 31)); |
4066 } | 4072 } |
4067 | 4073 |
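The tail visible above is the back half of the usual signed division-by-constant trick: take the high 32 bits of dividend times a precomputed multiplier, add or subtract the dividend when the multiplier's sign disagrees with the divisor's, arithmetic-shift by the precomputed amount, then add the dividend's sign bit so the result truncates toward zero. A self-contained check of that recipe for one divisor; the multiplier/shift pair below is the standard textbook value for 7 and is an assumption for illustration, not something taken from this file:

    #include <cassert>
    #include <cstdint>

    // Truncating n / 7 with no divide instruction.
    int32_t DivBy7(int32_t n) {
      const int32_t kMultiplier = static_cast<int32_t>(0x92492493u);  // negative
      const int kShift = 2;
      // High 32 bits of the 64-bit product, as smull would leave in its high register.
      int32_t q = static_cast<int32_t>((static_cast<int64_t>(n) * kMultiplier) >> 32);
      q += n;                                                      // divisor > 0, multiplier < 0
      q >>= kShift;                                                // ASR #shift
      q += static_cast<int32_t>(static_cast<uint32_t>(n) >> 31);   // add sign bit of n
      return q;
    }

    int main() {
      for (int32_t n : {-700, -100, -7, -1, 0, 1, 6, 7, 13, 700, 2147483647}) {
        assert(DivBy7(n) == n / 7);
      }
    }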
4068 | 4074 |
4069 } } // namespace v8::internal | 4075 } } // namespace v8::internal |
4070 | 4076 |
4071 #endif // V8_TARGET_ARCH_ARM | 4077 #endif // V8_TARGET_ARCH_ARM |