OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 175 matching lines...) | |
186 kPageSizeBits - Page::kRegionSizeLog2); | 186 kPageSizeBits - Page::kRegionSizeLog2); |
187 | 187 |
188 // Mark region dirty. | 188 // Mark region dirty. |
189 lw(scratch, MemOperand(object, Page::kDirtyFlagOffset)); | 189 lw(scratch, MemOperand(object, Page::kDirtyFlagOffset)); |
190 li(at, Operand(1)); | 190 li(at, Operand(1)); |
191 sllv(at, at, address); | 191 sllv(at, at, address); |
192 or_(scratch, scratch, at); | 192 or_(scratch, scratch, at); |
193 sw(scratch, MemOperand(object, Page::kDirtyFlagOffset)); | 193 sw(scratch, MemOperand(object, Page::kDirtyFlagOffset)); |
194 } | 194 } |
195 | 195 |
196 // Push and pop all registers that can hold pointers. | |
197 void MacroAssembler::PushSafepointRegisters() { | |
198 // Safepoints expect a block of kNumSafepointRegisters values on the | |
199 // stack, so adjust the stack for unsaved registers. | |
200 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | |
201 ASSERT(num_unsaved >= 0); | |
202 Subu(sp, sp, Operand(num_unsaved * kPointerSize)); | |
203 MultiPush(kSafepointSavedRegisters); | |
204 } | |
205 | |
206 void MacroAssembler::PopSafepointRegisters() { | |
207 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | |
208 MultiPop(kSafepointSavedRegisters); | |
209 Addu(sp, sp, Operand(num_unsaved * kPointerSize)); | |
210 } | |
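
The comment above explains the slot bookkeeping: a full block of kNumSafepointRegisters slots is reserved even though only the saved registers are written, so slot indices stay stable for the runtime. A minimal sketch of that arithmetic, with assumed (port-specific) register counts:

```cpp
#include <cassert>
#include <cstdio>

// Illustrative constants; the real values are defined per-port in V8.
const int kNumSafepointRegisters = 24;       // assumed total slot count
const int kNumSafepointSavedRegisters = 14;  // assumed saved-register count
const int kPointerSize = 4;

int main() {
  // The full block is reserved even though only the saved registers are
  // stored by MultiPush; the gap keeps safepoint slot indices stable.
  int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  assert(num_unsaved >= 0);
  int sp_drop = num_unsaved * kPointerSize                   // untouched gap
              + kNumSafepointSavedRegisters * kPointerSize;  // MultiPush stores
  printf("sp moves down by %d bytes\n", sp_drop);
  return 0;
}
```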
211 | |
212 void MacroAssembler::PushSafepointRegistersAndDoubles() { | |
213 PushSafepointRegisters(); | |
214 Subu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize)); | |
215 for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i+=2) { | |
Søren Thygesen Gjesse 2011/05/24 07:21:00: Indentation. | |
216 FPURegister reg = FPURegister::FromAllocationIndex(i); | |
217 sdc1(reg, MemOperand(sp, i * kDoubleSize)); | |
218 } | |
219 } | |
220 | |
221 void MacroAssembler::PopSafepointRegistersAndDoubles() { | |
222 for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i+=2) { | |
Søren Thygesen Gjesse 2011/05/24 07:21:00: Indentation. | |
223 FPURegister reg = FPURegister::FromAllocationIndex(i); | |
224 ldc1(reg, MemOperand(sp, i * kDoubleSize)); | |
225 } | |
226 Addu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize)); | |
227 PopSafepointRegisters(); | |
228 } | |
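
Both loops above step by two. A small sketch of the resulting offset pattern, assuming a hypothetical allocatable-register count: on MIPS32 each 64-bit double occupies an even/odd pair of 32-bit FPU registers, so only even-numbered registers are spilled with sdc1/ldc1:

```cpp
#include <cstdio>

// Assumed count; FPURegister::kNumAllocatableRegisters is port-specific.
const int kNumAllocatableFPURegisters = 12;
const int kDoubleSize = 8;

int main() {
  // On MIPS32 a 64-bit double occupies an even/odd pair of 32-bit FPU
  // registers, so stepping the loop by 2 visits each double exactly once.
  // Each sdc1/ldc1 transfers 8 bytes at offset i * kDoubleSize.
  for (int i = 0; i < kNumAllocatableFPURegisters; i += 2) {
    printf("f%-2d <-> [sp + %2d]\n", i, i * kDoubleSize);
  }
  return 0;
}
```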
229 | |
230 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src, | |
231 Register dst) { | |
232 sw(src, SafepointRegistersAndDoublesSlot(dst)); | |
233 } | |
234 | |
235 | |
236 void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) { | |
237 sw(src, SafepointRegisterSlot(dst)); | |
238 } | |
239 | |
240 | |
241 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) { | |
242 lw(dst, SafepointRegisterSlot(src)); | |
243 } | |
244 | |
245 | |
246 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) { | |
247 // The registers are pushed starting with the highest encoding, | |
248 // which means that lowest encodings are closest to the stack pointer. | |
249 return kSafepointRegisterStackIndexMap[reg_code]; | |
250 } | |
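
A sketch of how a table like kSafepointRegisterStackIndexMap could be derived from a save mask (the mask below is hypothetical): since MultiPush stores the highest encoding first, a register's stack index is simply the number of saved registers with a lower code:

```cpp
#include <cstdio>

int main() {
  // Hypothetical save mask: bit r set means GP register r is in the
  // safepoint set. Registers are pushed starting with the highest code,
  // so the lowest saved code lands nearest sp (stack index 0).
  unsigned mask = 0x00F6u;  // assumed kSafepointSavedRegisters bits
  int next = 0;
  for (int code = 0; code < 16; ++code) {
    if ((mask >> code) & 1u) {
      printf("reg %2d -> stack index %d\n", code, next++);
    }
  }
  return 0;
}
```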
251 | |
252 | |
253 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { | |
254 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); | |
255 } | |
256 | |
257 | |
258 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { | |
259 // General purpose registers are pushed last on the stack. | |
260 int doubles_size = FPURegister::kNumAllocatableRegisters * kDoubleSize; | |
261 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; | |
262 return MemOperand(sp, doubles_size + register_offset); | |
263 } | |
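
A worked sketch of the slot arithmetic, under the same assumed counts as above: the doubles block sits between sp and the GP slots, so a GP slot is reached by first skipping the whole doubles area:

```cpp
#include <cstdio>

const int kDoubleSize = 8;
const int kPointerSize = 4;
const int kNumAllocatableFPURegisters = 12;  // assumed count

// The doubles are pushed after (below) the GP block, so reaching a GP slot
// means skipping the entire doubles area first.
int RegistersAndDoublesSlotOffset(int stack_index) {
  int doubles_size = kNumAllocatableFPURegisters * kDoubleSize;
  return doubles_size + stack_index * kPointerSize;
}

int main() {
  printf("GP stack index 0 -> sp + %d\n", RegistersAndDoublesSlotOffset(0));
  printf("GP stack index 3 -> sp + %d\n", RegistersAndDoublesSlotOffset(3));
  return 0;
}
```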
264 | |
265 | |
266 | |
196 | 267 |
197 void MacroAssembler::InNewSpace(Register object, | 268 void MacroAssembler::InNewSpace(Register object, |
198 Register scratch, | 269 Register scratch, |
199 Condition cc, | 270 Condition cc, |
200 Label* branch) { | 271 Label* branch) { |
201 ASSERT(cc == eq || cc == ne); | 272 ASSERT(cc == eq || cc == ne); |
202 And(scratch, object, Operand(ExternalReference::new_space_mask(isolate()))); | 273 And(scratch, object, Operand(ExternalReference::new_space_mask(isolate()))); |
203 Branch(branch, cc, scratch, | 274 Branch(branch, cc, scratch, |
204 Operand(ExternalReference::new_space_start(isolate()))); | 275 Operand(ExternalReference::new_space_start(isolate()))); |
205 } | 276 } |
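
InNewSpace relies on the new space being an aligned, power-of-two sized region, so masking an address recovers the region base exactly when the address lies inside it. A sketch with hypothetical base and size values:

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  // Assumed layout: new space is an aligned, power-of-two sized region, so
  // masking an address yields the region base if and only if the address
  // lies inside the region.
  const uintptr_t kNewSpaceStart = 0x20000000u;  // hypothetical base
  const uintptr_t kNewSpaceSize = 0x00800000u;   // hypothetical 8 MB
  const uintptr_t kNewSpaceMask = ~(kNewSpaceSize - 1);

  uintptr_t inside = kNewSpaceStart + 0x1234u;
  uintptr_t outside = kNewSpaceStart + kNewSpaceSize + 0x1234u;
  printf("inside:  %d\n", (inside & kNewSpaceMask) == kNewSpaceStart);
  printf("outside: %d\n", (outside & kNewSpaceMask) == kNewSpaceStart);
  return 0;
}
```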
(...skipping 1690 matching lines...) | |
1896 mov(reg2, scratch); | 1967 mov(reg2, scratch); |
1897 } | 1968 } |
1898 } | 1969 } |
1899 | 1970 |
1900 | 1971 |
1901 void MacroAssembler::Call(Label* target) { | 1972 void MacroAssembler::Call(Label* target) { |
1902 BranchAndLink(target); | 1973 BranchAndLink(target); |
1903 } | 1974 } |
1904 | 1975 |
1905 | 1976 |
1906 void MacroAssembler::Move(Register dst, Register src) { | |
1907 if (!dst.is(src)) { | |
1908 mov(dst, src); | |
1909 } | |
1910 } | |
1911 | |
1912 | |
1913 #ifdef ENABLE_DEBUGGER_SUPPORT | 1977 #ifdef ENABLE_DEBUGGER_SUPPORT |
1914 | 1978 |
1915 void MacroAssembler::DebugBreak() { | 1979 void MacroAssembler::DebugBreak() { |
1916 ASSERT(allow_stub_calls()); | 1980 ASSERT(allow_stub_calls()); |
1917 mov(a0, zero_reg); | 1981 mov(a0, zero_reg); |
1918 li(a1, Operand(ExternalReference(Runtime::kDebugBreak, isolate()))); | 1982 li(a1, Operand(ExternalReference(Runtime::kDebugBreak, isolate()))); |
1919 CEntryStub ces(1); | 1983 CEntryStub ces(1); |
1920 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); | 1984 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); |
1921 } | 1985 } |
1922 | 1986 |
(...skipping 655 matching lines...) | |
2578 if (smi_check_type == DO_SMI_CHECK) { | 2642 if (smi_check_type == DO_SMI_CHECK) { |
2579 JumpIfSmi(obj, fail); | 2643 JumpIfSmi(obj, fail); |
2580 } | 2644 } |
2581 lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); | 2645 lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); |
2582 LoadRoot(at, index); | 2646 LoadRoot(at, index); |
2583 Branch(fail, ne, scratch, Operand(at)); | 2647 Branch(fail, ne, scratch, Operand(at)); |
2584 } | 2648 } |
2585 | 2649 |
2586 | 2650 |
2587 void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) { | 2651 void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) { |
2652 CpuFeatures::Scope scope(FPU); | |
2588 if (IsMipsSoftFloatABI) { | 2653 if (IsMipsSoftFloatABI) { |
2589 mtc1(v0, dst); | 2654 Move(dst, v0, v1); |
2590 mtc1(v1, FPURegister::from_code(dst.code() + 1)); | |
2591 } else { | 2655 } else { |
2592 if (!dst.is(f0)) { | 2656 Move(dst, f0); // Reg f0 is o32 ABI FP return value. |
2593 mov_d(dst, f0); // Reg f0 is o32 ABI FP return value. | |
2594 } | |
2595 } | 2657 } |
2596 } | 2658 } |
2597 | 2659 |
2598 | 2660 |
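
A sketch of the calling-convention detail this relies on: under the o32 soft-float ABI a double result is returned in the GPR pair v0/v1, while under hard-float it is returned in f0. The soft-float reassembly, modeled in portable C++ (little-endian half order is assumed here):

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>

// Under the o32 soft-float ABI a double return value comes back in the GPR
// pair v0/v1; under hard-float it comes back in f0. Which half carries the
// high word is endian-dependent; little-endian MIPS is assumed here.
double FromRegisterPair(uint32_t lo, uint32_t hi) {
  uint64_t bits = (static_cast<uint64_t>(hi) << 32) | lo;
  double d;
  std::memcpy(&d, &bits, sizeof d);
  return d;
}

int main() {
  // 1.5 has the IEEE-754 bit pattern 0x3FF8000000000000.
  printf("%g\n", FromRegisterPair(0x00000000u, 0x3FF80000u));
  return 0;
}
```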
2661 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg) { | |
2662 CpuFeatures::Scope scope(FPU); | |
2663 if (!IsMipsSoftFloatABI) { | |
2664 Move(f12, dreg); | |
2665 } else { | |
2666 Move(a0, a1, dreg); | |
2667 } | |
2668 } | |
2669 | |
2670 | |
2671 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg1, | |
2672 DoubleRegister dreg2) { | |
2673 CpuFeatures::Scope scope(FPU); | |
2674 if (!IsMipsSoftFloatABI) { | |
2675 if (dreg2.is(f12)) { | |
2676 ASSERT(!dreg1.is(f14)); | |
2677 Move(f14, dreg2); | |
2678 Move(f12, dreg1); | |
2679 } else { | |
2680 Move(f12, dreg1); | |
2681 Move(f14, dreg2); | |
2682 } | |
2683 } else { | |
2684 Move(a0, a1, dreg1); | |
2685 Move(a2, a3, dreg2); | |
2686 } | |
2687 } | |
2688 | |
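
The two-double overload above orders its moves so that neither source is clobbered: if dreg2 already sits in f12, it is rescued into f14 first, and the mutual-swap case (which would need a temporary) is asserted away. A self-contained model of that ordering:

```cpp
#include <cassert>
#include <cstdio>

// FPU registers modeled as an array; indices 12 and 14 stand in for the
// fixed o32 double-argument registers f12 and f14.
const int f12 = 12, f14 = 14;

void Move(double regs[], int dst, int src) {
  if (dst != src) regs[dst] = regs[src];
}

// Place regs[src1] in f12 and regs[src2] in f14 without losing a source:
// if src2 already occupies f12, rescue it into f14 first.
void SetCallCDoubleArguments(double regs[], int src1, int src2) {
  if (src2 == f12) {
    assert(src1 != f14);  // the full swap case would need a temporary
    Move(regs, f14, src2);
    Move(regs, f12, src1);
  } else {
    Move(regs, f12, src1);
    Move(regs, f14, src2);
  }
}

int main() {
  double regs[32] = {0};
  regs[12] = 2.0;  // second argument already sits in f12
  regs[4] = 1.0;
  SetCallCDoubleArguments(regs, 4, 12);
  printf("f12=%g f14=%g\n", regs[f12], regs[f14]);  // expect f12=1 f14=2
  return 0;
}
```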
2689 | |
2690 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg, | |
2691 Register reg) { | |
2692 CpuFeatures::Scope scope(FPU); | |
2693 if (!IsMipsSoftFloatABI) { | |
2694 Move(f12, dreg); | |
2695 Move(a2, reg); | |
2696 } else { | |
2697 Move(a2, reg); | |
2698 Move(a0, a1, dreg); | |
2699 } | |
2700 } | |
2701 | |
2599 // ----------------------------------------------------------------------------- | 2702 // ----------------------------------------------------------------------------- |
2600 // JavaScript invokes. | 2703 // JavaScript invokes. |
2601 | 2704 |
2602 void MacroAssembler::InvokePrologue(const ParameterCount& expected, | 2705 void MacroAssembler::InvokePrologue(const ParameterCount& expected, |
2603 const ParameterCount& actual, | 2706 const ParameterCount& actual, |
2604 Handle<Code> code_constant, | 2707 Handle<Code> code_constant, |
2605 Register code_reg, | 2708 Register code_reg, |
2606 Label* done, | 2709 Label* done, |
2607 InvokeFlag flag, | 2710 InvokeFlag flag, |
2608 const CallWrapper& call_wrapper) { | 2711 const CallWrapper& call_wrapper) { |
(...skipping 874 matching lines...) | |
3483 | 3586 |
3484 li(t8, Operand(CodeObject())); // Accessed from ExitFrame::code_slot. | 3587 li(t8, Operand(CodeObject())); // Accessed from ExitFrame::code_slot. |
3485 sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset)); | 3588 sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset)); |
3486 | 3589 |
3487 // Save the frame pointer and the context in top. | 3590 // Save the frame pointer and the context in top. |
3488 li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); | 3591 li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); |
3489 sw(fp, MemOperand(t8)); | 3592 sw(fp, MemOperand(t8)); |
3490 li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate()))); | 3593 li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate()))); |
3491 sw(cp, MemOperand(t8)); | 3594 sw(cp, MemOperand(t8)); |
3492 | 3595 |
3493 // Ensure we are not saving doubles, since it's not implemented yet. | 3596 const int frame_alignment = MacroAssembler::ActivationFrameAlignment(); |
3494 ASSERT(save_doubles == 0); | 3597 if (save_doubles) { |
3598 // The stack must be aligned to 0 modulo 8 for stores with sdc1. | |
3599 ASSERT(kDoubleSize == frame_alignment); | |
3600 if (frame_alignment > 0) { | |
3601 ASSERT(IsPowerOf2(frame_alignment)); | |
3602 And(sp, sp, Operand(-frame_alignment)); // Align stack. | |
3603 } | |
3604 int space = FPURegister::kNumRegisters * kDoubleSize; | |
3605 Subu(sp, sp, Operand(space)); | |
3606 // Remember: we only need to save every 2nd double FPU value. | |
3607 for (int i = 0; i < FPURegister::kNumRegisters; i+=2) { | |
3608 FPURegister reg = FPURegister::from_code(i); | |
3609 sdc1(reg, MemOperand(sp, i * kDoubleSize)); | |
3610 } | |
3611 } | |
3495 | 3612 |
3496 // Reserve place for the return address, stack space and an optional slot | 3613 // Reserve place for the return address, stack space and an optional slot |
3497 // (used by the DirectCEntryStub to hold the return value if a struct is | 3614 // (used by the DirectCEntryStub to hold the return value if a struct is |
3498 // returned) and align the frame preparing for calling the runtime function. | 3615 // returned) and align the frame preparing for calling the runtime function. |
3499 ASSERT(stack_space >= 0); | 3616 ASSERT(stack_space >= 0); |
3500 const int frame_alignment = MacroAssembler::ActivationFrameAlignment(); | |
3501 Subu(sp, sp, Operand((stack_space + 2) * kPointerSize)); | 3617 Subu(sp, sp, Operand((stack_space + 2) * kPointerSize)); |
3502 if (frame_alignment > 0) { | 3618 if (frame_alignment > 0) { |
3503 ASSERT(IsPowerOf2(frame_alignment)); | 3619 ASSERT(IsPowerOf2(frame_alignment)); |
3504 And(sp, sp, Operand(-frame_alignment)); // Align stack. | 3620 And(sp, sp, Operand(-frame_alignment)); // Align stack. |
3505 } | 3621 } |
3506 | 3622 |
3507 // Set the exit frame sp value to point just before the return address | 3623 // Set the exit frame sp value to point just before the return address |
3508 // location. | 3624 // location. |
3509 addiu(at, sp, kPointerSize); | 3625 addiu(at, sp, kPointerSize); |
3510 sw(at, MemOperand(fp, ExitFrameConstants::kSPOffset)); | 3626 sw(at, MemOperand(fp, ExitFrameConstants::kSPOffset)); |
3511 } | 3627 } |
3512 | 3628 |
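
A sketch of the save-area setup in EnterExitFrame when save_doubles is set, with assumed values for the register count and frame alignment: sp is rounded down to an 8-byte boundary because sdc1 requires 8-byte-aligned addresses, then a block for the even-numbered double registers is reserved (the matching restore in LeaveExitFrame walks the same offsets):

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>

const int kDoubleSize = 8;
const int kNumFPURegisters = 32;  // assumed register-file size

int main() {
  // sdc1 transfers 8 bytes and requires an 8-byte-aligned address on
  // MIPS32, so sp is rounded down to the frame alignment (assumed 8)
  // before the save area is carved out.
  uintptr_t sp = 0x7fff1234u;
  const int frame_alignment = 8;  // ActivationFrameAlignment(), assumed
  assert((frame_alignment & (frame_alignment - 1)) == 0);  // power of two
  sp &= ~static_cast<uintptr_t>(frame_alignment - 1);      // align down
  sp -= kNumFPURegisters * kDoubleSize;                    // reserve block
  for (int i = 0; i < kNumFPURegisters; i += 2) {
    assert((sp + i * kDoubleSize) % kDoubleSize == 0);  // each store aligned
  }
  printf("aligned save area base = %#lx\n", static_cast<unsigned long>(sp));
  return 0;
}
```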
3513 | 3629 |
3514 void MacroAssembler::LeaveExitFrame(bool save_doubles, | 3630 void MacroAssembler::LeaveExitFrame(bool save_doubles, |
3515 Register argument_count) { | 3631 Register argument_count) { |
3516 // Ensure we are not restoring doubles, since it's not implemented yet. | 3632 // Optionally restore all double registers. |
3517 ASSERT(save_doubles == 0); | 3633 if (save_doubles) { |
3634 // Remember: we only need to restore every 2nd double FPU value. | |
3635 lw(t8, MemOperand(fp, ExitFrameConstants::kSPOffset)); | |
3636 for (int i = 0; i < FPURegister::kNumRegisters; i+=2) { | |
3637 FPURegister reg = FPURegister::from_code(i); | |
3638 ldc1(reg, MemOperand(t8, i * kDoubleSize + kPointerSize)); | |
3639 } | |
3640 } | |
3518 | 3641 |
3519 // Clear top frame. | 3642 // Clear top frame. |
3520 li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); | 3643 li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); |
3521 sw(zero_reg, MemOperand(t8)); | 3644 sw(zero_reg, MemOperand(t8)); |
3522 | 3645 |
3523 // Restore current context from top and clear it in debug mode. | 3646 // Restore current context from top and clear it in debug mode. |
3524 li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate()))); | 3647 li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate()))); |
3525 lw(cp, MemOperand(t8)); | 3648 lw(cp, MemOperand(t8)); |
3526 #ifdef DEBUG | 3649 #ifdef DEBUG |
3527 sw(a3, MemOperand(t8)); | 3650 sw(a3, MemOperand(t8)); |
(...skipping 302 matching lines...) | |
3830 lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 3953 lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
3831 } else { | 3954 } else { |
3832 Addu(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); | 3955 Addu(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); |
3833 } | 3956 } |
3834 } | 3957 } |
3835 | 3958 |
3836 | 3959 |
3837 #undef BRANCH_ARGS_CHECK | 3960 #undef BRANCH_ARGS_CHECK |
3838 | 3961 |
3839 | 3962 |
3963 void MacroAssembler::LoadInstanceDescriptors(Register map, | |
3964 Register descriptors) { | |
3965 lw(descriptors, | |
3966 FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); | |
3967 Label not_smi; | |
3968 JumpIfNotSmi(descriptors, ¬_smi); | |
3969 li(descriptors, Operand(FACTORY->empty_descriptor_array())); | |
3970 bind(¬_smi); | |
3971 } | |
3972 | |
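
LoadInstanceDescriptors works because the map field overlays either a descriptor-array pointer or a Smi-encoded bit field 3, and V8's 32-bit tagging lets a single bit test tell them apart. A sketch of that tag check:

```cpp
#include <cstdint>
#include <cstdio>

// In V8's 32-bit tagging scheme a Smi has tag bit 0 clear, while a heap
// object pointer has it set, so a field that overlays "descriptor array
// pointer or Smi-encoded bit field 3" is disambiguated by testing bit 0.
const intptr_t kSmiTagMask = 1;

bool IsSmi(intptr_t value) { return (value & kSmiTagMask) == 0; }

int main() {
  intptr_t smi_field = 42 << 1;        // Smi: payload shifted left, tag 0
  intptr_t heap_pointer = 0x1000 | 1;  // hypothetical tagged pointer
  printf("%d %d\n", IsSmi(smi_field), IsSmi(heap_pointer));  // 1 0
  return 0;
}
```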
3973 | |
3840 CodePatcher::CodePatcher(byte* address, int instructions) | 3974 CodePatcher::CodePatcher(byte* address, int instructions) |
3841 : address_(address), | 3975 : address_(address), |
3842 instructions_(instructions), | 3976 instructions_(instructions), |
3843 size_(instructions * Assembler::kInstrSize), | 3977 size_(instructions * Assembler::kInstrSize), |
3844 masm_(Isolate::Current(), address, size_ + Assembler::kGap) { | 3978 masm_(Isolate::Current(), address, size_ + Assembler::kGap) { |
3845 // Create a new macro assembler pointing to the address of the code to patch. | 3979 // Create a new macro assembler pointing to the address of the code to patch. |
3846 // The size is adjusted with kGap in order for the assembler to generate size | 3980 // The size is adjusted with kGap in order for the assembler to generate size |
3847 // bytes of instructions without failing with buffer size constraints. | 3981 // bytes of instructions without failing with buffer size constraints. |
3848 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 3982 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
3849 } | 3983 } |
(...skipping 37 matching lines...) | |
3887 opcode == BGTZL); | 4021 opcode == BGTZL); |
3888 opcode = (cond == eq) ? BEQ : BNE; | 4022 opcode = (cond == eq) ? BEQ : BNE; |
3889 instr = (instr & ~kOpcodeMask) | opcode; | 4023 instr = (instr & ~kOpcodeMask) | opcode; |
3890 masm_.emit(instr); | 4024 masm_.emit(instr); |
3891 } | 4025 } |
3892 | 4026 |
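
ChangeBranchCondition above rewrites only the primary opcode field of the patched instruction, leaving the operands intact. A sketch of the bit manipulation (the operand bits are made up for the example):

```cpp
#include <cstdint>
#include <cstdio>

// MIPS keeps the primary opcode in the top six bits of every instruction,
// so turning beq into bne (or back) only rewrites that field.
const uint32_t kOpcodeShift = 26;
const uint32_t kOpcodeMask = 0x3Fu << kOpcodeShift;
const uint32_t BEQ = 0x04u << kOpcodeShift;  // opcode 0b000100
const uint32_t BNE = 0x05u << kOpcodeShift;  // opcode 0b000101

int main() {
  uint32_t instr = BEQ | 0x00851234u;               // hypothetical beq
  uint32_t patched = (instr & ~kOpcodeMask) | BNE;  // flip eq -> ne
  printf("%08x -> %08x\n", instr, patched);
  return 0;
}
```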
3893 | 4027 |
3894 } } // namespace v8::internal | 4028 } } // namespace v8::internal |
3895 | 4029 |
3896 #endif // V8_TARGET_ARCH_MIPS | 4030 #endif // V8_TARGET_ARCH_MIPS |