OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 147 matching lines...)
158 return Assembler::kMoveAddressIntoScratchRegisterInstructionLength; | 158 return Assembler::kMoveAddressIntoScratchRegisterInstructionLength; |
159 } | 159 } |
160 | 160 |
161 | 161 |
162 void MacroAssembler::PushAddress(ExternalReference source) { | 162 void MacroAssembler::PushAddress(ExternalReference source) { |
163 int64_t address = reinterpret_cast<int64_t>(source.address()); | 163 int64_t address = reinterpret_cast<int64_t>(source.address()); |
164 if (is_int32(address) && !Serializer::enabled()) { | 164 if (is_int32(address) && !Serializer::enabled()) { |
165 if (emit_debug_code()) { | 165 if (emit_debug_code()) { |
166 Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone()); | 166 Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone()); |
167 } | 167 } |
168 push(Immediate(static_cast<int32_t>(address))); | 168 Push(Immediate(static_cast<int32_t>(address))); |
169 return; | 169 return; |
170 } | 170 } |
171 LoadAddress(kScratchRegister, source); | 171 LoadAddress(kScratchRegister, source); |
172 push(kScratchRegister); | 172 Push(kScratchRegister); |
173 } | 173 } |
174 | 174 |
175 | 175 |
176 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { | 176 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { |
177 ASSERT(root_array_available_); | 177 ASSERT(root_array_available_); |
178 movp(destination, Operand(kRootRegister, | 178 movp(destination, Operand(kRootRegister, |
179 (index << kPointerSizeLog2) - kRootRegisterBias)); | 179 (index << kPointerSizeLog2) - kRootRegisterBias)); |
180 } | 180 } |
181 | 181 |
182 | 182 |
(...skipping 10 matching lines...)
193 | 193 |
194 void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) { | 194 void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) { |
195 ASSERT(root_array_available_); | 195 ASSERT(root_array_available_); |
196 movp(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias), | 196 movp(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias), |
197 source); | 197 source); |
198 } | 198 } |
199 | 199 |
200 | 200 |
201 void MacroAssembler::PushRoot(Heap::RootListIndex index) { | 201 void MacroAssembler::PushRoot(Heap::RootListIndex index) { |
202 ASSERT(root_array_available_); | 202 ASSERT(root_array_available_); |
203 push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias)); | 203 Push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias)); |
204 } | 204 } |
205 | 205 |
206 | 206 |
207 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) { | 207 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) { |
208 ASSERT(root_array_available_); | 208 ASSERT(root_array_available_); |
209 cmpq(with, Operand(kRootRegister, | 209 cmpq(with, Operand(kRootRegister, |
210 (index << kPointerSizeLog2) - kRootRegisterBias)); | 210 (index << kPointerSizeLog2) - kRootRegisterBias)); |
211 } | 211 } |
212 | 212 |
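The root-list accessors above all form the same operand: the root index scaled by the pointer size, minus kRootRegisterBias, relative to kRootRegister. A minimal host-side sketch of that displacement arithmetic follows; the bias value of 128 is an assumption used purely for illustration, the real constant is defined in the x64 assembler headers.

#include <cassert>
#include <cstdint>

int main() {
  const int kPointerSizeLog2 = 3;     // 8-byte pointers on x64
  const int kRootRegisterBias = 128;  // assumed value, for illustration only
  int index = 5;                      // hypothetical Heap::RootListIndex value
  int32_t displacement = (index << kPointerSizeLog2) - kRootRegisterBias;
  assert(displacement == -88);        // LoadRoot would read [kRootRegister - 88]
  return 0;
}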
213 | 213 |
(...skipping 297 matching lines...)
511 RecordComment("Abort message: "); | 511 RecordComment("Abort message: "); |
512 RecordComment(msg); | 512 RecordComment(msg); |
513 } | 513 } |
514 | 514 |
515 if (FLAG_trap_on_abort) { | 515 if (FLAG_trap_on_abort) { |
516 int3(); | 516 int3(); |
517 return; | 517 return; |
518 } | 518 } |
519 #endif | 519 #endif |
520 | 520 |
521 push(rax); | 521 Push(rax); |
522 Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)), | 522 Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)), |
523 Assembler::RelocInfoNone()); | 523 Assembler::RelocInfoNone()); |
524 push(kScratchRegister); | 524 Push(kScratchRegister); |
525 | 525 |
526 if (!has_frame_) { | 526 if (!has_frame_) { |
527 // We don't actually want to generate a pile of code for this, so just | 527 // We don't actually want to generate a pile of code for this, so just |
528 // claim there is a stack frame, without generating one. | 528 // claim there is a stack frame, without generating one. |
529 FrameScope scope(this, StackFrame::NONE); | 529 FrameScope scope(this, StackFrame::NONE); |
530 CallRuntime(Runtime::kAbort, 1); | 530 CallRuntime(Runtime::kAbort, 1); |
531 } else { | 531 } else { |
532 CallRuntime(Runtime::kAbort, 1); | 532 CallRuntime(Runtime::kAbort, 1); |
533 } | 533 } |
534 // Control will not return here. | 534 // Control will not return here. |
(...skipping 339 matching lines...)
874 void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, | 874 void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, |
875 Register exclusion1, | 875 Register exclusion1, |
876 Register exclusion2, | 876 Register exclusion2, |
877 Register exclusion3) { | 877 Register exclusion3) { |
878 // We don't allow a GC during a store buffer overflow so there is no need to | 878 // We don't allow a GC during a store buffer overflow so there is no need to |
879 // store the registers in any particular way, but we do have to store and | 879 // store the registers in any particular way, but we do have to store and |
880 // restore them. | 880 // restore them. |
881 for (int i = 0; i < kNumberOfSavedRegs; i++) { | 881 for (int i = 0; i < kNumberOfSavedRegs; i++) { |
882 Register reg = saved_regs[i]; | 882 Register reg = saved_regs[i]; |
883 if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) { | 883 if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) { |
884 push(reg); | 884 pushq(reg); |
885 } | 885 } |
886 } | 886 } |
887 // R12 to r15 are callee-save on all platforms. | 887 // R12 to r15 are callee-save on all platforms. |
888 if (fp_mode == kSaveFPRegs) { | 888 if (fp_mode == kSaveFPRegs) { |
889 subq(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters)); | 889 subq(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters)); |
890 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) { | 890 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) { |
891 XMMRegister reg = XMMRegister::from_code(i); | 891 XMMRegister reg = XMMRegister::from_code(i); |
892 movsd(Operand(rsp, i * kDoubleSize), reg); | 892 movsd(Operand(rsp, i * kDoubleSize), reg); |
893 } | 893 } |
894 } | 894 } |
895 } | 895 } |
896 | 896 |
897 | 897 |
898 void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, | 898 void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, |
899 Register exclusion1, | 899 Register exclusion1, |
900 Register exclusion2, | 900 Register exclusion2, |
901 Register exclusion3) { | 901 Register exclusion3) { |
902 if (fp_mode == kSaveFPRegs) { | 902 if (fp_mode == kSaveFPRegs) { |
903 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) { | 903 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) { |
904 XMMRegister reg = XMMRegister::from_code(i); | 904 XMMRegister reg = XMMRegister::from_code(i); |
905 movsd(reg, Operand(rsp, i * kDoubleSize)); | 905 movsd(reg, Operand(rsp, i * kDoubleSize)); |
906 } | 906 } |
907 addq(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters)); | 907 addq(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters)); |
908 } | 908 } |
909 for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) { | 909 for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) { |
910 Register reg = saved_regs[i]; | 910 Register reg = saved_regs[i]; |
911 if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) { | 911 if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) { |
912 pop(reg); | 912 popq(reg); |
913 } | 913 } |
914 } | 914 } |
915 } | 915 } |
916 | 916 |
917 | 917 |
918 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) { | 918 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) { |
919 xorps(dst, dst); | 919 xorps(dst, dst); |
920 cvtlsi2sd(dst, src); | 920 cvtlsi2sd(dst, src); |
921 } | 921 } |
922 | 922 |
(...skipping 1301 matching lines...)
2224 | 2224 |
2225 void MacroAssembler::AddSmiField(Register dst, const Operand& src) { | 2225 void MacroAssembler::AddSmiField(Register dst, const Operand& src) { |
2226 ASSERT_EQ(0, kSmiShift % kBitsPerByte); | 2226 ASSERT_EQ(0, kSmiShift % kBitsPerByte); |
2227 addl(dst, Operand(src, kSmiShift / kBitsPerByte)); | 2227 addl(dst, Operand(src, kSmiShift / kBitsPerByte)); |
2228 } | 2228 } |
2229 | 2229 |
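AddSmiField works because, with the 64-bit smi layout this file targets (kSmiShift == 32, asserted divisible by kBitsPerByte above), a smi field keeps its 32-bit payload in the upper half of the word; offsetting the operand by kSmiShift / kBitsPerByte == 4 bytes lets addl read the untagged value directly. A small host-side sketch, assuming the little-endian layout of x64:

#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  const int kSmiShift = 32;
  const int kBitsPerByte = 8;
  int64_t smi_field = static_cast<int64_t>(42) << kSmiShift;  // smi-tagged 42 as stored in memory

  int32_t payload;
  std::memcpy(&payload,
              reinterpret_cast<const char*>(&smi_field) + kSmiShift / kBitsPerByte,
              sizeof(payload));  // read 4 bytes at offset 4, like the addl operand above
  assert(payload == 42);
  return 0;
}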
2230 | 2230 |
2231 void MacroAssembler::Push(Smi* source) { | 2231 void MacroAssembler::Push(Smi* source) { |
2232 intptr_t smi = reinterpret_cast<intptr_t>(source); | 2232 intptr_t smi = reinterpret_cast<intptr_t>(source); |
2233 if (is_int32(smi)) { | 2233 if (is_int32(smi)) { |
2234 push(Immediate(static_cast<int32_t>(smi))); | 2234 Push(Immediate(static_cast<int32_t>(smi))); |
2235 } else { | 2235 } else { |
2236 Register constant = GetSmiConstant(source); | 2236 Register constant = GetSmiConstant(source); |
2237 push(constant); | 2237 Push(constant); |
2238 } | 2238 } |
2239 } | 2239 } |
2240 | 2240 |
2241 | 2241 |
2242 void MacroAssembler::PushInt64AsTwoSmis(Register src, Register scratch) { | 2242 void MacroAssembler::PushInt64AsTwoSmis(Register src, Register scratch) { |
2243 movp(scratch, src); | 2243 movp(scratch, src); |
2244 // High bits. | 2244 // High bits. |
2245 shr(src, Immediate(64 - kSmiShift)); | 2245 shr(src, Immediate(64 - kSmiShift)); |
2246 shl(src, Immediate(kSmiShift)); | 2246 shl(src, Immediate(kSmiShift)); |
2247 push(src); | 2247 Push(src); |
2248 // Low bits. | 2248 // Low bits. |
2249 shl(scratch, Immediate(kSmiShift)); | 2249 shl(scratch, Immediate(kSmiShift)); |
2250 push(scratch); | 2250 Push(scratch); |
2251 } | 2251 } |
2252 | 2252 |
2253 | 2253 |
2254 void MacroAssembler::PopInt64AsTwoSmis(Register dst, Register scratch) { | 2254 void MacroAssembler::PopInt64AsTwoSmis(Register dst, Register scratch) { |
2255 pop(scratch); | 2255 Pop(scratch); |
2256 // Low bits. | 2256 // Low bits. |
2257 shr(scratch, Immediate(kSmiShift)); | 2257 shr(scratch, Immediate(kSmiShift)); |
2258 pop(dst); | 2258 Pop(dst); |
2259 shr(dst, Immediate(kSmiShift)); | 2259 shr(dst, Immediate(kSmiShift)); |
2260 // High bits. | 2260 // High bits. |
2261 shl(dst, Immediate(64 - kSmiShift)); | 2261 shl(dst, Immediate(64 - kSmiShift)); |
2262 or_(dst, scratch); | 2262 or_(dst, scratch); |
2263 } | 2263 } |
2264 | 2264 |
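PushInt64AsTwoSmis and PopInt64AsTwoSmis split a raw 64-bit value into its high and low 32-bit halves, each smi-tagged by shifting it into the upper word half, and later recombine them, so that while the value is spilled the stack only ever holds well-formed smis. The same bit manipulation in plain C++, assuming kSmiShift == 32:

#include <cassert>
#include <cstdint>

int main() {
  const int kSmiShift = 32;
  uint64_t value = 0x123456789ABCDEF0ULL;

  // Push side: each 32-bit half becomes a smi-tagged word.
  uint64_t high_smi = (value >> (64 - kSmiShift)) << kSmiShift;
  uint64_t low_smi = value << kSmiShift;

  // Pop side: untag both halves and recombine them.
  uint64_t low = low_smi >> kSmiShift;
  uint64_t high = high_smi >> kSmiShift;
  uint64_t restored = (high << (64 - kSmiShift)) | low;

  assert(restored == value);
  return 0;
}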
2265 | 2265 |
2266 void MacroAssembler::Test(const Operand& src, Smi* source) { | 2266 void MacroAssembler::Test(const Operand& src, Smi* source) { |
2267 testl(Operand(src, kIntSize), Immediate(source->value())); | 2267 testl(Operand(src, kIntSize), Immediate(source->value())); |
2268 } | 2268 } |
(...skipping 259 matching lines...)
2528 } | 2528 } |
2529 } | 2529 } |
2530 | 2530 |
2531 | 2531 |
2532 void MacroAssembler::Push(Handle<Object> source) { | 2532 void MacroAssembler::Push(Handle<Object> source) { |
2533 AllowDeferredHandleDereference smi_check; | 2533 AllowDeferredHandleDereference smi_check; |
2534 if (source->IsSmi()) { | 2534 if (source->IsSmi()) { |
2535 Push(Smi::cast(*source)); | 2535 Push(Smi::cast(*source)); |
2536 } else { | 2536 } else { |
2537 MoveHeapObject(kScratchRegister, source); | 2537 MoveHeapObject(kScratchRegister, source); |
2538 push(kScratchRegister); | 2538 Push(kScratchRegister); |
2539 } | 2539 } |
2540 } | 2540 } |
2541 | 2541 |
2542 | 2542 |
2543 void MacroAssembler::MoveHeapObject(Register result, | 2543 void MacroAssembler::MoveHeapObject(Register result, |
2544 Handle<Object> object) { | 2544 Handle<Object> object) { |
2545 AllowDeferredHandleDereference using_raw_address; | 2545 AllowDeferredHandleDereference using_raw_address; |
2546 ASSERT(object->IsHeapObject()); | 2546 ASSERT(object->IsHeapObject()); |
2547 if (isolate()->heap()->InNewSpace(*object)) { | 2547 if (isolate()->heap()->InNewSpace(*object)) { |
2548 Handle<Cell> cell = isolate()->factory()->NewCell(object); | 2548 Handle<Cell> cell = isolate()->factory()->NewCell(object); |
(...skipping 16 matching lines...)
2565 } | 2565 } |
2566 | 2566 |
2567 | 2567 |
2568 void MacroAssembler::Drop(int stack_elements) { | 2568 void MacroAssembler::Drop(int stack_elements) { |
2569 if (stack_elements > 0) { | 2569 if (stack_elements > 0) { |
2570 addq(rsp, Immediate(stack_elements * kPointerSize)); | 2570 addq(rsp, Immediate(stack_elements * kPointerSize)); |
2571 } | 2571 } |
2572 } | 2572 } |
2573 | 2573 |
2574 | 2574 |
| 2575 void MacroAssembler::Push(Register src) { |
| 2576 if (kPointerSize == kInt64Size) { |
| 2577 pushq(src); |
| 2578 } else { |
| 2579 ASSERT(kPointerSize == kInt32Size); |
| 2580 // x32 uses 64-bit push for rbp in the prologue. |
| 2581 ASSERT(src.code() != rbp.code()); |
| 2582 leal(rsp, Operand(rsp, -4)); |
| 2583 movp(Operand(rsp, 0), src); |
| 2584 } |
| 2585 } |
| 2586 |
| 2587 |
| 2588 void MacroAssembler::Push(const Operand& src) { |
| 2589 if (kPointerSize == kInt64Size) { |
| 2590 pushq(src); |
| 2591 } else { |
| 2592 ASSERT(kPointerSize == kInt32Size); |
| 2593 movp(kScratchRegister, src); |
| 2594 leal(rsp, Operand(rsp, -4)); |
| 2595 movp(Operand(rsp, 0), kScratchRegister); |
| 2596 } |
| 2597 } |
| 2598 |
| 2599 |
| 2600 void MacroAssembler::Push(Immediate value) { |
| 2601 if (kPointerSize == kInt64Size) { |
| 2602 pushq(value); |
| 2603 } else { |
| 2604 ASSERT(kPointerSize == kInt32Size); |
| 2605 leal(rsp, Operand(rsp, -4)); |
| 2606 movp(Operand(rsp, 0), value); |
| 2607 } |
| 2608 } |
| 2609 |
| 2610 |
| 2611 void MacroAssembler::PushImm32(int32_t imm32) { |
| 2612 if (kPointerSize == kInt64Size) { |
| 2613 pushq_imm32(imm32); |
| 2614 } else { |
| 2615 ASSERT(kPointerSize == kInt32Size); |
| 2616 leal(rsp, Operand(rsp, -4)); |
| 2617 movp(Operand(rsp, 0), Immediate(imm32)); |
| 2618 } |
| 2619 } |
| 2620 |
| 2621 |
| 2622 void MacroAssembler::Pop(Register dst) { |
| 2623 if (kPointerSize == kInt64Size) { |
| 2624 popq(dst); |
| 2625 } else { |
| 2626 ASSERT(kPointerSize == kInt32Size); |
| 2627 // x32 uses 64-bit pop for rbp in the epilogue. |
| 2628 ASSERT(dst.code() != rbp.code()); |
| 2629 movp(dst, Operand(rsp, 0)); |
| 2630 leal(rsp, Operand(rsp, 4)); |
| 2631 } |
| 2632 } |
| 2633 |
| 2634 |
| 2635 void MacroAssembler::Pop(const Operand& dst) { |
| 2636 if (kPointerSize == kInt64Size) { |
| 2637 popq(dst); |
| 2638 } else { |
| 2639 ASSERT(kPointerSize == kInt32Size); |
| 2640 Register scratch = dst.AddressUsesRegister(kScratchRegister) |
| 2641 ? kSmiConstantRegister : kScratchRegister; |
| 2642 movp(scratch, Operand(rsp, 0)); |
| 2643 movp(dst, scratch); |
| 2644 leal(rsp, Operand(rsp, 4)); |
| 2645 if (scratch.is(kSmiConstantRegister)) { |
| 2646 // Restore kSmiConstantRegister. |
| 2647 movp(kSmiConstantRegister, Smi::FromInt(kSmiConstantRegisterValue), |
| 2648 Assembler::RelocInfoNone()); |
| 2649 } |
| 2650 } |
| 2651 } |
| 2652 |
| 2653 |
2575 void MacroAssembler::TestBit(const Operand& src, int bits) { | 2654 void MacroAssembler::TestBit(const Operand& src, int bits) { |
2576 int byte_offset = bits / kBitsPerByte; | 2655 int byte_offset = bits / kBitsPerByte; |
2577 int bit_in_byte = bits & (kBitsPerByte - 1); | 2656 int bit_in_byte = bits & (kBitsPerByte - 1); |
2578 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); | 2657 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); |
2579 } | 2658 } |
2580 | 2659 |
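TestBit addresses the byte containing the requested bit and tests only that bit within it, so the testb never touches more of the operand than necessary. A worked instance of the index arithmetic:

#include <cassert>

int main() {
  const int kBitsPerByte = 8;
  int bits = 35;                                // ask for bit 35 of the operand
  int byte_offset = bits / kBitsPerByte;        // 4: the fifth byte
  int bit_in_byte = bits & (kBitsPerByte - 1);  // 3: bit 3 inside that byte
  assert(byte_offset == 4 && bit_in_byte == 3);
  assert(byte_offset * kBitsPerByte + bit_in_byte == bits);
  return 0;
}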
2581 | 2660 |
2582 void MacroAssembler::Jump(ExternalReference ext) { | 2661 void MacroAssembler::Jump(ExternalReference ext) { |
2583 LoadAddress(kScratchRegister, ext); | 2662 LoadAddress(kScratchRegister, ext); |
2584 jmp(kScratchRegister); | 2663 jmp(kScratchRegister); |
(...skipping 74 matching lines...)
2659 ASSERT(RelocInfo::IsCodeTarget(rmode) || | 2738 ASSERT(RelocInfo::IsCodeTarget(rmode) || |
2660 rmode == RelocInfo::CODE_AGE_SEQUENCE); | 2739 rmode == RelocInfo::CODE_AGE_SEQUENCE); |
2661 call(code_object, rmode, ast_id); | 2740 call(code_object, rmode, ast_id); |
2662 #ifdef DEBUG | 2741 #ifdef DEBUG |
2663 CHECK_EQ(end_position, pc_offset()); | 2742 CHECK_EQ(end_position, pc_offset()); |
2664 #endif | 2743 #endif |
2665 } | 2744 } |
2666 | 2745 |
2667 | 2746 |
2668 void MacroAssembler::Pushad() { | 2747 void MacroAssembler::Pushad() { |
2669 push(rax); | 2748 Push(rax); |
2670 push(rcx); | 2749 Push(rcx); |
2671 push(rdx); | 2750 Push(rdx); |
2672 push(rbx); | 2751 Push(rbx); |
2673 // Not pushing rsp or rbp. | 2752 // Not pushing rsp or rbp. |
2674 push(rsi); | 2753 Push(rsi); |
2675 push(rdi); | 2754 Push(rdi); |
2676 push(r8); | 2755 Push(r8); |
2677 push(r9); | 2756 Push(r9); |
2678 // r10 is kScratchRegister. | 2757 // r10 is kScratchRegister. |
2679 push(r11); | 2758 Push(r11); |
2680 // r12 is kSmiConstantRegister. | 2759 // r12 is kSmiConstantRegister. |
2681 // r13 is kRootRegister. | 2760 // r13 is kRootRegister. |
2682 push(r14); | 2761 Push(r14); |
2683 push(r15); | 2762 Push(r15); |
2684 STATIC_ASSERT(11 == kNumSafepointSavedRegisters); | 2763 STATIC_ASSERT(11 == kNumSafepointSavedRegisters); |
2685 // Use lea for symmetry with Popad. | 2764 // Use lea for symmetry with Popad. |
2686 int sp_delta = | 2765 int sp_delta = |
2687 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize; | 2766 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize; |
2688 lea(rsp, Operand(rsp, -sp_delta)); | 2767 lea(rsp, Operand(rsp, -sp_delta)); |
2689 } | 2768 } |
2690 | 2769 |
2691 | 2770 |
2692 void MacroAssembler::Popad() { | 2771 void MacroAssembler::Popad() { |
2693 // Popad must not change the flags, so use lea instead of addq. | 2772 // Popad must not change the flags, so use lea instead of addq. |
2694 int sp_delta = | 2773 int sp_delta = |
2695 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize; | 2774 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize; |
2696 lea(rsp, Operand(rsp, sp_delta)); | 2775 lea(rsp, Operand(rsp, sp_delta)); |
2697 pop(r15); | 2776 Pop(r15); |
2698 pop(r14); | 2777 Pop(r14); |
2699 pop(r11); | 2778 Pop(r11); |
2700 pop(r9); | 2779 Pop(r9); |
2701 pop(r8); | 2780 Pop(r8); |
2702 pop(rdi); | 2781 Pop(rdi); |
2703 pop(rsi); | 2782 Pop(rsi); |
2704 pop(rbx); | 2783 Pop(rbx); |
2705 pop(rdx); | 2784 Pop(rdx); |
2706 pop(rcx); | 2785 Pop(rcx); |
2707 pop(rax); | 2786 Pop(rax); |
2708 } | 2787 } |
2709 | 2788 |
2710 | 2789 |
2711 void MacroAssembler::Dropad() { | 2790 void MacroAssembler::Dropad() { |
2712 addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize)); | 2791 addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize)); |
2713 } | 2792 } |
2714 | 2793 |
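Pushad, Popad and Dropad all size the frame as kNumSafepointRegisters slots even though only kNumSafepointSavedRegisters of them are actually written; the unused slots belong to rsp, rbp, kScratchRegister, kSmiConstantRegister and kRootRegister, which the comments above note are skipped. A quick check of that arithmetic, assuming the usual 16 safepoint register slots on x64:

#include <cassert>

int main() {
  const int kPointerSize = 8;
  const int kNumSafepointRegisters = 16;       // assumed: one slot per general-purpose register
  const int kNumSafepointSavedRegisters = 11;  // matches the STATIC_ASSERT above
  int sp_delta = (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  assert(sp_delta == 40);  // the lea in Pushad/Popad skips these 5 reserved slots
  return 0;
}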
2715 | 2794 |
2716 // Order in which general registers are pushed by Pushad: | 2795 // Order in which general registers are pushed by Pushad: |
2717 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15. | 2796 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15. |
(...skipping 49 matching lines...)
2767 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); | 2846 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); |
2768 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); | 2847 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); |
2769 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); | 2848 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); |
2770 | 2849 |
2771 // We will build up the handler from the bottom by pushing on the stack. | 2850 // We will build up the handler from the bottom by pushing on the stack. |
2772 // First push the frame pointer and context. | 2851 // First push the frame pointer and context. |
2773 if (kind == StackHandler::JS_ENTRY) { | 2852 if (kind == StackHandler::JS_ENTRY) { |
2774 // The frame pointer does not point to a JS frame so we save NULL for | 2853 // The frame pointer does not point to a JS frame so we save NULL for |
2775 // rbp. We expect the code throwing an exception to check rbp before | 2854 // rbp. We expect the code throwing an exception to check rbp before |
2776 // dereferencing it to restore the context. | 2855 // dereferencing it to restore the context. |
2777 push(Immediate(0)); // NULL frame pointer. | 2856 pushq(Immediate(0)); // NULL frame pointer. |
2778 Push(Smi::FromInt(0)); // No context. | 2857 Push(Smi::FromInt(0)); // No context. |
2779 } else { | 2858 } else { |
2780 push(rbp); | 2859 pushq(rbp); |
2781 push(rsi); | 2860 Push(rsi); |
2782 } | 2861 } |
2783 | 2862 |
2784 // Push the state and the code object. | 2863 // Push the state and the code object. |
2785 unsigned state = | 2864 unsigned state = |
2786 StackHandler::IndexField::encode(handler_index) | | 2865 StackHandler::IndexField::encode(handler_index) | |
2787 StackHandler::KindField::encode(kind); | 2866 StackHandler::KindField::encode(kind); |
2788 push(Immediate(state)); | 2867 Push(Immediate(state)); |
2789 Push(CodeObject()); | 2868 Push(CodeObject()); |
2790 | 2869 |
2791 // Link the current handler as the next handler. | 2870 // Link the current handler as the next handler. |
2792 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); | 2871 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); |
2793 push(ExternalOperand(handler_address)); | 2872 Push(ExternalOperand(handler_address)); |
2794 // Set this new handler as the current one. | 2873 // Set this new handler as the current one. |
2795 movp(ExternalOperand(handler_address), rsp); | 2874 movp(ExternalOperand(handler_address), rsp); |
2796 } | 2875 } |
2797 | 2876 |
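Taken together with the push order (frame pointer or NULL first, then context, state, code object and finally the next-handler link), the STATIC_ASSERTed offsets above describe the handler record that PushTryHandler leaves on the stack. A sketch of those slot offsets; the code-object slot at 1 * kPointerSize is inferred from the push order rather than asserted in this hunk:

#include <cassert>

int main() {
  const int kPointerSize = 8;
  const int kNextOffset = 0 * kPointerSize;     // next-handler link, pushed last, so at rsp
  const int kCodeOffset = 1 * kPointerSize;     // code object (inferred slot)
  const int kStateOffset = 2 * kPointerSize;    // index | kind word
  const int kContextOffset = 3 * kPointerSize;  // rsi, or Smi::FromInt(0) for JS_ENTRY
  const int kFPOffset = 4 * kPointerSize;       // rbp, or NULL for JS_ENTRY, pushed first
  assert(kNextOffset < kCodeOffset && kCodeOffset < kStateOffset &&
         kStateOffset < kContextOffset && kContextOffset < kFPOffset);
  return 0;
}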
2798 | 2877 |
2799 void MacroAssembler::PopTryHandler() { | 2878 void MacroAssembler::PopTryHandler() { |
2800 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | 2879 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
2801 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); | 2880 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); |
2802 pop(ExternalOperand(handler_address)); | 2881 Pop(ExternalOperand(handler_address)); |
2803 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); | 2882 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); |
2804 } | 2883 } |
2805 | 2884 |
2806 | 2885 |
2807 void MacroAssembler::JumpToHandlerEntry() { | 2886 void MacroAssembler::JumpToHandlerEntry() { |
2808 // Compute the handler entry address and jump to it. The handler table is | 2887 // Compute the handler entry address and jump to it. The handler table is |
2809 // a fixed array of (smi-tagged) code offsets. | 2888 // a fixed array of (smi-tagged) code offsets. |
2810 // rax = exception, rdi = code object, rdx = state. | 2889 // rax = exception, rdi = code object, rdx = state. |
2811 movp(rbx, FieldOperand(rdi, Code::kHandlerTableOffset)); | 2890 movp(rbx, FieldOperand(rdi, Code::kHandlerTableOffset)); |
2812 shr(rdx, Immediate(StackHandler::kKindWidth)); | 2891 shr(rdx, Immediate(StackHandler::kKindWidth)); |
(...skipping 16 matching lines...)
2829 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); | 2908 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); |
2830 | 2909 |
2831 // The exception is expected in rax. | 2910 // The exception is expected in rax. |
2832 if (!value.is(rax)) { | 2911 if (!value.is(rax)) { |
2833 movp(rax, value); | 2912 movp(rax, value); |
2834 } | 2913 } |
2835 // Drop the stack pointer to the top of the top handler. | 2914 // Drop the stack pointer to the top of the top handler. |
2836 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); | 2915 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); |
2837 movp(rsp, ExternalOperand(handler_address)); | 2916 movp(rsp, ExternalOperand(handler_address)); |
2838 // Restore the next handler. | 2917 // Restore the next handler. |
2839 pop(ExternalOperand(handler_address)); | 2918 Pop(ExternalOperand(handler_address)); |
2840 | 2919 |
2841 // Remove the code object and state, compute the handler address in rdi. | 2920 // Remove the code object and state, compute the handler address in rdi. |
2842 pop(rdi); // Code object. | 2921 Pop(rdi); // Code object. |
2843 pop(rdx); // Offset and state. | 2922 Pop(rdx); // Offset and state. |
2844 | 2923 |
2845 // Restore the context and frame pointer. | 2924 // Restore the context and frame pointer. |
2846 pop(rsi); // Context. | 2925 Pop(rsi); // Context. |
2847 pop(rbp); // Frame pointer. | 2926 popq(rbp); // Frame pointer. |
2848 | 2927 |
2849 // If the handler is a JS frame, restore the context to the frame. | 2928 // If the handler is a JS frame, restore the context to the frame. |
2850 // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either | 2929 // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either |
2851 // rbp or rsi. | 2930 // rbp or rsi. |
2852 Label skip; | 2931 Label skip; |
2853 testq(rsi, rsi); | 2932 testq(rsi, rsi); |
2854 j(zero, &skip, Label::kNear); | 2933 j(zero, &skip, Label::kNear); |
2855 movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); | 2934 movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); |
2856 bind(&skip); | 2935 bind(&skip); |
2857 | 2936 |
(...skipping 25 matching lines...)
2883 bind(&fetch_next); | 2962 bind(&fetch_next); |
2884 movp(rsp, Operand(rsp, StackHandlerConstants::kNextOffset)); | 2963 movp(rsp, Operand(rsp, StackHandlerConstants::kNextOffset)); |
2885 | 2964 |
2886 bind(&check_kind); | 2965 bind(&check_kind); |
2887 STATIC_ASSERT(StackHandler::JS_ENTRY == 0); | 2966 STATIC_ASSERT(StackHandler::JS_ENTRY == 0); |
2888 testl(Operand(rsp, StackHandlerConstants::kStateOffset), | 2967 testl(Operand(rsp, StackHandlerConstants::kStateOffset), |
2889 Immediate(StackHandler::KindField::kMask)); | 2968 Immediate(StackHandler::KindField::kMask)); |
2890 j(not_zero, &fetch_next); | 2969 j(not_zero, &fetch_next); |
2891 | 2970 |
2892 // Set the top handler address to next handler past the top ENTRY handler. | 2971 // Set the top handler address to next handler past the top ENTRY handler. |
2893 pop(ExternalOperand(handler_address)); | 2972 Pop(ExternalOperand(handler_address)); |
2894 | 2973 |
2895 // Remove the code object and state, compute the handler address in rdi. | 2974 // Remove the code object and state, compute the handler address in rdi. |
2896 pop(rdi); // Code object. | 2975 Pop(rdi); // Code object. |
2897 pop(rdx); // Offset and state. | 2976 Pop(rdx); // Offset and state. |
2898 | 2977 |
2899 // Clear the context pointer and frame pointer (0 was saved in the handler). | 2978 // Clear the context pointer and frame pointer (0 was saved in the handler). |
2900 pop(rsi); | 2979 Pop(rsi); |
2901 pop(rbp); | 2980 popq(rbp); |
2902 | 2981 |
2903 JumpToHandlerEntry(); | 2982 JumpToHandlerEntry(); |
2904 } | 2983 } |
2905 | 2984 |
2906 | 2985 |
2907 void MacroAssembler::Ret() { | 2986 void MacroAssembler::Ret() { |
2908 ret(0); | 2987 ret(0); |
2909 } | 2988 } |
2910 | 2989 |
2911 | 2990 |
(...skipping 300 matching lines...)
3212 | 3291 |
3213 void MacroAssembler::Throw(BailoutReason reason) { | 3292 void MacroAssembler::Throw(BailoutReason reason) { |
3214 #ifdef DEBUG | 3293 #ifdef DEBUG |
3215 const char* msg = GetBailoutReason(reason); | 3294 const char* msg = GetBailoutReason(reason); |
3216 if (msg != NULL) { | 3295 if (msg != NULL) { |
3217 RecordComment("Throw message: "); | 3296 RecordComment("Throw message: "); |
3218 RecordComment(msg); | 3297 RecordComment(msg); |
3219 } | 3298 } |
3220 #endif | 3299 #endif |
3221 | 3300 |
3222 push(rax); | 3301 Push(rax); |
3223 Push(Smi::FromInt(reason)); | 3302 Push(Smi::FromInt(reason)); |
3224 if (!has_frame_) { | 3303 if (!has_frame_) { |
3225 // We don't actually want to generate a pile of code for this, so just | 3304 // We don't actually want to generate a pile of code for this, so just |
3226 // claim there is a stack frame, without generating one. | 3305 // claim there is a stack frame, without generating one. |
3227 FrameScope scope(this, StackFrame::NONE); | 3306 FrameScope scope(this, StackFrame::NONE); |
3228 CallRuntime(Runtime::kThrowMessage, 1); | 3307 CallRuntime(Runtime::kThrowMessage, 1); |
3229 } else { | 3308 } else { |
3230 CallRuntime(Runtime::kThrowMessage, 1); | 3309 CallRuntime(Runtime::kThrowMessage, 1); |
3231 } | 3310 } |
3232 // Control will not return here. | 3311 // Control will not return here. |
(...skipping 90 matching lines...)
3323 cmpq(kScratchRegister, int32_register); | 3402 cmpq(kScratchRegister, int32_register); |
3324 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended); | 3403 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended); |
3325 } | 3404 } |
3326 } | 3405 } |
3327 | 3406 |
3328 | 3407 |
3329 void MacroAssembler::AssertString(Register object) { | 3408 void MacroAssembler::AssertString(Register object) { |
3330 if (emit_debug_code()) { | 3409 if (emit_debug_code()) { |
3331 testb(object, Immediate(kSmiTagMask)); | 3410 testb(object, Immediate(kSmiTagMask)); |
3332 Check(not_equal, kOperandIsASmiAndNotAString); | 3411 Check(not_equal, kOperandIsASmiAndNotAString); |
3333 push(object); | 3412 Push(object); |
3334 movp(object, FieldOperand(object, HeapObject::kMapOffset)); | 3413 movp(object, FieldOperand(object, HeapObject::kMapOffset)); |
3335 CmpInstanceType(object, FIRST_NONSTRING_TYPE); | 3414 CmpInstanceType(object, FIRST_NONSTRING_TYPE); |
3336 pop(object); | 3415 Pop(object); |
3337 Check(below, kOperandIsNotAString); | 3416 Check(below, kOperandIsNotAString); |
3338 } | 3417 } |
3339 } | 3418 } |
3340 | 3419 |
3341 | 3420 |
3342 void MacroAssembler::AssertName(Register object) { | 3421 void MacroAssembler::AssertName(Register object) { |
3343 if (emit_debug_code()) { | 3422 if (emit_debug_code()) { |
3344 testb(object, Immediate(kSmiTagMask)); | 3423 testb(object, Immediate(kSmiTagMask)); |
3345 Check(not_equal, kOperandIsASmiAndNotAName); | 3424 Check(not_equal, kOperandIsASmiAndNotAName); |
3346 push(object); | 3425 Push(object); |
3347 movp(object, FieldOperand(object, HeapObject::kMapOffset)); | 3426 movp(object, FieldOperand(object, HeapObject::kMapOffset)); |
3348 CmpInstanceType(object, LAST_NAME_TYPE); | 3427 CmpInstanceType(object, LAST_NAME_TYPE); |
3349 pop(object); | 3428 Pop(object); |
3350 Check(below_equal, kOperandIsNotAName); | 3429 Check(below_equal, kOperandIsNotAName); |
3351 } | 3430 } |
3352 } | 3431 } |
3353 | 3432 |
3354 | 3433 |
3355 void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) { | 3434 void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) { |
3356 if (emit_debug_code()) { | 3435 if (emit_debug_code()) { |
3357 Label done_checking; | 3436 Label done_checking; |
3358 AssertNotSmi(object); | 3437 AssertNotSmi(object); |
3359 Cmp(object, isolate()->factory()->undefined_value()); | 3438 Cmp(object, isolate()->factory()->undefined_value()); |
(...skipping 292 matching lines...)
3652 } else { | 3731 } else { |
3653 Jump(adaptor, RelocInfo::CODE_TARGET); | 3732 Jump(adaptor, RelocInfo::CODE_TARGET); |
3654 } | 3733 } |
3655 bind(&invoke); | 3734 bind(&invoke); |
3656 } | 3735 } |
3657 } | 3736 } |
3658 | 3737 |
3659 | 3738 |
3660 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { | 3739 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { |
3661 if (frame_mode == BUILD_STUB_FRAME) { | 3740 if (frame_mode == BUILD_STUB_FRAME) { |
3662 push(rbp); // Caller's frame pointer. | 3741 pushq(rbp); // Caller's frame pointer. |
3663 movp(rbp, rsp); | 3742 movp(rbp, rsp); |
3664 push(rsi); // Callee's context. | 3743 Push(rsi); // Callee's context. |
3665 Push(Smi::FromInt(StackFrame::STUB)); | 3744 Push(Smi::FromInt(StackFrame::STUB)); |
3666 } else { | 3745 } else { |
3667 PredictableCodeSizeScope predictable_code_size_scope(this, | 3746 PredictableCodeSizeScope predictable_code_size_scope(this, |
3668 kNoCodeAgeSequenceLength); | 3747 kNoCodeAgeSequenceLength); |
3669 if (isolate()->IsCodePreAgingActive()) { | 3748 if (isolate()->IsCodePreAgingActive()) { |
3670 // Pre-age the code. | 3749 // Pre-age the code. |
3671 Call(isolate()->builtins()->MarkCodeAsExecutedOnce(), | 3750 Call(isolate()->builtins()->MarkCodeAsExecutedOnce(), |
3672 RelocInfo::CODE_AGE_SEQUENCE); | 3751 RelocInfo::CODE_AGE_SEQUENCE); |
3673 Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength); | 3752 Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength); |
3674 } else { | 3753 } else { |
3675 push(rbp); // Caller's frame pointer. | 3754 pushq(rbp); // Caller's frame pointer. |
3676 movp(rbp, rsp); | 3755 movp(rbp, rsp); |
3677 push(rsi); // Callee's context. | 3756 Push(rsi); // Callee's context. |
3678 push(rdi); // Callee's JS function. | 3757 Push(rdi); // Callee's JS function. |
3679 } | 3758 } |
3680 } | 3759 } |
3681 } | 3760 } |
3682 | 3761 |
3683 | 3762 |
3684 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 3763 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
3685 push(rbp); | 3764 pushq(rbp); |
3686 movp(rbp, rsp); | 3765 movp(rbp, rsp); |
3687 push(rsi); // Context. | 3766 Push(rsi); // Context. |
3688 Push(Smi::FromInt(type)); | 3767 Push(Smi::FromInt(type)); |
3689 Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); | 3768 Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); |
3690 push(kScratchRegister); | 3769 Push(kScratchRegister); |
3691 if (emit_debug_code()) { | 3770 if (emit_debug_code()) { |
3692 Move(kScratchRegister, | 3771 Move(kScratchRegister, |
3693 isolate()->factory()->undefined_value(), | 3772 isolate()->factory()->undefined_value(), |
3694 RelocInfo::EMBEDDED_OBJECT); | 3773 RelocInfo::EMBEDDED_OBJECT); |
3695 cmpq(Operand(rsp, 0), kScratchRegister); | 3774 cmpq(Operand(rsp, 0), kScratchRegister); |
3696 Check(not_equal, kCodeObjectNotProperlyPatched); | 3775 Check(not_equal, kCodeObjectNotProperlyPatched); |
3697 } | 3776 } |
3698 } | 3777 } |
3699 | 3778 |
3700 | 3779 |
3701 void MacroAssembler::LeaveFrame(StackFrame::Type type) { | 3780 void MacroAssembler::LeaveFrame(StackFrame::Type type) { |
3702 if (emit_debug_code()) { | 3781 if (emit_debug_code()) { |
3703 Move(kScratchRegister, Smi::FromInt(type)); | 3782 Move(kScratchRegister, Smi::FromInt(type)); |
3704 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister); | 3783 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister); |
3705 Check(equal, kStackFrameTypesMustMatch); | 3784 Check(equal, kStackFrameTypesMustMatch); |
3706 } | 3785 } |
3707 movp(rsp, rbp); | 3786 movp(rsp, rbp); |
3708 pop(rbp); | 3787 popq(rbp); |
3709 } | 3788 } |
3710 | 3789 |
3711 | 3790 |
3712 void MacroAssembler::EnterExitFramePrologue(bool save_rax) { | 3791 void MacroAssembler::EnterExitFramePrologue(bool save_rax) { |
3713 // Set up the frame structure on the stack. | 3792 // Set up the frame structure on the stack. |
3714 // All constants are relative to the frame pointer of the exit frame. | 3793 // All constants are relative to the frame pointer of the exit frame. |
3715 ASSERT(ExitFrameConstants::kCallerSPDisplacement == | 3794 ASSERT(ExitFrameConstants::kCallerSPDisplacement == |
3716 kFPOnStackSize + kPCOnStackSize); | 3795 kFPOnStackSize + kPCOnStackSize); |
3717 ASSERT(ExitFrameConstants::kCallerPCOffset == kFPOnStackSize); | 3796 ASSERT(ExitFrameConstants::kCallerPCOffset == kFPOnStackSize); |
3718 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); | 3797 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); |
3719 push(rbp); | 3798 pushq(rbp); |
3720 movp(rbp, rsp); | 3799 movp(rbp, rsp); |
3721 | 3800 |
3722 // Reserve room for entry stack pointer and push the code object. | 3801 // Reserve room for entry stack pointer and push the code object. |
3723 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); | 3802 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); |
3724 push(Immediate(0)); // Saved entry sp, patched before call. | 3803 Push(Immediate(0)); // Saved entry sp, patched before call. |
3725 Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); | 3804 Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); |
3726 push(kScratchRegister); // Accessed from ExitFrame::code_slot. | 3805 Push(kScratchRegister); // Accessed from ExitFrame::code_slot. |
3727 | 3806 |
3728 // Save the frame pointer and the context in top. | 3807 // Save the frame pointer and the context in top. |
3729 if (save_rax) { | 3808 if (save_rax) { |
3730 movp(r14, rax); // Backup rax in callee-save register. | 3809 movp(r14, rax); // Backup rax in callee-save register. |
3731 } | 3810 } |
3732 | 3811 |
3733 Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp); | 3812 Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp); |
3734 Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi); | 3813 Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi); |
3735 } | 3814 } |
3736 | 3815 |
(...skipping 68 matching lines...)
3805 lea(rsp, Operand(r15, 1 * kPointerSize)); | 3884 lea(rsp, Operand(r15, 1 * kPointerSize)); |
3806 | 3885 |
3807 PushReturnAddressFrom(rcx); | 3886 PushReturnAddressFrom(rcx); |
3808 | 3887 |
3809 LeaveExitFrameEpilogue(true); | 3888 LeaveExitFrameEpilogue(true); |
3810 } | 3889 } |
3811 | 3890 |
3812 | 3891 |
3813 void MacroAssembler::LeaveApiExitFrame(bool restore_context) { | 3892 void MacroAssembler::LeaveApiExitFrame(bool restore_context) { |
3814 movp(rsp, rbp); | 3893 movp(rsp, rbp); |
3815 pop(rbp); | 3894 popq(rbp); |
3816 | 3895 |
3817 LeaveExitFrameEpilogue(restore_context); | 3896 LeaveExitFrameEpilogue(restore_context); |
3818 } | 3897 } |
3819 | 3898 |
3820 | 3899 |
3821 void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) { | 3900 void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) { |
3822 // Restore current context from top and clear it in debug mode. | 3901 // Restore current context from top and clear it in debug mode. |
3823 ExternalReference context_address(Isolate::kContextAddress, isolate()); | 3902 ExternalReference context_address(Isolate::kContextAddress, isolate()); |
3824 Operand context_operand = ExternalOperand(context_address); | 3903 Operand context_operand = ExternalOperand(context_address); |
3825 if (restore_context) { | 3904 if (restore_context) { |
(...skipping 44 matching lines...)
3870 j(equal, &same_contexts); | 3949 j(equal, &same_contexts); |
3871 | 3950 |
3872 // Compare security tokens. | 3951 // Compare security tokens. |
3873 // Check that the security token in the calling global object is | 3952 // Check that the security token in the calling global object is |
3874 // compatible with the security token in the receiving global | 3953 // compatible with the security token in the receiving global |
3875 // object. | 3954 // object. |
3876 | 3955 |
3877 // Check the context is a native context. | 3956 // Check the context is a native context. |
3878 if (emit_debug_code()) { | 3957 if (emit_debug_code()) { |
3879 // Preserve original value of holder_reg. | 3958 // Preserve original value of holder_reg. |
3880 push(holder_reg); | 3959 Push(holder_reg); |
3881 movp(holder_reg, | 3960 movp(holder_reg, |
3882 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); | 3961 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); |
3883 CompareRoot(holder_reg, Heap::kNullValueRootIndex); | 3962 CompareRoot(holder_reg, Heap::kNullValueRootIndex); |
3884 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull); | 3963 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull); |
3885 | 3964 |
3886 // Read the first word and compare to native_context_map(), | 3965 // Read the first word and compare to native_context_map(), |
3887 movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); | 3966 movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); |
3888 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex); | 3967 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex); |
3889 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext); | 3968 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext); |
3890 pop(holder_reg); | 3969 Pop(holder_reg); |
3891 } | 3970 } |
3892 | 3971 |
3893 movp(kScratchRegister, | 3972 movp(kScratchRegister, |
3894 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); | 3973 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); |
3895 int token_offset = | 3974 int token_offset = |
3896 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize; | 3975 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize; |
3897 movp(scratch, FieldOperand(scratch, token_offset)); | 3976 movp(scratch, FieldOperand(scratch, token_offset)); |
3898 cmpq(scratch, FieldOperand(kScratchRegister, token_offset)); | 3977 cmpq(scratch, FieldOperand(kScratchRegister, token_offset)); |
3899 j(not_equal, miss); | 3978 j(not_equal, miss); |
3900 | 3979 |
(...skipping 695 matching lines...)
4596 | 4675 |
4597 void MacroAssembler::EmitSeqStringSetCharCheck(Register string, | 4676 void MacroAssembler::EmitSeqStringSetCharCheck(Register string, |
4598 Register index, | 4677 Register index, |
4599 Register value, | 4678 Register value, |
4600 uint32_t encoding_mask) { | 4679 uint32_t encoding_mask) { |
4601 Label is_object; | 4680 Label is_object; |
4602 JumpIfNotSmi(string, &is_object); | 4681 JumpIfNotSmi(string, &is_object); |
4603 Abort(kNonObject); | 4682 Abort(kNonObject); |
4604 bind(&is_object); | 4683 bind(&is_object); |
4605 | 4684 |
4606 push(value); | 4685 Push(value); |
4607 movp(value, FieldOperand(string, HeapObject::kMapOffset)); | 4686 movp(value, FieldOperand(string, HeapObject::kMapOffset)); |
4608 movzxbq(value, FieldOperand(value, Map::kInstanceTypeOffset)); | 4687 movzxbq(value, FieldOperand(value, Map::kInstanceTypeOffset)); |
4609 | 4688 |
4610 andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask)); | 4689 andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask)); |
4611 cmpq(value, Immediate(encoding_mask)); | 4690 cmpq(value, Immediate(encoding_mask)); |
4612 pop(value); | 4691 Pop(value); |
4613 Check(equal, kUnexpectedStringType); | 4692 Check(equal, kUnexpectedStringType); |
4614 | 4693 |
4615 // The index is assumed to be untagged coming in, tag it to compare with the | 4694 // The index is assumed to be untagged coming in, tag it to compare with the |
4616 // string length without using a temp register, it is restored at the end of | 4695 // string length without using a temp register, it is restored at the end of |
4617 // this function. | 4696 // this function. |
4618 Integer32ToSmi(index, index); | 4697 Integer32ToSmi(index, index); |
4619 SmiCompare(index, FieldOperand(string, String::kLengthOffset)); | 4698 SmiCompare(index, FieldOperand(string, String::kLengthOffset)); |
4620 Check(less, kIndexIsTooLarge); | 4699 Check(less, kIndexIsTooLarge); |
4621 | 4700 |
4622 SmiCompare(index, Smi::FromInt(0)); | 4701 SmiCompare(index, Smi::FromInt(0)); |
(...skipping 199 matching lines...)
4822 Label done; | 4901 Label done; |
4823 | 4902 |
4824 // Since both black and grey have a 1 in the first position and white does | 4903 // Since both black and grey have a 1 in the first position and white does |
4825 // not have a 1 there we only need to check one bit. | 4904 // not have a 1 there we only need to check one bit. |
4826 testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch); | 4905 testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch); |
4827 j(not_zero, &done, Label::kNear); | 4906 j(not_zero, &done, Label::kNear); |
4828 | 4907 |
4829 if (emit_debug_code()) { | 4908 if (emit_debug_code()) { |
4830 // Check for impossible bit pattern. | 4909 // Check for impossible bit pattern. |
4831 Label ok; | 4910 Label ok; |
4832 push(mask_scratch); | 4911 Push(mask_scratch); |
4833 // shl. May overflow making the check conservative. | 4912 // shl. May overflow making the check conservative. |
4834 addq(mask_scratch, mask_scratch); | 4913 addq(mask_scratch, mask_scratch); |
4835 testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch); | 4914 testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch); |
4836 j(zero, &ok, Label::kNear); | 4915 j(zero, &ok, Label::kNear); |
4837 int3(); | 4916 int3(); |
4838 bind(&ok); | 4917 bind(&ok); |
4839 pop(mask_scratch); | 4918 Pop(mask_scratch); |
4840 } | 4919 } |
4841 | 4920 |
4842 // Value is white. We check whether it is data that doesn't need scanning. | 4921 // Value is white. We check whether it is data that doesn't need scanning. |
4843 // Currently only checks for HeapNumber and non-cons strings. | 4922 // Currently only checks for HeapNumber and non-cons strings. |
4844 Register map = rcx; // Holds map while checking type. | 4923 Register map = rcx; // Holds map while checking type. |
4845 Register length = rcx; // Holds length of object after checking type. | 4924 Register length = rcx; // Holds length of object after checking type. |
4846 Label not_heap_number; | 4925 Label not_heap_number; |
4847 Label is_data_object; | 4926 Label is_data_object; |
4848 | 4927 |
4849 // Check for heap-number | 4928 // Check for heap-number |
(...skipping 150 matching lines...)
5000 imull(dividend); | 5079 imull(dividend); |
5001 if (divisor > 0 && ms.multiplier() < 0) addl(rdx, dividend); | 5080 if (divisor > 0 && ms.multiplier() < 0) addl(rdx, dividend); |
5002 if (divisor < 0 && ms.multiplier() > 0) subl(rdx, dividend); | 5081 if (divisor < 0 && ms.multiplier() > 0) subl(rdx, dividend); |
5003 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift())); | 5082 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift())); |
5004 } | 5083 } |
5005 | 5084 |
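The sequence above is the classic multiply-high division by a constant: the dividend is multiplied by a precomputed magic multiplier, the high half of the product (left in rdx by imull) is corrected for the signs of divisor and multiplier, and then arithmetically shifted. A host-side illustration using the well-known constants for division by 3 (multiplier 0x55555556, shift 0); in the code above these come from ms.multiplier() and ms.shift(), and the final truncating-division sign fix-up happens outside the lines shown here:

#include <cassert>
#include <cstdint>

int main() {
  auto div3 = [](int32_t n) {
    int64_t product = static_cast<int64_t>(n) * 0x55555556LL;  // magic multiplier for divisor 3
    int32_t quotient = static_cast<int32_t>(product >> 32);    // high half, as imull leaves in edx
    quotient += (n < 0);  // add one for negative dividends (the truncation fix-up)
    return quotient;
  };
  assert(div3(100) == 33 && div3(-100) == -33 && div3(7) == 2);
  return 0;
}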
5006 | 5085 |
5007 } } // namespace v8::internal | 5086 } } // namespace v8::internal |
5008 | 5087 |
5009 #endif // V8_TARGET_ARCH_X64 | 5088 #endif // V8_TARGET_ARCH_X64 |