OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 963 matching lines...)
974 | 974 |
975 void MacroAssembler::Set(const Operand& dst, intptr_t x) { | 975 void MacroAssembler::Set(const Operand& dst, intptr_t x) { |
976 if (kPointerSize == kInt64Size) { | 976 if (kPointerSize == kInt64Size) { |
977 if (is_int32(x)) { | 977 if (is_int32(x)) { |
978 movp(dst, Immediate(static_cast<int32_t>(x))); | 978 movp(dst, Immediate(static_cast<int32_t>(x))); |
979 } else { | 979 } else { |
980 Set(kScratchRegister, x); | 980 Set(kScratchRegister, x); |
981 movp(dst, kScratchRegister); | 981 movp(dst, kScratchRegister); |
982 } | 982 } |
983 } else { | 983 } else { |
984 ASSERT(kPointerSize == kInt32Size); | |
985 movp(dst, Immediate(static_cast<int32_t>(x))); | 984 movp(dst, Immediate(static_cast<int32_t>(x))); |
986 } | 985 } |
987 } | 986 } |
988 | 987 |
989 | 988 |
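Note on Set() above (editor's illustration, not part of the diff): an x64 move to memory can only encode a sign-extended 32-bit immediate, so values that fit in an int32 are stored directly and anything wider detours through kScratchRegister. A standalone sketch of the fits-in-int32 test this relies on (assumption: plain C++ mirroring V8's is_int32() helper, not V8 code):

    // Illustration only, not V8 code.
    #include <cassert>
    #include <cstdint>

    // Hypothetical name; V8 calls this is_int32(). True when x survives a
    // round-trip through int32_t, i.e. fits a sign-extended 32-bit immediate.
    static bool fits_in_int32(int64_t x) {
      return x == static_cast<int64_t>(static_cast<int32_t>(x));
    }

    int main() {
      assert(fits_in_int32(-1));                 // direct movp-immediate path
      assert(!fits_in_int32(int64_t{1} << 40));  // kScratchRegister path
      return 0;
    }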
990 // ---------------------------------------------------------------------------- | 989 // ---------------------------------------------------------------------------- |
991 // Smi tagging, untagging and tag detection. | 990 // Smi tagging, untagging and tag detection. |
992 | 991 |
993 bool MacroAssembler::IsUnsafeInt(const int32_t x) { | 992 bool MacroAssembler::IsUnsafeInt(const int32_t x) { |
994 static const int kMaxBits = 17; | 993 static const int kMaxBits = 17; |
(...skipping 1620 matching lines...)
2615 if (stack_elements > 0) { | 2614 if (stack_elements > 0) { |
2616 addp(rsp, Immediate(stack_elements * kPointerSize)); | 2615 addp(rsp, Immediate(stack_elements * kPointerSize)); |
2617 } | 2616 } |
2618 } | 2617 } |
2619 | 2618 |
2620 | 2619 |
2621 void MacroAssembler::Push(Register src) { | 2620 void MacroAssembler::Push(Register src) { |
2622 if (kPointerSize == kInt64Size) { | 2621 if (kPointerSize == kInt64Size) { |
2623 pushq(src); | 2622 pushq(src); |
2624 } else { | 2623 } else { |
2625 ASSERT(kPointerSize == kInt32Size); | |
2626 // x32 uses 64-bit push for rbp in the prologue. | 2624 // x32 uses 64-bit push for rbp in the prologue. |
2627 ASSERT(src.code() != rbp.code()); | 2625 ASSERT(src.code() != rbp.code()); |
2628 leal(rsp, Operand(rsp, -4)); | 2626 leal(rsp, Operand(rsp, -4)); |
2629 movp(Operand(rsp, 0), src); | 2627 movp(Operand(rsp, 0), src); |
2630 } | 2628 } |
2631 } | 2629 } |
2632 | 2630 |
2633 | 2631 |
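Note on the x32 path of Push(Register) above (editor's illustration, not part of the diff): on x32 a pointer stack slot is 4 bytes, but the hardware pushq always moves rsp by 8, so the macro emulates a 4-byte push with leal/movp; the same pattern recurs in the Push/Pop overloads below. A runnable C++ sketch of that stack arithmetic (assumption: illustration only, not V8 code):

    // Illustration only, not V8 code: 4-byte slot, emulated like the x32 path.
    //   push: leal rsp, [rsp - 4]; movp [rsp], src
    //   pop:  movp dst, [rsp];     leal rsp, [rsp + 4]
    #include <cstdint>
    #include <cstring>

    int main() {
      uint8_t stack[16];
      uint8_t* rsp = stack + sizeof(stack);
      uint32_t src = 0x12345678;
      rsp -= 4;                               // open a 4-byte slot
      std::memcpy(rsp, &src, sizeof(src));    // store into the slot
      uint32_t dst = 0;
      std::memcpy(&dst, rsp, sizeof(dst));    // read the slot back
      rsp += 4;                               // release the slot
      return dst == src ? 0 : 1;              // exits 0 on success
    }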
2634 void MacroAssembler::Push(const Operand& src) { | 2632 void MacroAssembler::Push(const Operand& src) { |
2635 if (kPointerSize == kInt64Size) { | 2633 if (kPointerSize == kInt64Size) { |
2636 pushq(src); | 2634 pushq(src); |
2637 } else { | 2635 } else { |
2638 ASSERT(kPointerSize == kInt32Size); | |
2639 movp(kScratchRegister, src); | 2636 movp(kScratchRegister, src); |
2640 leal(rsp, Operand(rsp, -4)); | 2637 leal(rsp, Operand(rsp, -4)); |
2641 movp(Operand(rsp, 0), kScratchRegister); | 2638 movp(Operand(rsp, 0), kScratchRegister); |
2642 } | 2639 } |
2643 } | 2640 } |
2644 | 2641 |
2645 | 2642 |
| 2643 void MacroAssembler::PushQuad(const Operand& src) { |
| 2644 if (kPointerSize == kInt64Size) { |
| 2645 pushq(src); |
| 2646 } else { |
| 2647 movp(kScratchRegister, src); |
| 2648 pushq(kScratchRegister); |
| 2649 } |
| 2650 } |
| 2651 |
| 2652 |
2646 void MacroAssembler::Push(Immediate value) { | 2653 void MacroAssembler::Push(Immediate value) { |
2647 if (kPointerSize == kInt64Size) { | 2654 if (kPointerSize == kInt64Size) { |
2648 pushq(value); | 2655 pushq(value); |
2649 } else { | 2656 } else { |
2650 ASSERT(kPointerSize == kInt32Size); | |
2651 leal(rsp, Operand(rsp, -4)); | 2657 leal(rsp, Operand(rsp, -4)); |
2652 movp(Operand(rsp, 0), value); | 2658 movp(Operand(rsp, 0), value); |
2653 } | 2659 } |
2654 } | 2660 } |
2655 | 2661 |
2656 | 2662 |
2657 void MacroAssembler::PushImm32(int32_t imm32) { | 2663 void MacroAssembler::PushImm32(int32_t imm32) { |
2658 if (kPointerSize == kInt64Size) { | 2664 if (kPointerSize == kInt64Size) { |
2659 pushq_imm32(imm32); | 2665 pushq_imm32(imm32); |
2660 } else { | 2666 } else { |
2661 ASSERT(kPointerSize == kInt32Size); | |
2662 leal(rsp, Operand(rsp, -4)); | 2667 leal(rsp, Operand(rsp, -4)); |
2663 movp(Operand(rsp, 0), Immediate(imm32)); | 2668 movp(Operand(rsp, 0), Immediate(imm32)); |
2664 } | 2669 } |
2665 } | 2670 } |
2666 | 2671 |
2667 | 2672 |
2668 void MacroAssembler::Pop(Register dst) { | 2673 void MacroAssembler::Pop(Register dst) { |
2669 if (kPointerSize == kInt64Size) { | 2674 if (kPointerSize == kInt64Size) { |
2670 popq(dst); | 2675 popq(dst); |
2671 } else { | 2676 } else { |
2672 ASSERT(kPointerSize == kInt32Size); | |
2673 // x32 uses 64-bit pop for rbp in the epilogue. | 2677 // x32 uses 64-bit pop for rbp in the epilogue. |
2674 ASSERT(dst.code() != rbp.code()); | 2678 ASSERT(dst.code() != rbp.code()); |
2675 movp(dst, Operand(rsp, 0)); | 2679 movp(dst, Operand(rsp, 0)); |
2676 leal(rsp, Operand(rsp, 4)); | 2680 leal(rsp, Operand(rsp, 4)); |
2677 } | 2681 } |
2678 } | 2682 } |
2679 | 2683 |
2680 | 2684 |
2681 void MacroAssembler::Pop(const Operand& dst) { | 2685 void MacroAssembler::Pop(const Operand& dst) { |
2682 if (kPointerSize == kInt64Size) { | 2686 if (kPointerSize == kInt64Size) { |
2683 popq(dst); | 2687 popq(dst); |
2684 } else { | 2688 } else { |
2685 ASSERT(kPointerSize == kInt32Size); | |
2686 Register scratch = dst.AddressUsesRegister(kScratchRegister) | 2689 Register scratch = dst.AddressUsesRegister(kScratchRegister) |
2687 ? kSmiConstantRegister : kScratchRegister; | 2690 ? kSmiConstantRegister : kScratchRegister; |
2688 movp(scratch, Operand(rsp, 0)); | 2691 movp(scratch, Operand(rsp, 0)); |
2689 movp(dst, scratch); | 2692 movp(dst, scratch); |
2690 leal(rsp, Operand(rsp, 4)); | 2693 leal(rsp, Operand(rsp, 4)); |
2691 if (scratch.is(kSmiConstantRegister)) { | 2694 if (scratch.is(kSmiConstantRegister)) { |
2692 // Restore kSmiConstantRegister. | 2695 // Restore kSmiConstantRegister. |
2693 movp(kSmiConstantRegister, | 2696 movp(kSmiConstantRegister, |
2694 reinterpret_cast<void*>(Smi::FromInt(kSmiConstantRegisterValue)), | 2697 reinterpret_cast<void*>(Smi::FromInt(kSmiConstantRegisterValue)), |
2695 Assembler::RelocInfoNone()); | 2698 Assembler::RelocInfoNone()); |
2696 } | 2699 } |
2697 } | 2700 } |
2698 } | 2701 } |
2699 | 2702 |
2700 | 2703 |
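Note on the x32 path of Pop(const Operand&) above (editor's illustration, not part of the diff): the default scratch register cannot be used when the destination operand's address computation already involves it, so the code borrows kSmiConstantRegister and re-materializes its canonical value afterward. A minimal sketch of that selection rule (assumption: hypothetical enum standing in for V8's Register values, not V8 code):

    // Illustration only, not V8 code.
    #include <cassert>

    enum Reg { kScratch, kSmiConstant };  // hypothetical register tags

    // Avoid the default scratch register when the destination address uses it.
    static Reg PickScratch(bool address_uses_default_scratch) {
      return address_uses_default_scratch ? kSmiConstant : kScratch;
    }

    int main() {
      assert(PickScratch(false) == kScratch);
      // Borrowing kSmiConstant obliges the caller to restore it afterward,
      // as the diff does with Smi::FromInt(kSmiConstantRegisterValue).
      assert(PickScratch(true) == kSmiConstant);
      return 0;
    }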
| 2704 void MacroAssembler::PopQuad(const Operand& dst) { |
| 2705 if (kPointerSize == kInt64Size) { |
| 2706 popq(dst); |
| 2707 } else { |
| 2708 popq(kScratchRegister); |
| 2709 movp(dst, kScratchRegister); |
| 2710 } |
| 2711 } |
| 2712 |
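Note on the new PushQuad/PopQuad helpers above (editor's illustration, not part of the diff): unlike Push/Pop, which move one kPointerSize slot (4 bytes on x32), the quad variants always occupy a full 8-byte stack slot, widening through kScratchRegister on x32. A runnable C++ sketch of that shape (assumption: illustration only, not V8 code):

    // Illustration only, not V8 code.
    #include <cstdint>
    #include <cstring>

    int main() {
      uint8_t stack[16];
      uint8_t* rsp = stack + sizeof(stack);
      // x32 PushQuad shape: read a 4-byte source, occupy an 8-byte slot.
      uint32_t src = 0xcafef00d;
      uint64_t scratch = src;                       // movp kScratchRegister, src
      rsp -= 8;                                     // pushq kScratchRegister
      std::memcpy(rsp, &scratch, sizeof(scratch));
      // x32 PopQuad shape: release the 8-byte slot, keep the low 4 bytes.
      uint64_t popped;
      std::memcpy(&popped, rsp, sizeof(popped));    // popq kScratchRegister
      rsp += 8;
      uint32_t dst = static_cast<uint32_t>(popped); // movp dst, kScratchRegister
      return dst == src ? 0 : 1;                    // exits 0 on success
    }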
| 2713 |
2701 void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst, | 2714 void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst, |
2702 Register base, | 2715 Register base, |
2703 int offset) { | 2716 int offset) { |
2704 ASSERT(offset > SharedFunctionInfo::kLengthOffset && | 2717 ASSERT(offset > SharedFunctionInfo::kLengthOffset && |
2705 offset <= SharedFunctionInfo::kSize && | 2718 offset <= SharedFunctionInfo::kSize && |
2706 (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1)); | 2719 (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1)); |
2707 if (kPointerSize == kInt64Size) { | 2720 if (kPointerSize == kInt64Size) { |
2708 movsxlq(dst, FieldOperand(base, offset)); | 2721 movsxlq(dst, FieldOperand(base, offset)); |
2709 } else { | 2722 } else { |
2710 movp(dst, FieldOperand(base, offset)); | 2723 movp(dst, FieldOperand(base, offset)); |
(...skipping 21 matching lines...)
2732 void MacroAssembler::Jump(ExternalReference ext) { | 2745 void MacroAssembler::Jump(ExternalReference ext) { |
2733 LoadAddress(kScratchRegister, ext); | 2746 LoadAddress(kScratchRegister, ext); |
2734 jmp(kScratchRegister); | 2747 jmp(kScratchRegister); |
2735 } | 2748 } |
2736 | 2749 |
2737 | 2750 |
2738 void MacroAssembler::Jump(const Operand& op) { | 2751 void MacroAssembler::Jump(const Operand& op) { |
2739 if (kPointerSize == kInt64Size) { | 2752 if (kPointerSize == kInt64Size) { |
2740 jmp(op); | 2753 jmp(op); |
2741 } else { | 2754 } else { |
2742 ASSERT(kPointerSize == kInt32Size); | |
2743 movp(kScratchRegister, op); | 2755 movp(kScratchRegister, op); |
2744 jmp(kScratchRegister); | 2756 jmp(kScratchRegister); |
2745 } | 2757 } |
2746 } | 2758 } |
2747 | 2759 |
2748 | 2760 |
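Note on the x32 path of Jump(const Operand&) above, mirrored by Call(const Operand&) below (editor's illustration, not part of the diff): near jmp/call in 64-bit mode only accept a 64-bit memory operand, so the 4-byte x32 slot is first loaded into kScratchRegister and control transfers through the register. The same load-then-transfer shape in C++, using a function pointer as the memory slot (assumption: illustration only, not V8 code):

    // Illustration only, not V8 code.
    #include <cstdio>

    static void target() { std::puts("reached"); }

    int main() {
      void (*slot)() = target;   // memory slot holding the code address
      void (*scratch)() = slot;  // movp kScratchRegister, op
      scratch();                 // jmp/call kScratchRegister
      return 0;
    }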
2749 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { | 2761 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { |
2750 Move(kScratchRegister, destination, rmode); | 2762 Move(kScratchRegister, destination, rmode); |
2751 jmp(kScratchRegister); | 2763 jmp(kScratchRegister); |
2752 } | 2764 } |
(...skipping 21 matching lines...)
2774 #ifdef DEBUG | 2786 #ifdef DEBUG |
2775 CHECK_EQ(end_position, pc_offset()); | 2787 CHECK_EQ(end_position, pc_offset()); |
2776 #endif | 2788 #endif |
2777 } | 2789 } |
2778 | 2790 |
2779 | 2791 |
2780 void MacroAssembler::Call(const Operand& op) { | 2792 void MacroAssembler::Call(const Operand& op) { |
2781 if (kPointerSize == kInt64Size) { | 2793 if (kPointerSize == kInt64Size) { |
2782 call(op); | 2794 call(op); |
2783 } else { | 2795 } else { |
2784 ASSERT(kPointerSize == kInt32Size); | |
2785 movp(kScratchRegister, op); | 2796 movp(kScratchRegister, op); |
2786 call(kScratchRegister); | 2797 call(kScratchRegister); |
2787 } | 2798 } |
2788 } | 2799 } |
2789 | 2800 |
2790 | 2801 |
2791 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) { | 2802 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) { |
2792 #ifdef DEBUG | 2803 #ifdef DEBUG |
2793 int end_position = pc_offset() + CallSize(destination); | 2804 int end_position = pc_offset() + CallSize(destination); |
2794 #endif | 2805 #endif |
(...skipping 2360 matching lines...)
5155 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift())); | 5166 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift())); |
5156 movl(rax, dividend); | 5167 movl(rax, dividend); |
5157 shrl(rax, Immediate(31)); | 5168 shrl(rax, Immediate(31)); |
5158 addl(rdx, rax); | 5169 addl(rdx, rax); |
5159 } | 5170 } |
5160 | 5171 |
5161 | 5172 |
5162 } } // namespace v8::internal | 5173 } } // namespace v8::internal |
5163 | 5174 |
5164 #endif // V8_TARGET_ARCH_X64 | 5175 #endif // V8_TARGET_ARCH_X64 |