| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3673 matching lines...) |
| 3684 // Get the value at the masked, scaled index. | 3684 // Get the value at the masked, scaled index. |
| 3685 const int kValueOffset = | 3685 const int kValueOffset = |
| 3686 SeededNumberDictionary::kElementsStartOffset + kPointerSize; | 3686 SeededNumberDictionary::kElementsStartOffset + kPointerSize; |
| 3687 movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); | 3687 movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); |
| 3688 } | 3688 } |
| 3689 | 3689 |
| 3690 | 3690 |
| 3691 void MacroAssembler::LoadAllocationTopHelper(Register result, | 3691 void MacroAssembler::LoadAllocationTopHelper(Register result, |
| 3692 Register scratch, | 3692 Register scratch, |
| 3693 AllocationFlags flags) { | 3693 AllocationFlags flags) { |
| 3694 ExternalReference new_space_allocation_top = | 3694 ExternalReference allocation_top = ((flags & PRETENURE) != 0) ? |
| | 3695 ExternalReference::old_pointer_space_allocation_top_address(isolate()) : |
| 3695 ExternalReference::new_space_allocation_top_address(isolate()); | 3696 ExternalReference::new_space_allocation_top_address(isolate()); |
| 3696 | 3697 |
| 3697 // Just return if allocation top is already known. | 3698 // Just return if allocation top is already known. |
| 3698 if ((flags & RESULT_CONTAINS_TOP) != 0) { | 3699 if ((flags & RESULT_CONTAINS_TOP) != 0) { |
| 3699 // No use of scratch if allocation top is provided. | 3700 // No use of scratch if allocation top is provided. |
| 3700 ASSERT(!scratch.is_valid()); | 3701 ASSERT(!scratch.is_valid()); |
| 3701 #ifdef DEBUG | 3702 #ifdef DEBUG |
| 3702 // Assert that result actually contains top on entry. | 3703 // Assert that result actually contains top on entry. |
| 3703 Operand top_operand = ExternalOperand(new_space_allocation_top); | 3704 Operand top_operand = ExternalOperand(allocation_top); |
| 3704 cmpq(result, top_operand); | 3705 cmpq(result, top_operand); |
| 3705 Check(equal, "Unexpected allocation top"); | 3706 Check(equal, "Unexpected allocation top"); |
| 3706 #endif | 3707 #endif |
| 3707 return; | 3708 return; |
| 3708 } | 3709 } |
| 3709 | 3710 |
| 3710 // Move address of new object to result. Use scratch register if available, | 3711 // Move address of new object to result. Use scratch register if available, |
| 3711 // and keep address in scratch until call to UpdateAllocationTopHelper. | 3712 // and keep address in scratch until call to UpdateAllocationTopHelper. |
| 3712 if (scratch.is_valid()) { | 3713 if (scratch.is_valid()) { |
| 3713 LoadAddress(scratch, new_space_allocation_top); | 3714 LoadAddress(scratch, allocation_top); |
| 3714 movq(result, Operand(scratch, 0)); | 3715 movq(result, Operand(scratch, 0)); |
| 3715 } else { | 3716 } else { |
| 3716 Load(result, new_space_allocation_top); | 3717 Load(result, allocation_top); |
| 3717 } | 3718 } |
| 3718 } | 3719 } |
| 3719 | 3720 |
| 3720 | 3721 |
| 3721 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, | 3722 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, |
| 3722 Register scratch) { | 3723 Register scratch, |
| | 3724 AllocationFlags flags) { |
| 3723 if (emit_debug_code()) { | 3725 if (emit_debug_code()) { |
| 3724 testq(result_end, Immediate(kObjectAlignmentMask)); | 3726 testq(result_end, Immediate(kObjectAlignmentMask)); |
| 3725 Check(zero, "Unaligned allocation in new space"); | 3727 Check(zero, "Unaligned allocation in new space"); |
| 3726 } | 3728 } |
| 3727 | 3729 |
| 3728 ExternalReference new_space_allocation_top = | 3730 ExternalReference allocation_top = ((flags & PRETENURE) != 0) ? |
| | 3731 ExternalReference::old_pointer_space_allocation_top_address(isolate()) : |
| 3729 ExternalReference::new_space_allocation_top_address(isolate()); | 3732 ExternalReference::new_space_allocation_top_address(isolate()); |
| 3730 | 3733 |
| 3731 // Update new top. | 3734 // Update new top. |
| 3732 if (scratch.is_valid()) { | 3735 if (scratch.is_valid()) { |
| 3733 // Scratch already contains address of allocation top. | 3736 // Scratch already contains address of allocation top. |
| 3734 movq(Operand(scratch, 0), result_end); | 3737 movq(Operand(scratch, 0), result_end); |
| 3735 } else { | 3738 } else { |
| 3736 Store(new_space_allocation_top, result_end); | 3739 Store(allocation_top, result_end); |
| 3737 } | 3740 } |
| 3738 } | 3741 } |
| 3739 | 3742 |
| 3740 | 3743 |
| 3741 void MacroAssembler::AllocateInNewSpace(int object_size, | 3744 void MacroAssembler::Allocate(int object_size, |
| 3742 Register result, | 3745 Register result, |
| 3743 Register result_end, | 3746 Register result_end, |
| 3744 Register scratch, | 3747 Register scratch, |
| 3745 Label* gc_required, | 3748 Label* gc_required, |
| 3746 AllocationFlags flags) { | 3749 AllocationFlags flags) { |
| 3747 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); | 3750 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); |
| 3748 if (!FLAG_inline_new) { | 3751 if (!FLAG_inline_new) { |
| 3749 if (emit_debug_code()) { | 3752 if (emit_debug_code()) { |
| 3750 // Trash the registers to simulate an allocation failure. | 3753 // Trash the registers to simulate an allocation failure. |
| 3751 movl(result, Immediate(0x7091)); | 3754 movl(result, Immediate(0x7091)); |
| 3752 if (result_end.is_valid()) { | 3755 if (result_end.is_valid()) { |
| 3753 movl(result_end, Immediate(0x7191)); | 3756 movl(result_end, Immediate(0x7191)); |
| 3754 } | 3757 } |
| 3755 if (scratch.is_valid()) { | 3758 if (scratch.is_valid()) { |
| 3756 movl(scratch, Immediate(0x7291)); | 3759 movl(scratch, Immediate(0x7291)); |
| 3757 } | 3760 } |
| 3758 } | 3761 } |
| 3759 jmp(gc_required); | 3762 jmp(gc_required); |
| 3760 return; | 3763 return; |
| 3761 } | 3764 } |
| 3762 ASSERT(!result.is(result_end)); | 3765 ASSERT(!result.is(result_end)); |
| 3763 | 3766 |
| 3764 // Load address of new object into result. | 3767 // Load address of new object into result. |
| 3765 LoadAllocationTopHelper(result, scratch, flags); | 3768 LoadAllocationTopHelper(result, scratch, flags); |
| 3766 | 3769 |
| 3767 // Align the next allocation. Storing the filler map without checking top is | 3770 // Align the next allocation. Storing the filler map without checking top is |
| 3768 // always safe because the limit of the heap is always aligned. | 3771 // always safe because the limit of the heap is always aligned. |
| 3769 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 3772 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
| 3770 testq(result, Immediate(kDoubleAlignmentMask)); | 3773 testq(result, Immediate(kDoubleAlignmentMask)); |
| 3771 Check(zero, "Allocation is not double aligned"); | 3774 Check(zero, "Allocation is not double aligned"); |
| 3772 } | 3775 } |
| 3773 | 3776 |
| 3774 // Calculate new top and bail out if new space is exhausted. | 3777 // Calculate new top and bail out if new space is exhausted. |
| 3775 ExternalReference new_space_allocation_limit = | 3778 ExternalReference allocation_limit = ((flags & PRETENURE) != 0) ? |
| | 3779 ExternalReference::old_pointer_space_allocation_limit_address( |
| | 3780 isolate()) : |
| 3776 ExternalReference::new_space_allocation_limit_address(isolate()); | 3781 ExternalReference::new_space_allocation_limit_address(isolate()); |
| 3777 | 3782 |
| 3778 Register top_reg = result_end.is_valid() ? result_end : result; | 3783 Register top_reg = result_end.is_valid() ? result_end : result; |
| 3779 | 3784 |
| 3780 if (!top_reg.is(result)) { | 3785 if (!top_reg.is(result)) { |
| 3781 movq(top_reg, result); | 3786 movq(top_reg, result); |
| 3782 } | 3787 } |
| 3783 addq(top_reg, Immediate(object_size)); | 3788 addq(top_reg, Immediate(object_size)); |
| 3784 j(carry, gc_required); | 3789 j(carry, gc_required); |
| 3785 Operand limit_operand = ExternalOperand(new_space_allocation_limit); | 3790 Operand limit_operand = ExternalOperand(allocation_limit); |
| 3786 cmpq(top_reg, limit_operand); | 3791 cmpq(top_reg, limit_operand); |
| 3787 j(above, gc_required); | 3792 j(above, gc_required); |
| 3788 | 3793 |
| 3789 // Update allocation top. | 3794 // Update allocation top. |
| 3790 UpdateAllocationTopHelper(top_reg, scratch); | 3795 UpdateAllocationTopHelper(top_reg, scratch, flags); |
| 3791 | 3796 |
| 3792 bool tag_result = (flags & TAG_OBJECT) != 0; | 3797 bool tag_result = (flags & TAG_OBJECT) != 0; |
| 3793 if (top_reg.is(result)) { | 3798 if (top_reg.is(result)) { |
| 3794 if (tag_result) { | 3799 if (tag_result) { |
| 3795 subq(result, Immediate(object_size - kHeapObjectTag)); | 3800 subq(result, Immediate(object_size - kHeapObjectTag)); |
| 3796 } else { | 3801 } else { |
| 3797 subq(result, Immediate(object_size)); | 3802 subq(result, Immediate(object_size)); |
| 3798 } | 3803 } |
| 3799 } else if (tag_result) { | 3804 } else if (tag_result) { |
| 3800 // Tag the result if requested. | 3805 // Tag the result if requested. |
| 3801 ASSERT(kHeapObjectTag == 1); | 3806 ASSERT(kHeapObjectTag == 1); |
| 3802 incq(result); | 3807 incq(result); |
| 3803 } | 3808 } |
| 3804 } | 3809 } |
| 3805 | 3810 |
| 3806 | 3811 |
| 3807 void MacroAssembler::AllocateInNewSpace(int header_size, | 3812 void MacroAssembler::AllocateInNewSpace(int header_size, |
| 3808 ScaleFactor element_size, | 3813 ScaleFactor element_size, |
| 3809 Register element_count, | 3814 Register element_count, |
| 3810 Register result, | 3815 Register result, |
| 3811 Register result_end, | 3816 Register result_end, |
| 3812 Register scratch, | 3817 Register scratch, |
| 3813 Label* gc_required, | 3818 Label* gc_required, |
| 3814 AllocationFlags flags) { | 3819 AllocationFlags flags) { |
| 3815 ASSERT((flags & SIZE_IN_WORDS) == 0); | 3820 ASSERT((flags & SIZE_IN_WORDS) == 0); |
| | 3821 ASSERT((flags & PRETENURE) == 0); |
| 3816 if (!FLAG_inline_new) { | 3822 if (!FLAG_inline_new) { |
| 3817 if (emit_debug_code()) { | 3823 if (emit_debug_code()) { |
| 3818 // Trash the registers to simulate an allocation failure. | 3824 // Trash the registers to simulate an allocation failure. |
| 3819 movl(result, Immediate(0x7091)); | 3825 movl(result, Immediate(0x7091)); |
| 3820 movl(result_end, Immediate(0x7191)); | 3826 movl(result_end, Immediate(0x7191)); |
| 3821 if (scratch.is_valid()) { | 3827 if (scratch.is_valid()) { |
| 3822 movl(scratch, Immediate(0x7291)); | 3828 movl(scratch, Immediate(0x7291)); |
| 3823 } | 3829 } |
| 3824 // Register element_count is not modified by the function. | 3830 // Register element_count is not modified by the function. |
| 3825 } | 3831 } |
| (...skipping 19 matching lines...) |
| 3845 // We assume that element_count*element_size + header_size does not | 3851 // We assume that element_count*element_size + header_size does not |
| 3846 // overflow. | 3852 // overflow. |
| 3847 lea(result_end, Operand(element_count, element_size, header_size)); | 3853 lea(result_end, Operand(element_count, element_size, header_size)); |
| 3848 addq(result_end, result); | 3854 addq(result_end, result); |
| 3849 j(carry, gc_required); | 3855 j(carry, gc_required); |
| 3850 Operand limit_operand = ExternalOperand(new_space_allocation_limit); | 3856 Operand limit_operand = ExternalOperand(new_space_allocation_limit); |
| 3851 cmpq(result_end, limit_operand); | 3857 cmpq(result_end, limit_operand); |
| 3852 j(above, gc_required); | 3858 j(above, gc_required); |
| 3853 | 3859 |
| 3854 // Update allocation top. | 3860 // Update allocation top. |
| 3855 UpdateAllocationTopHelper(result_end, scratch); | 3861 UpdateAllocationTopHelper(result_end, scratch, flags); |
| 3856 | 3862 |
| 3857 // Tag the result if requested. | 3863 // Tag the result if requested. |
| 3858 if ((flags & TAG_OBJECT) != 0) { | 3864 if ((flags & TAG_OBJECT) != 0) { |
| 3859 ASSERT(kHeapObjectTag == 1); | 3865 ASSERT(kHeapObjectTag == 1); |
| 3860 incq(result); | 3866 incq(result); |
| 3861 } | 3867 } |
| 3862 } | 3868 } |
| 3863 | 3869 |
| 3864 | 3870 |
| 3865 void MacroAssembler::AllocateInNewSpace(Register object_size, | 3871 void MacroAssembler::AllocateInNewSpace(Register object_size, |
| 3866 Register result, | 3872 Register result, |
| 3867 Register result_end, | 3873 Register result_end, |
| 3868 Register scratch, | 3874 Register scratch, |
| 3869 Label* gc_required, | 3875 Label* gc_required, |
| 3870 AllocationFlags flags) { | 3876 AllocationFlags flags) { |
| 3871 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); | 3877 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); |
| | 3878 ASSERT((flags & PRETENURE) == 0); |
| 3872 if (!FLAG_inline_new) { | 3879 if (!FLAG_inline_new) { |
| 3873 if (emit_debug_code()) { | 3880 if (emit_debug_code()) { |
| 3874 // Trash the registers to simulate an allocation failure. | 3881 // Trash the registers to simulate an allocation failure. |
| 3875 movl(result, Immediate(0x7091)); | 3882 movl(result, Immediate(0x7091)); |
| 3876 movl(result_end, Immediate(0x7191)); | 3883 movl(result_end, Immediate(0x7191)); |
| 3877 if (scratch.is_valid()) { | 3884 if (scratch.is_valid()) { |
| 3878 movl(scratch, Immediate(0x7291)); | 3885 movl(scratch, Immediate(0x7291)); |
| 3879 } | 3886 } |
| 3880 // object_size is left unchanged by this function. | 3887 // object_size is left unchanged by this function. |
| 3881 } | 3888 } |
| (...skipping 11 matching lines...) |
| 3893 if (!object_size.is(result_end)) { | 3900 if (!object_size.is(result_end)) { |
| 3894 movq(result_end, object_size); | 3901 movq(result_end, object_size); |
| 3895 } | 3902 } |
| 3896 addq(result_end, result); | 3903 addq(result_end, result); |
| 3897 j(carry, gc_required); | 3904 j(carry, gc_required); |
| 3898 Operand limit_operand = ExternalOperand(new_space_allocation_limit); | 3905 Operand limit_operand = ExternalOperand(new_space_allocation_limit); |
| 3899 cmpq(result_end, limit_operand); | 3906 cmpq(result_end, limit_operand); |
| 3900 j(above, gc_required); | 3907 j(above, gc_required); |
| 3901 | 3908 |
| 3902 // Update allocation top. | 3909 // Update allocation top. |
| 3903 UpdateAllocationTopHelper(result_end, scratch); | 3910 UpdateAllocationTopHelper(result_end, scratch, flags); |
| 3904 | 3911 |
| 3905 // Align the next allocation. Storing the filler map without checking top is | 3912 // Align the next allocation. Storing the filler map without checking top is |
| 3906 // always safe because the limit of the heap is always aligned. | 3913 // always safe because the limit of the heap is always aligned. |
| 3907 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 3914 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
| 3908 testq(result, Immediate(kDoubleAlignmentMask)); | 3915 testq(result, Immediate(kDoubleAlignmentMask)); |
| 3909 Check(zero, "Allocation is not double aligned"); | 3916 Check(zero, "Allocation is not double aligned"); |
| 3910 } | 3917 } |
| 3911 | 3918 |
| 3912 // Tag the result if requested. | 3919 // Tag the result if requested. |
| 3913 if ((flags & TAG_OBJECT) != 0) { | 3920 if ((flags & TAG_OBJECT) != 0) { |
| (...skipping 14 matching lines...) |
| 3928 Check(below, "Undo allocation of non allocated memory"); | 3935 Check(below, "Undo allocation of non allocated memory"); |
| 3929 #endif | 3936 #endif |
| 3930 movq(top_operand, object); | 3937 movq(top_operand, object); |
| 3931 } | 3938 } |
| 3932 | 3939 |
| 3933 | 3940 |
| 3934 void MacroAssembler::AllocateHeapNumber(Register result, | 3941 void MacroAssembler::AllocateHeapNumber(Register result, |
| 3935 Register scratch, | 3942 Register scratch, |
| 3936 Label* gc_required) { | 3943 Label* gc_required) { |
| 3937 // Allocate heap number in new space. | 3944 // Allocate heap number in new space. |
| 3938 AllocateInNewSpace(HeapNumber::kSize, | 3945 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT); |
| 3939 result, | |
| 3940 scratch, | |
| 3941 no_reg, | |
| 3942 gc_required, | |
| 3943 TAG_OBJECT); | |
| 3944 | 3946 |
| 3945 // Set the map. | 3947 // Set the map. |
| 3946 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex); | 3948 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex); |
| 3947 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 3949 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
| 3948 } | 3950 } |
| 3949 | 3951 |
| 3950 | 3952 |
| 3951 void MacroAssembler::AllocateTwoByteString(Register result, | 3953 void MacroAssembler::AllocateTwoByteString(Register result, |
| 3952 Register length, | 3954 Register length, |
| 3953 Register scratch1, | 3955 Register scratch1, |
| (...skipping 69 matching lines...) |
| 4023 movq(FieldOperand(result, String::kHashFieldOffset), | 4025 movq(FieldOperand(result, String::kHashFieldOffset), |
| 4024 Immediate(String::kEmptyHashField)); | 4026 Immediate(String::kEmptyHashField)); |
| 4025 } | 4027 } |
| 4026 | 4028 |
| 4027 | 4029 |
| 4028 void MacroAssembler::AllocateTwoByteConsString(Register result, | 4030 void MacroAssembler::AllocateTwoByteConsString(Register result, |
| 4029 Register scratch1, | 4031 Register scratch1, |
| 4030 Register scratch2, | 4032 Register scratch2, |
| 4031 Label* gc_required) { | 4033 Label* gc_required) { |
| 4032 // Allocate cons string in new space. | 4034 // Allocate cons string in new space. |
| 4033 AllocateInNewSpace(ConsString::kSize, | 4035 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
| 4034 result, | 4036 TAG_OBJECT); |
| 4035 scratch1, | |
| 4036 scratch2, | |
| 4037 gc_required, | |
| 4038 TAG_OBJECT); | |
| 4039 | 4037 |
| 4040 // Set the map. The other fields are left uninitialized. | 4038 // Set the map. The other fields are left uninitialized. |
| 4041 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex); | 4039 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex); |
| 4042 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 4040 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
| 4043 } | 4041 } |
| 4044 | 4042 |
| 4045 | 4043 |
| 4046 void MacroAssembler::AllocateAsciiConsString(Register result, | 4044 void MacroAssembler::AllocateAsciiConsString(Register result, |
| 4047 Register scratch1, | 4045 Register scratch1, |
| 4048 Register scratch2, | 4046 Register scratch2, |
| 4049 Label* gc_required) { | 4047 Label* gc_required) { |
| 4050 // Allocate cons string in new space. | 4048 // Allocate cons string in new space. |
| 4051 AllocateInNewSpace(ConsString::kSize, | 4049 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
| 4052 result, | 4050 TAG_OBJECT); |
| 4053 scratch1, | |
| 4054 scratch2, | |
| 4055 gc_required, | |
| 4056 TAG_OBJECT); | |
| 4057 | 4051 |
| 4058 // Set the map. The other fields are left uninitialized. | 4052 // Set the map. The other fields are left uninitialized. |
| 4059 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex); | 4053 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex); |
| 4060 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 4054 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
| 4061 } | 4055 } |
| 4062 | 4056 |
| 4063 | 4057 |
| 4064 void MacroAssembler::AllocateTwoByteSlicedString(Register result, | 4058 void MacroAssembler::AllocateTwoByteSlicedString(Register result, |
| 4065 Register scratch1, | 4059 Register scratch1, |
| 4066 Register scratch2, | 4060 Register scratch2, |
| 4067 Label* gc_required) { | 4061 Label* gc_required) { |
| 4068 // Allocate sliced string in new space. | 4062 // Allocate sliced string in new space. |
| 4069 AllocateInNewSpace(SlicedString::kSize, | 4063 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
| 4070 result, | 4064 TAG_OBJECT); |
| 4071 scratch1, | |
| 4072 scratch2, | |
| 4073 gc_required, | |
| 4074 TAG_OBJECT); | |
| 4075 | 4065 |
| 4076 // Set the map. The other fields are left uninitialized. | 4066 // Set the map. The other fields are left uninitialized. |
| 4077 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex); | 4067 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex); |
| 4078 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 4068 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
| 4079 } | 4069 } |
| 4080 | 4070 |
| 4081 | 4071 |
| 4082 void MacroAssembler::AllocateAsciiSlicedString(Register result, | 4072 void MacroAssembler::AllocateAsciiSlicedString(Register result, |
| 4083 Register scratch1, | 4073 Register scratch1, |
| 4084 Register scratch2, | 4074 Register scratch2, |
| 4085 Label* gc_required) { | 4075 Label* gc_required) { |
| 4086 // Allocate sliced string in new space. | 4076 // Allocate sliced string in new space. |
| 4087 AllocateInNewSpace(SlicedString::kSize, | 4077 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
| 4088 result, | 4078 TAG_OBJECT); |
| 4089 scratch1, | |
| 4090 scratch2, | |
| 4091 gc_required, | |
| 4092 TAG_OBJECT); | |
| 4093 | 4079 |
| 4094 // Set the map. The other fields are left uninitialized. | 4080 // Set the map. The other fields are left uninitialized. |
| 4095 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex); | 4081 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex); |
| 4096 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 4082 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
| 4097 } | 4083 } |
| 4098 | 4084 |
| 4099 | 4085 |
| 4100 // Copy memory, byte-by-byte, from source to destination. Not optimized for | 4086 // Copy memory, byte-by-byte, from source to destination. Not optimized for |
| 4101 // long or aligned copies. The contents of scratch and length are destroyed. | 4087 // long or aligned copies. The contents of scratch and length are destroyed. |
| 4102 // Destination is incremented by length, source, length and scratch are | 4088 // Destination is incremented by length, source, length and scratch are |
| (...skipping 516 matching lines...) |
| 4619 j(greater, &no_info_available); | 4605 j(greater, &no_info_available); |
| 4620 CompareRoot(MemOperand(scratch_reg, -AllocationSiteInfo::kSize), | 4606 CompareRoot(MemOperand(scratch_reg, -AllocationSiteInfo::kSize), |
| 4621 Heap::kAllocationSiteInfoMapRootIndex); | 4607 Heap::kAllocationSiteInfoMapRootIndex); |
| 4622 bind(&no_info_available); | 4608 bind(&no_info_available); |
| 4623 } | 4609 } |
| 4624 | 4610 |
| 4625 | 4611 |
| 4626 } } // namespace v8::internal | 4612 } } // namespace v8::internal |
| 4627 | 4613 |
| 4628 #endif // V8_TARGET_ARCH_X64 | 4614 #endif // V8_TARGET_ARCH_X64 |
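The change above threads a space choice through the x64 allocation helpers: the renamed `Allocate` (together with `LoadAllocationTopHelper` and `UpdateAllocationTopHelper`) uses the old pointer space's allocation top and limit when `PRETENURE` is set and new space's otherwise, while the two variable-sized `AllocateInNewSpace` overloads assert `(flags & PRETENURE) == 0`. The sketch below illustrates that bump-pointer pattern in isolation. It is a simplified stand-in, not V8's implementation: the `Space` struct, this `Allocate` signature, and the demo in `main` are invented for illustration; only the `PRETENURE` and `TAG_OBJECT` flags, the top/limit pair per space, and `kHeapObjectTag == 1` come from the diff.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Allocation flags mirroring the ones used in the diff.
enum AllocationFlags {
  NO_ALLOCATION_FLAGS = 0,
  TAG_OBJECT = 1 << 0,  // return a tagged heap-object pointer
  PRETENURE = 1 << 1    // allocate in old space instead of new space
};

constexpr uintptr_t kHeapObjectTag = 1;

// Simplified stand-in for an allocation space: a bump pointer and a limit.
struct Space {
  uintptr_t top;    // next free address (allocation top)
  uintptr_t limit;  // end of the usable region (allocation limit)
};

// Bump-pointer allocation. Returns 0 when the space is exhausted; the
// assembly version jumps to gc_required instead.
uintptr_t Allocate(Space* new_space, Space* old_space,
                   size_t object_size, int flags) {
  // Counterpart of LoadAllocationTopHelper: choose the space by PRETENURE.
  Space* space = (flags & PRETENURE) ? old_space : new_space;

  uintptr_t result = space->top;
  uintptr_t new_top = result + object_size;
  // The wrap-around check plays the role of j(carry, gc_required); the
  // limit comparison plays the role of cmpq(top_reg, limit_operand).
  if (new_top < result || new_top > space->limit) return 0;

  // Counterpart of UpdateAllocationTopHelper: publish the new top.
  space->top = new_top;

  // Tag the result if requested (the diff asserts kHeapObjectTag == 1).
  return (flags & TAG_OBJECT) ? result + kHeapObjectTag : result;
}

int main() {
  alignas(8) static unsigned char young[256];
  alignas(8) static unsigned char old_gen[256];
  Space new_space = {reinterpret_cast<uintptr_t>(young),
                     reinterpret_cast<uintptr_t>(young) + sizeof(young)};
  Space old_space = {reinterpret_cast<uintptr_t>(old_gen),
                     reinterpret_cast<uintptr_t>(old_gen) + sizeof(old_gen)};

  uintptr_t a = Allocate(&new_space, &old_space, 16, TAG_OBJECT);
  uintptr_t b = Allocate(&new_space, &old_space, 16, TAG_OBJECT | PRETENURE);
  std::printf("new-space object: %p, pretenured object: %p\n",
              reinterpret_cast<void*>(a), reinterpret_cast<void*>(b));
  return 0;
}
```

Folding the space selection into a flag keeps the helpers' register usage unchanged; only the `ExternalReference` used for the top/limit pair differs, which is presumably why the variable-sized overloads can simply forbid `PRETENURE` for now rather than grow the same branching.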