OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3670 matching lines...)
3681 Smi::FromInt(PropertyDetails::TypeField::kMask)); | 3681 Smi::FromInt(PropertyDetails::TypeField::kMask)); |
3682 j(not_zero, miss); | 3682 j(not_zero, miss); |
3683 | 3683 |
3684 // Get the value at the masked, scaled index. | 3684 // Get the value at the masked, scaled index. |
3685 const int kValueOffset = | 3685 const int kValueOffset = |
3686 SeededNumberDictionary::kElementsStartOffset + kPointerSize; | 3686 SeededNumberDictionary::kElementsStartOffset + kPointerSize; |
3687 movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); | 3687 movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); |
3688 } | 3688 } |
3689 | 3689 |
3690 | 3690 |
3691 ExternalReference MacroAssembler::GetTopAddress(AllocationTarget target) { | |
danno 2013/03/05 12:18:18
Do these belong in the macro assembler at all? Els
Hannes Payer (out of office) 2013/03/11 17:16:31
Done. I removed these helper functions.
3692 if (target == NEW_SPACE) { | |
3693 return ExternalReference::new_space_allocation_top_address(isolate()); | |
3694 } else { | |
3695 return ExternalReference::old_pointer_space_allocation_top_address( | |
3696 isolate()); | |
3697 } | |
3698 } | |
3699 | |
3700 | |
3701 ExternalReference MacroAssembler::GetLimitAddress(AllocationTarget target) { | |
3702 if (target == NEW_SPACE) { | |
3703 return ExternalReference::new_space_allocation_limit_address(isolate()); | |
3704 } else { | |
3705 return ExternalReference::old_pointer_space_allocation_limit_address( | |
3706 isolate()); | |
3707 } | |
3708 } | |
3709 | |
3710 | |
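Per the review thread above, these two target-selection accessors were dropped from the final patch. A minimal sketch of the direction danno's comment points at, with the space selection living outside MacroAssembler as a free helper; the function name and placement are assumptions for illustration only, and it uses just the two ExternalReference accessors already shown above:

// Hypothetical free helper, not part of this CL: resolves the allocation-top
// cell for a target space so the macro assembler does not need a member
// function for it. A matching *_limit_address variant would mirror this.
static ExternalReference AllocationTopReference(Isolate* isolate,
                                                AllocationTarget target) {
  return (target == NEW_SPACE)
      ? ExternalReference::new_space_allocation_top_address(isolate)
      : ExternalReference::old_pointer_space_allocation_top_address(isolate);
}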
3691 void MacroAssembler::LoadAllocationTopHelper(Register result, | 3711 void MacroAssembler::LoadAllocationTopHelper(Register result, |
3692 Register scratch, | 3712 Register scratch, |
3693 AllocationFlags flags) { | 3713 AllocationFlags flags, |
3694 ExternalReference new_space_allocation_top = | 3714 AllocationTarget target) { |
3695 ExternalReference::new_space_allocation_top_address(isolate()); | 3715 ExternalReference allocation_top = GetTopAddress(target); |
3696 | 3716 |
3697 // Just return if allocation top is already known. | 3717 // Just return if allocation top is already known. |
3698 if ((flags & RESULT_CONTAINS_TOP) != 0) { | 3718 if ((flags & RESULT_CONTAINS_TOP) != 0) { |
3699 // No use of scratch if allocation top is provided. | 3719 // No use of scratch if allocation top is provided. |
3700 ASSERT(!scratch.is_valid()); | 3720 ASSERT(!scratch.is_valid()); |
3701 #ifdef DEBUG | 3721 #ifdef DEBUG |
3702 // Assert that result actually contains top on entry. | 3722 // Assert that result actually contains top on entry. |
3703 Operand top_operand = ExternalOperand(new_space_allocation_top); | 3723 Operand top_operand = ExternalOperand(allocation_top); |
3704 cmpq(result, top_operand); | 3724 cmpq(result, top_operand); |
3705 Check(equal, "Unexpected allocation top"); | 3725 Check(equal, "Unexpected allocation top"); |
3706 #endif | 3726 #endif |
3707 return; | 3727 return; |
3708 } | 3728 } |
3709 | 3729 |
3710 // Move address of new object to result. Use scratch register if available, | 3730 // Move address of new object to result. Use scratch register if available, |
3711 // and keep address in scratch until call to UpdateAllocationTopHelper. | 3731 // and keep address in scratch until call to UpdateAllocationTopHelper. |
3712 if (scratch.is_valid()) { | 3732 if (scratch.is_valid()) { |
3713 LoadAddress(scratch, new_space_allocation_top); | 3733 LoadAddress(scratch, allocation_top); |
3714 movq(result, Operand(scratch, 0)); | 3734 movq(result, Operand(scratch, 0)); |
3715 } else { | 3735 } else { |
3716 Load(result, new_space_allocation_top); | 3736 Load(result, allocation_top); |
3717 } | 3737 } |
3718 } | 3738 } |
3719 | 3739 |
3720 | 3740 |
3721 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, | 3741 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, |
3722 Register scratch) { | 3742 Register scratch, |
3743 AllocationTarget target) { | |
3723 if (emit_debug_code()) { | 3744 if (emit_debug_code()) { |
3724 testq(result_end, Immediate(kObjectAlignmentMask)); | 3745 testq(result_end, Immediate(kObjectAlignmentMask)); |
3725 Check(zero, "Unaligned allocation in new space"); | 3746 Check(zero, "Unaligned allocation in new space"); |
3726 } | 3747 } |
3727 | 3748 |
3728 ExternalReference new_space_allocation_top = | 3749 ExternalReference allocation_top = GetTopAddress(target); |
3729 ExternalReference::new_space_allocation_top_address(isolate()); | |
3730 | 3750 |
3731 // Update new top. | 3751 // Update new top. |
3732 if (scratch.is_valid()) { | 3752 if (scratch.is_valid()) { |
3733 // Scratch already contains address of allocation top. | 3753 // Scratch already contains address of allocation top. |
3734 movq(Operand(scratch, 0), result_end); | 3754 movq(Operand(scratch, 0), result_end); |
3735 } else { | 3755 } else { |
3736 Store(new_space_allocation_top, result_end); | 3756 Store(allocation_top, result_end); |
3737 } | 3757 } |
3738 } | 3758 } |
3739 | 3759 |
3740 | 3760 |
3741 void MacroAssembler::AllocateInNewSpace(int object_size, | 3761 void MacroAssembler::Allocate(int object_size, |
3742 Register result, | 3762 Register result, |
3743 Register result_end, | 3763 Register result_end, |
3744 Register scratch, | 3764 Register scratch, |
3745 Label* gc_required, | 3765 Label* gc_required, |
3746 AllocationFlags flags) { | 3766 AllocationFlags flags, |
3767 AllocationTarget target) { | |
3747 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); | 3768 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); |
3748 if (!FLAG_inline_new) { | 3769 if (!FLAG_inline_new) { |
3749 if (emit_debug_code()) { | 3770 if (emit_debug_code()) { |
3750 // Trash the registers to simulate an allocation failure. | 3771 // Trash the registers to simulate an allocation failure. |
3751 movl(result, Immediate(0x7091)); | 3772 movl(result, Immediate(0x7091)); |
3752 if (result_end.is_valid()) { | 3773 if (result_end.is_valid()) { |
3753 movl(result_end, Immediate(0x7191)); | 3774 movl(result_end, Immediate(0x7191)); |
3754 } | 3775 } |
3755 if (scratch.is_valid()) { | 3776 if (scratch.is_valid()) { |
3756 movl(scratch, Immediate(0x7291)); | 3777 movl(scratch, Immediate(0x7291)); |
3757 } | 3778 } |
3758 } | 3779 } |
3759 jmp(gc_required); | 3780 jmp(gc_required); |
3760 return; | 3781 return; |
3761 } | 3782 } |
3762 ASSERT(!result.is(result_end)); | 3783 ASSERT(!result.is(result_end)); |
3763 | 3784 |
3764 // Load address of new object into result. | 3785 // Load address of new object into result. |
3765 LoadAllocationTopHelper(result, scratch, flags); | 3786 LoadAllocationTopHelper(result, scratch, flags, target); |
3766 | 3787 |
3767 // Align the next allocation. Storing the filler map without checking top is | 3788 // Align the next allocation. Storing the filler map without checking top is |
3768 // always safe because the limit of the heap is always aligned. | 3789 // always safe because the limit of the heap is always aligned. |
3769 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 3790 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
3770 testq(result, Immediate(kDoubleAlignmentMask)); | 3791 testq(result, Immediate(kDoubleAlignmentMask)); |
3771 Check(zero, "Allocation is not double aligned"); | 3792 Check(zero, "Allocation is not double aligned"); |
3772 } | 3793 } |
3773 | 3794 |
3774 // Calculate new top and bail out if new space is exhausted. | 3795 // Calculate new top and bail out if new space is exhausted. |
3775 ExternalReference new_space_allocation_limit = | 3796 ExternalReference allocation_limit = GetLimitAddress(target); |
3776 ExternalReference::new_space_allocation_limit_address(isolate()); | |
3777 | 3797 |
3778 Register top_reg = result_end.is_valid() ? result_end : result; | 3798 Register top_reg = result_end.is_valid() ? result_end : result; |
3779 | 3799 |
3780 if (!top_reg.is(result)) { | 3800 if (!top_reg.is(result)) { |
3781 movq(top_reg, result); | 3801 movq(top_reg, result); |
3782 } | 3802 } |
3783 addq(top_reg, Immediate(object_size)); | 3803 addq(top_reg, Immediate(object_size)); |
3784 j(carry, gc_required); | 3804 j(carry, gc_required); |
3785 Operand limit_operand = ExternalOperand(new_space_allocation_limit); | 3805 Operand limit_operand = ExternalOperand(allocation_limit); |
3786 cmpq(top_reg, limit_operand); | 3806 cmpq(top_reg, limit_operand); |
3787 j(above, gc_required); | 3807 j(above, gc_required); |
3788 | 3808 |
3789 // Update allocation top. | 3809 // Update allocation top. |
3790 UpdateAllocationTopHelper(top_reg, scratch); | 3810 UpdateAllocationTopHelper(top_reg, scratch, target); |
3791 | 3811 |
3792 bool tag_result = (flags & TAG_OBJECT) != 0; | 3812 bool tag_result = (flags & TAG_OBJECT) != 0; |
3793 if (top_reg.is(result)) { | 3813 if (top_reg.is(result)) { |
3794 if (tag_result) { | 3814 if (tag_result) { |
3795 subq(result, Immediate(object_size - kHeapObjectTag)); | 3815 subq(result, Immediate(object_size - kHeapObjectTag)); |
3796 } else { | 3816 } else { |
3797 subq(result, Immediate(object_size)); | 3817 subq(result, Immediate(object_size)); |
3798 } | 3818 } |
3799 } else if (tag_result) { | 3819 } else if (tag_result) { |
3800 // Tag the result if requested. | 3820 // Tag the result if requested. |
(...skipping 21 matching lines...)
3822 movl(scratch, Immediate(0x7291)); | 3842 movl(scratch, Immediate(0x7291)); |
3823 } | 3843 } |
3824 // Register element_count is not modified by the function. | 3844 // Register element_count is not modified by the function. |
3825 } | 3845 } |
3826 jmp(gc_required); | 3846 jmp(gc_required); |
3827 return; | 3847 return; |
3828 } | 3848 } |
3829 ASSERT(!result.is(result_end)); | 3849 ASSERT(!result.is(result_end)); |
3830 | 3850 |
3831 // Load address of new object into result. | 3851 // Load address of new object into result. |
3832 LoadAllocationTopHelper(result, scratch, flags); | 3852 LoadAllocationTopHelper(result, scratch, flags, NEW_SPACE); |
3833 | 3853 |
3834 // Align the next allocation. Storing the filler map without checking top is | 3854 // Align the next allocation. Storing the filler map without checking top is |
3835 // always safe because the limit of the heap is always aligned. | 3855 // always safe because the limit of the heap is always aligned. |
3836 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 3856 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
3837 testq(result, Immediate(kDoubleAlignmentMask)); | 3857 testq(result, Immediate(kDoubleAlignmentMask)); |
3838 Check(zero, "Allocation is not double aligned"); | 3858 Check(zero, "Allocation is not double aligned"); |
3839 } | 3859 } |
3840 | 3860 |
3841 // Calculate new top and bail out if new space is exhausted. | 3861 // Calculate new top and bail out if new space is exhausted. |
3842 ExternalReference new_space_allocation_limit = | 3862 ExternalReference new_space_allocation_limit = |
3843 ExternalReference::new_space_allocation_limit_address(isolate()); | 3863 ExternalReference::new_space_allocation_limit_address(isolate()); |
3844 | 3864 |
3845 // We assume that element_count*element_size + header_size does not | 3865 // We assume that element_count*element_size + header_size does not |
3846 // overflow. | 3866 // overflow. |
3847 lea(result_end, Operand(element_count, element_size, header_size)); | 3867 lea(result_end, Operand(element_count, element_size, header_size)); |
3848 addq(result_end, result); | 3868 addq(result_end, result); |
3849 j(carry, gc_required); | 3869 j(carry, gc_required); |
3850 Operand limit_operand = ExternalOperand(new_space_allocation_limit); | 3870 Operand limit_operand = ExternalOperand(new_space_allocation_limit); |
3851 cmpq(result_end, limit_operand); | 3871 cmpq(result_end, limit_operand); |
3852 j(above, gc_required); | 3872 j(above, gc_required); |
3853 | 3873 |
3854 // Update allocation top. | 3874 // Update allocation top. |
3855 UpdateAllocationTopHelper(result_end, scratch); | 3875 UpdateAllocationTopHelper(result_end, scratch, NEW_SPACE); |
3856 | 3876 |
3857 // Tag the result if requested. | 3877 // Tag the result if requested. |
3858 if ((flags & TAG_OBJECT) != 0) { | 3878 if ((flags & TAG_OBJECT) != 0) { |
3859 ASSERT(kHeapObjectTag == 1); | 3879 ASSERT(kHeapObjectTag == 1); |
3860 incq(result); | 3880 incq(result); |
3861 } | 3881 } |
3862 } | 3882 } |
3863 | 3883 |
3864 | 3884 |
3865 void MacroAssembler::AllocateInNewSpace(Register object_size, | 3885 void MacroAssembler::AllocateInNewSpace(Register object_size, |
(...skipping 12 matching lines...)
3878 movl(scratch, Immediate(0x7291)); | 3898 movl(scratch, Immediate(0x7291)); |
3879 } | 3899 } |
3880 // object_size is left unchanged by this function. | 3900 // object_size is left unchanged by this function. |
3881 } | 3901 } |
3882 jmp(gc_required); | 3902 jmp(gc_required); |
3883 return; | 3903 return; |
3884 } | 3904 } |
3885 ASSERT(!result.is(result_end)); | 3905 ASSERT(!result.is(result_end)); |
3886 | 3906 |
3887 // Load address of new object into result. | 3907 // Load address of new object into result. |
3888 LoadAllocationTopHelper(result, scratch, flags); | 3908 LoadAllocationTopHelper(result, scratch, flags, NEW_SPACE); |
3889 | 3909 |
3890 // Calculate new top and bail out if new space is exhausted. | 3910 // Calculate new top and bail out if new space is exhausted. |
3891 ExternalReference new_space_allocation_limit = | 3911 ExternalReference new_space_allocation_limit = |
3892 ExternalReference::new_space_allocation_limit_address(isolate()); | 3912 ExternalReference::new_space_allocation_limit_address(isolate()); |
3893 if (!object_size.is(result_end)) { | 3913 if (!object_size.is(result_end)) { |
3894 movq(result_end, object_size); | 3914 movq(result_end, object_size); |
3895 } | 3915 } |
3896 addq(result_end, result); | 3916 addq(result_end, result); |
3897 j(carry, gc_required); | 3917 j(carry, gc_required); |
3898 Operand limit_operand = ExternalOperand(new_space_allocation_limit); | 3918 Operand limit_operand = ExternalOperand(new_space_allocation_limit); |
3899 cmpq(result_end, limit_operand); | 3919 cmpq(result_end, limit_operand); |
3900 j(above, gc_required); | 3920 j(above, gc_required); |
3901 | 3921 |
3902 // Update allocation top. | 3922 // Update allocation top. |
3903 UpdateAllocationTopHelper(result_end, scratch); | 3923 UpdateAllocationTopHelper(result_end, scratch, NEW_SPACE); |
3904 | 3924 |
3905 // Align the next allocation. Storing the filler map without checking top is | 3925 // Align the next allocation. Storing the filler map without checking top is |
3906 // always safe because the limit of the heap is always aligned. | 3926 // always safe because the limit of the heap is always aligned. |
3907 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 3927 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
3908 testq(result, Immediate(kDoubleAlignmentMask)); | 3928 testq(result, Immediate(kDoubleAlignmentMask)); |
3909 Check(zero, "Allocation is not double aligned"); | 3929 Check(zero, "Allocation is not double aligned"); |
3910 } | 3930 } |
3911 | 3931 |
3912 // Tag the result if requested. | 3932 // Tag the result if requested. |
3913 if ((flags & TAG_OBJECT) != 0) { | 3933 if ((flags & TAG_OBJECT) != 0) { |
(...skipping 14 matching lines...)
3928 Check(below, "Undo allocation of non allocated memory"); | 3948 Check(below, "Undo allocation of non allocated memory"); |
3929 #endif | 3949 #endif |
3930 movq(top_operand, object); | 3950 movq(top_operand, object); |
3931 } | 3951 } |
3932 | 3952 |
3933 | 3953 |
3934 void MacroAssembler::AllocateHeapNumber(Register result, | 3954 void MacroAssembler::AllocateHeapNumber(Register result, |
3935 Register scratch, | 3955 Register scratch, |
3936 Label* gc_required) { | 3956 Label* gc_required) { |
3937 // Allocate heap number in new space. | 3957 // Allocate heap number in new space. |
3938 AllocateInNewSpace(HeapNumber::kSize, | 3958 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT, |
3939 result, | 3959 NEW_SPACE); |
3940 scratch, | |
3941 no_reg, | |
3942 gc_required, | |
3943 TAG_OBJECT); | |
3944 | 3960 |
3945 // Set the map. | 3961 // Set the map. |
3946 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex); | 3962 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex); |
3947 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 3963 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
3948 } | 3964 } |
3949 | 3965 |
3950 | 3966 |
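The AllocateHeapNumber change above shows the pattern every caller in this file follows: the old AllocateInNewSpace call becomes Allocate(..., NEW_SPACE). A hedged sketch of what the refactor is meant to enable, namely the same fast path targeting old pointer space; the enumerator name OLD_POINTER_SPACE is an assumption inferred from the old_pointer_space_allocation_* external references used earlier and does not appear in the lines shown here:

// Hypothetical call site, not in this CL: pretenure a heap number by
// bump-allocating it in old pointer space instead of new space, then set
// its map exactly as AllocateHeapNumber does above.
Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT,
         OLD_POINTER_SPACE);
LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);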
3951 void MacroAssembler::AllocateTwoByteString(Register result, | 3967 void MacroAssembler::AllocateTwoByteString(Register result, |
3952 Register length, | 3968 Register length, |
3953 Register scratch1, | 3969 Register scratch1, |
(...skipping 69 matching lines...)
4023 movq(FieldOperand(result, String::kHashFieldOffset), | 4039 movq(FieldOperand(result, String::kHashFieldOffset), |
4024 Immediate(String::kEmptyHashField)); | 4040 Immediate(String::kEmptyHashField)); |
4025 } | 4041 } |
4026 | 4042 |
4027 | 4043 |
4028 void MacroAssembler::AllocateTwoByteConsString(Register result, | 4044 void MacroAssembler::AllocateTwoByteConsString(Register result, |
4029 Register scratch1, | 4045 Register scratch1, |
4030 Register scratch2, | 4046 Register scratch2, |
4031 Label* gc_required) { | 4047 Label* gc_required) { |
4032 // Allocate cons string in new space. | 4048 // Allocate cons string in new space. |
4033 AllocateInNewSpace(ConsString::kSize, | 4049 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
4034 result, | 4050 TAG_OBJECT, NEW_SPACE); |
4035 scratch1, | |
4036 scratch2, | |
4037 gc_required, | |
4038 TAG_OBJECT); | |
4039 | 4051 |
4040 // Set the map. The other fields are left uninitialized. | 4052 // Set the map. The other fields are left uninitialized. |
4041 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex); | 4053 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex); |
4042 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 4054 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
4043 } | 4055 } |
4044 | 4056 |
4045 | 4057 |
4046 void MacroAssembler::AllocateAsciiConsString(Register result, | 4058 void MacroAssembler::AllocateAsciiConsString(Register result, |
4047 Register scratch1, | 4059 Register scratch1, |
4048 Register scratch2, | 4060 Register scratch2, |
4049 Label* gc_required) { | 4061 Label* gc_required) { |
4050 // Allocate ASCII cons string in new space. | 4062 // Allocate ASCII cons string in new space. |
4051 AllocateInNewSpace(ConsString::kSize, | 4063 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
4052 result, | 4064 TAG_OBJECT, NEW_SPACE); |
4053 scratch1, | |
4054 scratch2, | |
4055 gc_required, | |
4056 TAG_OBJECT); | |
4057 | 4065 |
4058 // Set the map. The other fields are left uninitialized. | 4066 // Set the map. The other fields are left uninitialized. |
4059 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex); | 4067 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex); |
4060 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 4068 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
4061 } | 4069 } |
4062 | 4070 |
4063 | 4071 |
4064 void MacroAssembler::AllocateTwoByteSlicedString(Register result, | 4072 void MacroAssembler::AllocateTwoByteSlicedString(Register result, |
4065 Register scratch1, | 4073 Register scratch1, |
4066 Register scratch2, | 4074 Register scratch2, |
4067 Label* gc_required) { | 4075 Label* gc_required) { |
4068 // Allocate sliced string in new space. | 4076 // Allocate sliced string in new space. |
4069 AllocateInNewSpace(SlicedString::kSize, | 4077 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
4070 result, | 4078 TAG_OBJECT, NEW_SPACE); |
4071 scratch1, | |
4072 scratch2, | |
4073 gc_required, | |
4074 TAG_OBJECT); | |
4075 | 4079 |
4076 // Set the map. The other fields are left uninitialized. | 4080 // Set the map. The other fields are left uninitialized. |
4077 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex); | 4081 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex); |
4078 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 4082 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
4079 } | 4083 } |
4080 | 4084 |
4081 | 4085 |
4082 void MacroAssembler::AllocateAsciiSlicedString(Register result, | 4086 void MacroAssembler::AllocateAsciiSlicedString(Register result, |
4083 Register scratch1, | 4087 Register scratch1, |
4084 Register scratch2, | 4088 Register scratch2, |
4085 Label* gc_required) { | 4089 Label* gc_required) { |
4086 // Allocate ASCII sliced string in new space. | 4090 // Allocate ASCII sliced string in new space. |
4087 AllocateInNewSpace(SlicedString::kSize, | 4091 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
4088 result, | 4092 TAG_OBJECT, NEW_SPACE); |
4089 scratch1, | |
4090 scratch2, | |
4091 gc_required, | |
4092 TAG_OBJECT); | |
4093 | 4093 |
4094 // Set the map. The other fields are left uninitialized. | 4094 // Set the map. The other fields are left uninitialized. |
4095 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex); | 4095 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex); |
4096 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 4096 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
4097 } | 4097 } |
4098 | 4098 |
4099 | 4099 |
4100 // Copy memory, byte-by-byte, from source to destination. Not optimized for | 4100 // Copy memory, byte-by-byte, from source to destination. Not optimized for |
4101 // long or aligned copies. The contents of scratch and length are destroyed. | 4101 // long or aligned copies. The contents of scratch and length are destroyed. |
4102 // Destination is incremented by length, source, length and scratch are | 4102 // Destination is incremented by length, source, length and scratch are |
(...skipping 516 matching lines...)
4619 j(greater, &no_info_available); | 4619 j(greater, &no_info_available); |
4620 CompareRoot(MemOperand(scratch_reg, -AllocationSiteInfo::kSize), | 4620 CompareRoot(MemOperand(scratch_reg, -AllocationSiteInfo::kSize), |
4621 Heap::kAllocationSiteInfoMapRootIndex); | 4621 Heap::kAllocationSiteInfoMapRootIndex); |
4622 bind(&no_info_available); | 4622 bind(&no_info_available); |
4623 } | 4623 } |
4624 | 4624 |
4625 | 4625 |
4626 } } // namespace v8::internal | 4626 } } // namespace v8::internal |
4627 | 4627 |
4628 #endif // V8_TARGET_ARCH_X64 | 4628 #endif // V8_TARGET_ARCH_X64 |
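For orientation, the fast path Allocate() emits above reduces to a bump-pointer sequence: load the space's allocation top, add the object size, take the gc_required path on arithmetic overflow or when the allocation limit is exceeded, store the new top back, and tag the result when TAG_OBJECT is set. Below is a C++-level paraphrase of that sequence; it is illustrative only, the names and the stand-in kTag constant are assumptions, and the real logic runs as generated machine code against the isolate's top/limit cells:

#include <stdint.h>

// Illustrative paraphrase of the emitted fast path. Returns false where the
// generated code would jump to the caller's gc_required label.
static bool TryBumpAllocate(uintptr_t* top, uintptr_t limit, int size,
                            bool tag_object, uintptr_t* result) {
  uintptr_t object = *top;
  uintptr_t new_top = object + size;              // addq(top_reg, object_size)
  if (new_top < object) return false;             // j(carry, gc_required)
  if (new_top > limit) return false;              // cmpq + j(above, gc_required)
  *top = new_top;                                 // UpdateAllocationTopHelper
  const uintptr_t kTag = 1;                       // stand-in for kHeapObjectTag
  *result = tag_object ? object + kTag : object;  // TAG_OBJECT tagging
  return true;
}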