OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 645 matching lines...)
656 // 3. Arguments object. | 656 // 3. Arguments object. |
657 __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize)); | 657 __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize)); |
658 | 658 |
659 // Do the allocation of all three objects in one go. | 659 // Do the allocation of all three objects in one go. |
660 __ Allocate(r8, rax, r9, no_reg, &runtime, TAG_OBJECT); | 660 __ Allocate(r8, rax, r9, no_reg, &runtime, TAG_OBJECT); |
661 | 661 |
662 // rax = address of new object(s) (tagged) | 662 // rax = address of new object(s) (tagged) |
663 // r11 = argument count (untagged) | 663 // r11 = argument count (untagged) |
664 // Get the arguments map from the current native context into r9. | 664 // Get the arguments map from the current native context into r9. |
665 Label has_mapped_parameters, instantiate; | 665 Label has_mapped_parameters, instantiate; |
666 __ movp(r9, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 666 __ movp(r9, NativeContextOperand()); |
667 __ movp(r9, FieldOperand(r9, JSGlobalObject::kNativeContextOffset)); | |
668 __ testp(rbx, rbx); | 667 __ testp(rbx, rbx); |
669 __ j(not_zero, &has_mapped_parameters, Label::kNear); | 668 __ j(not_zero, &has_mapped_parameters, Label::kNear); |
670 | 669 |
671 const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX; | 670 const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX; |
672 __ movp(r9, Operand(r9, Context::SlotOffset(kIndex))); | 671 __ movp(r9, Operand(r9, Context::SlotOffset(kIndex))); |
673 __ jmp(&instantiate, Label::kNear); | 672 __ jmp(&instantiate, Label::kNear); |
674 | 673 |
675 const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX; | 674 const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX; |
676 __ bind(&has_mapped_parameters); | 675 __ bind(&has_mapped_parameters); |
677 __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex))); | 676 __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex))); |
(...skipping 260 matching lines...)
938 __ testp(rax, rax); | 937 __ testp(rax, rax); |
939 __ j(zero, &add_arguments_object, Label::kNear); | 938 __ j(zero, &add_arguments_object, Label::kNear); |
940 __ leap(rax, Operand(rax, times_pointer_size, FixedArray::kHeaderSize)); | 939 __ leap(rax, Operand(rax, times_pointer_size, FixedArray::kHeaderSize)); |
941 __ bind(&add_arguments_object); | 940 __ bind(&add_arguments_object); |
942 __ addp(rax, Immediate(Heap::kStrictArgumentsObjectSize)); | 941 __ addp(rax, Immediate(Heap::kStrictArgumentsObjectSize)); |
943 | 942 |
944 // Do the allocation of both objects in one go. | 943 // Do the allocation of both objects in one go. |
945 __ Allocate(rax, rax, rbx, no_reg, &runtime, TAG_OBJECT); | 944 __ Allocate(rax, rax, rbx, no_reg, &runtime, TAG_OBJECT); |
946 | 945 |
947 // Get the arguments map from the current native context. | 946 // Get the arguments map from the current native context. |
948 __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 947 __ movp(rdi, NativeContextOperand()); |
949 __ movp(rdi, FieldOperand(rdi, JSGlobalObject::kNativeContextOffset)); | 948 __ movp(rdi, ContextOperand(rdi, Context::STRICT_ARGUMENTS_MAP_INDEX)); |
950 const int offset = Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX); | |
951 __ movp(rdi, Operand(rdi, offset)); | |
952 | 949 |
953 __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi); | 950 __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi); |
954 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); | 951 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); |
955 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); | 952 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); |
956 __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister); | 953 __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister); |
957 | 954 |
958 // Get the length (smi tagged) and set that as an in-object property too. | 955 // Get the length (smi tagged) and set that as an in-object property too. |
959 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 956 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
960 __ movp(FieldOperand(rax, JSObject::kHeaderSize + | 957 __ movp(FieldOperand(rax, JSObject::kHeaderSize + |
961 Heap::kArgumentsLengthIndex * kPointerSize), | 958 Heap::kArgumentsLengthIndex * kPointerSize), |
(...skipping 854 matching lines...)
1816 | 1813 |
1817 __ bind(&check_allocation_site); | 1814 __ bind(&check_allocation_site); |
1818 // If we came here, we need to see if we are the array function. | 1815 // If we came here, we need to see if we are the array function. |
1819 // If we didn't have a matching function, and we didn't find the megamorph | 1816 // If we didn't have a matching function, and we didn't find the megamorph |
1820 // sentinel, then we have in the slot either some other function or an | 1817 // sentinel, then we have in the slot either some other function or an |
1821 // AllocationSite. | 1818 // AllocationSite. |
1822 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex); | 1819 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex); |
1823 __ j(not_equal, &miss); | 1820 __ j(not_equal, &miss); |
1824 | 1821 |
1825 // Make sure the function is the Array() function | 1822 // Make sure the function is the Array() function |
1826 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11); | 1823 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); |
1827 __ cmpp(rdi, r11); | 1824 __ cmpp(rdi, r11); |
1828 __ j(not_equal, &megamorphic); | 1825 __ j(not_equal, &megamorphic); |
1829 __ jmp(&done); | 1826 __ jmp(&done); |
1830 | 1827 |
1831 __ bind(&miss); | 1828 __ bind(&miss); |
1832 | 1829 |
1833 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 1830 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
1834 // megamorphic. | 1831 // megamorphic. |
1835 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex); | 1832 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex); |
1836 __ j(equal, &initialize); | 1833 __ j(equal, &initialize); |
1837 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 1834 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
1838 // write-barrier is needed. | 1835 // write-barrier is needed. |
1839 __ bind(&megamorphic); | 1836 __ bind(&megamorphic); |
1840 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), | 1837 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), |
1841 TypeFeedbackVector::MegamorphicSentinel(isolate)); | 1838 TypeFeedbackVector::MegamorphicSentinel(isolate)); |
1842 __ jmp(&done); | 1839 __ jmp(&done); |
1843 | 1840 |
1844 // An uninitialized cache is patched with the function or sentinel to | 1841 // An uninitialized cache is patched with the function or sentinel to |
1845 // indicate the ElementsKind if function is the Array constructor. | 1842 // indicate the ElementsKind if function is the Array constructor. |
1846 __ bind(&initialize); | 1843 __ bind(&initialize); |
1847 | 1844 |
1848 // Make sure the function is the Array() function | 1845 // Make sure the function is the Array() function |
1849 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11); | 1846 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); |
1850 __ cmpp(rdi, r11); | 1847 __ cmpp(rdi, r11); |
1851 __ j(not_equal, ¬_array_function); | 1848 __ j(not_equal, ¬_array_function); |
1852 | 1849 |
1853 CreateAllocationSiteStub create_stub(isolate); | 1850 CreateAllocationSiteStub create_stub(isolate); |
1854 CallStubInRecordCallTarget(masm, &create_stub); | 1851 CallStubInRecordCallTarget(masm, &create_stub); |
1855 __ jmp(&done_no_smi_convert); | 1852 __ jmp(&done_no_smi_convert); |
1856 | 1853 |
1857 __ bind(¬_array_function); | 1854 __ bind(¬_array_function); |
1858 CreateWeakCellStub weak_cell_stub(isolate); | 1855 CreateWeakCellStub weak_cell_stub(isolate); |
1859 CallStubInRecordCallTarget(masm, &weak_cell_stub); | 1856 CallStubInRecordCallTarget(masm, &weak_cell_stub); |
(...skipping 47 matching lines...)
1907 __ movp(rdx, rdi); | 1904 __ movp(rdx, rdi); |
1908 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1905 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
1909 } | 1906 } |
1910 | 1907 |
1911 | 1908 |
1912 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | 1909 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
1913 // rdi - function | 1910 // rdi - function |
1914 // rdx - slot id | 1911 // rdx - slot id |
1915 // rbx - vector | 1912 // rbx - vector |
1916 // rcx - allocation site (loaded from vector[slot]). | 1913 // rcx - allocation site (loaded from vector[slot]). |
1917 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8); | 1914 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); |
1918 __ cmpp(rdi, r8); | 1915 __ cmpp(rdi, r8); |
1919 __ j(not_equal, miss); | 1916 __ j(not_equal, miss); |
1920 | 1917 |
1921 __ movp(rax, Immediate(arg_count())); | 1918 __ movp(rax, Immediate(arg_count())); |
1922 | 1919 |
1923 // Increment the call count for monomorphic function calls. | 1920 // Increment the call count for monomorphic function calls. |
1924 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, | 1921 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, |
1925 FixedArray::kHeaderSize + kPointerSize), | 1922 FixedArray::kHeaderSize + kPointerSize), |
1926 Smi::FromInt(CallICNexus::kCallCountIncrement)); | 1923 Smi::FromInt(CallICNexus::kCallCountIncrement)); |
1927 | 1924 |
(...skipping 101 matching lines...)
2029 | 2026 |
2030 // We are going monomorphic, provided we actually have a JSFunction. | 2027 // We are going monomorphic, provided we actually have a JSFunction. |
2031 __ JumpIfSmi(rdi, &miss); | 2028 __ JumpIfSmi(rdi, &miss); |
2032 | 2029 |
2033 // Goto miss case if we do not have a function. | 2030 // Goto miss case if we do not have a function. |
2034 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 2031 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
2035 __ j(not_equal, &miss); | 2032 __ j(not_equal, &miss); |
2036 | 2033 |
2037 // Make sure the function is not the Array() function, which requires special | 2034 // Make sure the function is not the Array() function, which requires special |
2038 // behavior on MISS. | 2035 // behavior on MISS. |
2039 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); | 2036 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx); |
2040 __ cmpp(rdi, rcx); | 2037 __ cmpp(rdi, rcx); |
2041 __ j(equal, &miss); | 2038 __ j(equal, &miss); |
2042 | 2039 |
2043 // Make sure the function belongs to the same native context (which implies | 2040 // Make sure the function belongs to the same native context. |
2044 // the same global object). | |
2045 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); | 2041 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); |
2046 __ movp(rcx, ContextOperand(rcx, Context::GLOBAL_OBJECT_INDEX)); | 2042 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX)); |
2047 __ cmpp(rcx, GlobalObjectOperand()); | 2043 __ cmpp(rcx, NativeContextOperand()); |
2048 __ j(not_equal, &miss); | 2044 __ j(not_equal, &miss); |
2049 | 2045 |
2050 // Update stats. | 2046 // Update stats. |
2051 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1)); | 2047 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1)); |
2052 | 2048 |
2053 // Initialize the call counter. | 2049 // Initialize the call counter. |
2054 __ Move(FieldOperand(rbx, rdx, times_pointer_size, | 2050 __ Move(FieldOperand(rbx, rdx, times_pointer_size, |
2055 FixedArray::kHeaderSize + kPointerSize), | 2051 FixedArray::kHeaderSize + kPointerSize), |
2056 Smi::FromInt(CallICNexus::kCallCountIncrement)); | 2052 Smi::FromInt(CallICNexus::kCallCountIncrement)); |
2057 | 2053 |
(...skipping 3333 matching lines...)
5391 kStackSpace, nullptr, return_value_operand, NULL); | 5387 kStackSpace, nullptr, return_value_operand, NULL); |
5392 } | 5388 } |
5393 | 5389 |
5394 | 5390 |
5395 #undef __ | 5391 #undef __ |
5396 | 5392 |
5397 } // namespace internal | 5393 } // namespace internal |
5398 } // namespace v8 | 5394 } // namespace v8 |
5399 | 5395 |
5400 #endif // V8_TARGET_ARCH_X64 | 5396 #endif // V8_TARGET_ARCH_X64 |