| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
| 6 | 6 |
| 7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
| (...skipping 645 matching lines...) |
| 656 // 3. Arguments object. | 656 // 3. Arguments object. |
| 657 __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize)); | 657 __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize)); |
| 658 | 658 |
| 659 // Do the allocation of all three objects in one go. | 659 // Do the allocation of all three objects in one go. |
| 660 __ Allocate(r8, rax, r9, no_reg, &runtime, TAG_OBJECT); | 660 __ Allocate(r8, rax, r9, no_reg, &runtime, TAG_OBJECT); |
| 661 | 661 |
| 662 // rax = address of new object(s) (tagged) | 662 // rax = address of new object(s) (tagged) |
| 663 // r11 = argument count (untagged) | 663 // r11 = argument count (untagged) |
| 664 // Get the arguments map from the current native context into r9. | 664 // Get the arguments map from the current native context into r9. |
| 665 Label has_mapped_parameters, instantiate; | 665 Label has_mapped_parameters, instantiate; |
| 666 __ movp(r9, NativeContextOperand()); | 666 __ movp(r9, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| | 667 __ movp(r9, FieldOperand(r9, JSGlobalObject::kNativeContextOffset)); |
| 667 __ testp(rbx, rbx); | 668 __ testp(rbx, rbx); |
| 668 __ j(not_zero, &has_mapped_parameters, Label::kNear); | 669 __ j(not_zero, &has_mapped_parameters, Label::kNear); |
| 669 | 670 |
| 670 const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX; | 671 const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX; |
| 671 __ movp(r9, Operand(r9, Context::SlotOffset(kIndex))); | 672 __ movp(r9, Operand(r9, Context::SlotOffset(kIndex))); |
| 672 __ jmp(&instantiate, Label::kNear); | 673 __ jmp(&instantiate, Label::kNear); |
| 673 | 674 |
| 674 const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX; | 675 const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX; |
| 675 __ bind(&has_mapped_parameters); | 676 __ bind(&has_mapped_parameters); |
| 676 __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex))); | 677 __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex))); |
| (...skipping 260 matching lines...) |
| 937 __ testp(rax, rax); | 938 __ testp(rax, rax); |
| 938 __ j(zero, &add_arguments_object, Label::kNear); | 939 __ j(zero, &add_arguments_object, Label::kNear); |
| 939 __ leap(rax, Operand(rax, times_pointer_size, FixedArray::kHeaderSize)); | 940 __ leap(rax, Operand(rax, times_pointer_size, FixedArray::kHeaderSize)); |
| 940 __ bind(&add_arguments_object); | 941 __ bind(&add_arguments_object); |
| 941 __ addp(rax, Immediate(Heap::kStrictArgumentsObjectSize)); | 942 __ addp(rax, Immediate(Heap::kStrictArgumentsObjectSize)); |
| 942 | 943 |
| 943 // Do the allocation of both objects in one go. | 944 // Do the allocation of both objects in one go. |
| 944 __ Allocate(rax, rax, rbx, no_reg, &runtime, TAG_OBJECT); | 945 __ Allocate(rax, rax, rbx, no_reg, &runtime, TAG_OBJECT); |
| 945 | 946 |
| 946 // Get the arguments map from the current native context. | 947 // Get the arguments map from the current native context. |
| 947 __ movp(rdi, NativeContextOperand()); | 948 __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 948 __ movp(rdi, ContextOperand(rdi, Context::STRICT_ARGUMENTS_MAP_INDEX)); | 949 __ movp(rdi, FieldOperand(rdi, JSGlobalObject::kNativeContextOffset)); |
| | 950 const int offset = Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX); |
| | 951 __ movp(rdi, Operand(rdi, offset)); |
| 949 | 952 |
| 950 __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi); | 953 __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi); |
| 951 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); | 954 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); |
| 952 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); | 955 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); |
| 953 __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister); | 956 __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister); |
| 954 | 957 |
| 955 // Get the length (smi tagged) and set that as an in-object property too. | 958 // Get the length (smi tagged) and set that as an in-object property too. |
| 956 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 959 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
| 957 __ movp(FieldOperand(rax, JSObject::kHeaderSize + | 960 __ movp(FieldOperand(rax, JSObject::kHeaderSize + |
| 958 Heap::kArgumentsLengthIndex * kPointerSize), | 961 Heap::kArgumentsLengthIndex * kPointerSize), |
| (...skipping 854 matching lines...) |
| 1813 | 1816 |
| 1814 __ bind(&check_allocation_site); | 1817 __ bind(&check_allocation_site); |
| 1815 // If we came here, we need to see if we are the array function. | 1818 // If we came here, we need to see if we are the array function. |
| 1816 // If we didn't have a matching function, and we didn't find the megamorph | 1819 // If we didn't have a matching function, and we didn't find the megamorph |
| 1817 // sentinel, then we have in the slot either some other function or an | 1820 // sentinel, then we have in the slot either some other function or an |
| 1818 // AllocationSite. | 1821 // AllocationSite. |
| 1819 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex); | 1822 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex); |
| 1820 __ j(not_equal, &miss); | 1823 __ j(not_equal, &miss); |
| 1821 | 1824 |
| 1822 // Make sure the function is the Array() function | 1825 // Make sure the function is the Array() function |
| 1823 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); | 1826 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11); |
| 1824 __ cmpp(rdi, r11); | 1827 __ cmpp(rdi, r11); |
| 1825 __ j(not_equal, &megamorphic); | 1828 __ j(not_equal, &megamorphic); |
| 1826 __ jmp(&done); | 1829 __ jmp(&done); |
| 1827 | 1830 |
| 1828 __ bind(&miss); | 1831 __ bind(&miss); |
| 1829 | 1832 |
| 1830 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 1833 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
| 1831 // megamorphic. | 1834 // megamorphic. |
| 1832 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex); | 1835 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex); |
| 1833 __ j(equal, &initialize); | 1836 __ j(equal, &initialize); |
| 1834 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 1837 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
| 1835 // write-barrier is needed. | 1838 // write-barrier is needed. |
| 1836 __ bind(&megamorphic); | 1839 __ bind(&megamorphic); |
| 1837 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), | 1840 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), |
| 1838 TypeFeedbackVector::MegamorphicSentinel(isolate)); | 1841 TypeFeedbackVector::MegamorphicSentinel(isolate)); |
| 1839 __ jmp(&done); | 1842 __ jmp(&done); |
| 1840 | 1843 |
| 1841 // An uninitialized cache is patched with the function or sentinel to | 1844 // An uninitialized cache is patched with the function or sentinel to |
| 1842 // indicate the ElementsKind if function is the Array constructor. | 1845 // indicate the ElementsKind if function is the Array constructor. |
| 1843 __ bind(&initialize); | 1846 __ bind(&initialize); |
| 1844 | 1847 |
| 1845 // Make sure the function is the Array() function | 1848 // Make sure the function is the Array() function |
| 1846 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); | 1849 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11); |
| 1847 __ cmpp(rdi, r11); | 1850 __ cmpp(rdi, r11); |
| 1848 __ j(not_equal, ¬_array_function); | 1851 __ j(not_equal, ¬_array_function); |
| 1849 | 1852 |
| 1850 CreateAllocationSiteStub create_stub(isolate); | 1853 CreateAllocationSiteStub create_stub(isolate); |
| 1851 CallStubInRecordCallTarget(masm, &create_stub); | 1854 CallStubInRecordCallTarget(masm, &create_stub); |
| 1852 __ jmp(&done_no_smi_convert); | 1855 __ jmp(&done_no_smi_convert); |
| 1853 | 1856 |
| 1854 __ bind(¬_array_function); | 1857 __ bind(¬_array_function); |
| 1855 CreateWeakCellStub weak_cell_stub(isolate); | 1858 CreateWeakCellStub weak_cell_stub(isolate); |
| 1856 CallStubInRecordCallTarget(masm, &weak_cell_stub); | 1859 CallStubInRecordCallTarget(masm, &weak_cell_stub); |
| (...skipping 47 matching lines...) |
| 1904 __ movp(rdx, rdi); | 1907 __ movp(rdx, rdi); |
| 1905 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1908 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 1906 } | 1909 } |
| 1907 | 1910 |
| 1908 | 1911 |
| 1909 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | 1912 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
| 1910 // rdi - function | 1913 // rdi - function |
| 1911 // rdx - slot id | 1914 // rdx - slot id |
| 1912 // rbx - vector | 1915 // rbx - vector |
| 1913 // rcx - allocation site (loaded from vector[slot]). | 1916 // rcx - allocation site (loaded from vector[slot]). |
| 1914 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); | 1917 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8); |
| 1915 __ cmpp(rdi, r8); | 1918 __ cmpp(rdi, r8); |
| 1916 __ j(not_equal, miss); | 1919 __ j(not_equal, miss); |
| 1917 | 1920 |
| 1918 __ movp(rax, Immediate(arg_count())); | 1921 __ movp(rax, Immediate(arg_count())); |
| 1919 | 1922 |
| 1920 // Increment the call count for monomorphic function calls. | 1923 // Increment the call count for monomorphic function calls. |
| 1921 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, | 1924 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, |
| 1922 FixedArray::kHeaderSize + kPointerSize), | 1925 FixedArray::kHeaderSize + kPointerSize), |
| 1923 Smi::FromInt(CallICNexus::kCallCountIncrement)); | 1926 Smi::FromInt(CallICNexus::kCallCountIncrement)); |
| 1924 | 1927 |
| (...skipping 101 matching lines...) |
| 2026 | 2029 |
| 2027 // We are going monomorphic, provided we actually have a JSFunction. | 2030 // We are going monomorphic, provided we actually have a JSFunction. |
| 2028 __ JumpIfSmi(rdi, &miss); | 2031 __ JumpIfSmi(rdi, &miss); |
| 2029 | 2032 |
| 2030 // Goto miss case if we do not have a function. | 2033 // Goto miss case if we do not have a function. |
| 2031 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 2034 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 2032 __ j(not_equal, &miss); | 2035 __ j(not_equal, &miss); |
| 2033 | 2036 |
| 2034 // Make sure the function is not the Array() function, which requires special | 2037 // Make sure the function is not the Array() function, which requires special |
| 2035 // behavior on MISS. | 2038 // behavior on MISS. |
| 2036 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx); | 2039 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); |
| 2037 __ cmpp(rdi, rcx); | 2040 __ cmpp(rdi, rcx); |
| 2038 __ j(equal, &miss); | 2041 __ j(equal, &miss); |
| 2039 | 2042 |
| 2040 // Make sure the function belongs to the same native context. | 2043 // Make sure the function belongs to the same native context (which implies |
| | 2044 // the same global object). |
| 2041 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); | 2045 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 2042 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX)); | 2046 __ movp(rcx, ContextOperand(rcx, Context::GLOBAL_OBJECT_INDEX)); |
| 2043 __ cmpp(rcx, NativeContextOperand()); | 2047 __ cmpp(rcx, GlobalObjectOperand()); |
| 2044 __ j(not_equal, &miss); | 2048 __ j(not_equal, &miss); |
| 2045 | 2049 |
| 2046 // Update stats. | 2050 // Update stats. |
| 2047 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1)); | 2051 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1)); |
| 2048 | 2052 |
| 2049 // Initialize the call counter. | 2053 // Initialize the call counter. |
| 2050 __ Move(FieldOperand(rbx, rdx, times_pointer_size, | 2054 __ Move(FieldOperand(rbx, rdx, times_pointer_size, |
| 2051 FixedArray::kHeaderSize + kPointerSize), | 2055 FixedArray::kHeaderSize + kPointerSize), |
| 2052 Smi::FromInt(CallICNexus::kCallCountIncrement)); | 2056 Smi::FromInt(CallICNexus::kCallCountIncrement)); |
| 2053 | 2057 |
| (...skipping 3333 matching lines...) |
| 5387 kStackSpace, nullptr, return_value_operand, NULL); | 5391 kStackSpace, nullptr, return_value_operand, NULL); |
| 5388 } | 5392 } |
| 5389 | 5393 |
| 5390 | 5394 |
| 5391 #undef __ | 5395 #undef __ |
| 5392 | 5396 |
| 5393 } // namespace internal | 5397 } // namespace internal |
| 5394 } // namespace v8 | 5398 } // namespace v8 |
| 5395 | 5399 |
| 5396 #endif // V8_TARGET_ARCH_X64 | 5400 #endif // V8_TARGET_ARCH_X64 |
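
The recurring change on the NEW side replaces the one-shot NativeContextOperand()/LoadNativeContextSlot helpers with an explicit two-step load: current context, then global object, then native context. Below is a minimal sketch of that pattern in isolation, assuming the stub-generator setup already used in this file (a MacroAssembler* masm, the __ ACCESS_MASM(masm) macro, and rsi holding the current context); the wrapper name is hypothetical, and only the two movp loads are taken from the diff above.

    // Hypothetical wrapper, for illustration only; not part of the CL.
    static void LoadNativeContextViaGlobalObject(MacroAssembler* masm,
                                                 Register dst) {
      // OLD side (left column) did this in a single load:
      //   __ movp(dst, NativeContextOperand());
      // NEW side (right column) goes through the global object explicitly:
      __ movp(dst, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
      __ movp(dst, FieldOperand(dst, JSGlobalObject::kNativeContextOffset));
    }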