OLD | NEW |
---|---|
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 1768 matching lines...) | |
1779 | 1779 |
1780 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 1780 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
1781 // tagged as a small integer. | 1781 // tagged as a small integer. |
1782 __ InvokeBuiltin(builtin, JUMP_FUNCTION); | 1782 __ InvokeBuiltin(builtin, JUMP_FUNCTION); |
1783 | 1783 |
1784 __ bind(&miss); | 1784 __ bind(&miss); |
1785 GenerateMiss(masm); | 1785 GenerateMiss(masm); |
1786 } | 1786 } |
1787 | 1787 |
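The comparison handler above ends by tail-calling a native that returns its three-way result already tagged as a Smi, so callers can branch on it without untagging. A minimal standalone sketch of that contract, assuming V8's default x64 Smi encoding (32-bit payload in the upper word; illustrative code, not V8's):

```cpp
#include <cassert>
#include <cstdint>

// Tagging is a left shift by 32; the all-zero low word is the Smi tag.
// Shift via uint64_t so shifting negative values stays well defined.
int64_t Integer32ToSmi(int32_t value) {
  return static_cast<int64_t>(static_cast<uint64_t>(value) << 32);
}
int32_t SmiToInteger32(int64_t smi) {
  return static_cast<int32_t>(smi >> 32);
}

// The native's contract: -1 (less), 0 (equal), or 1 (greater), tagged.
int64_t ThreeWayCompare(double a, double b) {
  int32_t r = a < b ? -1 : a > b ? 1 : 0;
  return Integer32ToSmi(r);
}

int main() {
  assert(SmiToInteger32(ThreeWayCompare(1.0, 2.0)) == -1);
  assert(ThreeWayCompare(3.0, 3.0) == 0);  // Smi zero is all-zero bits
  assert(SmiToInteger32(ThreeWayCompare(4.0, 2.0)) == 1);
  return 0;
}
```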
1788 | 1788 |
1789 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) { | 1789 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub, |
1790 // eax : number of arguments to the construct function | 1790 bool is_super) { |
1791 // ebx : Feedback vector | 1791 // rax : number of arguments to the construct function |
1792 // edx : slot in feedback vector (Smi) | 1792 // rbx : feedback vector |
1793 // edi : the function to call | 1793 // rcx : original constructor (for IsSuperConstructorCall) |
| 1794 // rdx : slot in feedback vector (Smi) |
| 1795 // rdi : the function to call |
mvstanton
2015/07/22 10:53:25
edi, edx, etc.?! good catch. don't run git-blame p
1794 FrameScope scope(masm, StackFrame::INTERNAL); | 1796 FrameScope scope(masm, StackFrame::INTERNAL); |
1795 | 1797 |
1796 // Number-of-arguments register must be smi-tagged to call out. | 1798 // Number-of-arguments register must be smi-tagged to call out. |
1797 __ Integer32ToSmi(rax, rax); | 1799 __ Integer32ToSmi(rax, rax); |
1798 __ Push(rax); | 1800 __ Push(rax); |
1799 __ Push(rdi); | 1801 __ Push(rdi); |
1800 __ Integer32ToSmi(rdx, rdx); | 1802 __ Integer32ToSmi(rdx, rdx); |
1801 __ Push(rdx); | 1803 __ Push(rdx); |
1802 __ Push(rbx); | 1804 __ Push(rbx); |
| 1805 if (is_super) { |
| 1806 __ Push(rcx); |
| 1807 } |
1803 | 1808 |
1804 __ CallStub(stub); | 1809 __ CallStub(stub); |
1805 | 1810 |
| 1811 if (is_super) { |
| 1812 __ Pop(rcx); |
| 1813 } |
1806 __ Pop(rbx); | 1814 __ Pop(rbx); |
1807 __ Pop(rdx); | 1815 __ Pop(rdx); |
1808 __ Pop(rdi); | 1816 __ Pop(rdi); |
1809 __ Pop(rax); | 1817 __ Pop(rax); |
1810 __ SmiToInteger32(rax, rax); | 1818 __ SmiToInteger32(rax, rax); |
1811 } | 1819 } |
1812 | 1820 |
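CallStubInRecordCallTarget above survives a clobbering stub call by spilling rax, rdi, rdx, rbx (and, with this CL, rcx for super constructor calls) and reloading them in strict reverse order. A hypothetical C++ sketch of that invariant, with a toy stack standing in for the machine stack:

```cpp
#include <cassert>
#include <vector>

// Toy stack standing in for the machine stack the stub pushes to.
struct ToyStack {
  std::vector<long> slots;
  void Push(long v) { slots.push_back(v); }
  long Pop() { long v = slots.back(); slots.pop_back(); return v; }
};

// Mirrors CallStubInRecordCallTarget: spill, call, reload in reverse.
// The conditional rcx save is the last push, so it must be the first pop.
void CallPreservingRegisters(ToyStack& s, long& rax, long& rdi, long& rdx,
                             long& rbx, long& rcx, bool is_super,
                             void (*stub)()) {
  s.Push(rax); s.Push(rdi); s.Push(rdx); s.Push(rbx);
  if (is_super) s.Push(rcx);
  stub();
  // Model the worst case: the callee clobbered every register.
  rax = rdi = rdx = rbx = rcx = -1;
  if (is_super) rcx = s.Pop();
  rbx = s.Pop(); rdx = s.Pop(); rdi = s.Pop(); rax = s.Pop();
}

int main() {
  ToyStack s;
  long rax = 1, rdi = 2, rdx = 3, rbx = 4, rcx = 5;
  CallPreservingRegisters(s, rax, rdi, rdx, rbx, rcx, /*is_super=*/true,
                          [] {});  // no-op "stub"
  assert(rax == 1 && rdi == 2 && rdx == 3 && rbx == 4 && rcx == 5);
  return 0;
}
```

Note that when is_super is false, rcx is simply not preserved, matching the new code's behavior.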
1813 | 1821 |
1814 static void GenerateRecordCallTarget(MacroAssembler* masm) { | 1822 static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) { |
1815 // Cache the called function in a feedback vector slot. Cache states | 1823 // Cache the called function in a feedback vector slot. Cache states |
1816 // are uninitialized, monomorphic (indicated by a JSFunction), and | 1824 // are uninitialized, monomorphic (indicated by a JSFunction), and |
1817 // megamorphic. | 1825 // megamorphic. |
1818 // rax : number of arguments to the construct function | 1826 // rax : number of arguments to the construct function |
1819 // rbx : Feedback vector | 1827 // rbx : feedback vector |
| 1828 // rcx : original constructor (for IsSuperConstructorCall) |
1820 // rdx : slot in feedback vector (Smi) | 1829 // rdx : slot in feedback vector (Smi) |
1821 // rdi : the function to call | 1830 // rdi : the function to call |
1822 Isolate* isolate = masm->isolate(); | 1831 Isolate* isolate = masm->isolate(); |
1823 Label initialize, done, miss, megamorphic, not_array_function, | 1832 Label initialize, done, miss, megamorphic, not_array_function, |
1824 done_no_smi_convert; | 1833 done_no_smi_convert; |
1825 | 1834 |
1826 // Load the cache state into rcx. | 1835 // Load the cache state into r11. |
1827 __ SmiToInteger32(rdx, rdx); | 1836 __ SmiToInteger32(rdx, rdx); |
1828 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, | 1837 __ movp(r11, |
1829 FixedArray::kHeaderSize)); | 1838 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); |
1830 | 1839 |
1831 // A monomorphic cache hit or an already megamorphic state: invoke the | 1840 // A monomorphic cache hit or an already megamorphic state: invoke the |
1832 // function without changing the state. | 1841 // function without changing the state. |
1833 // We don't know if rcx is a WeakCell or a Symbol, but it's harmless to read | 1842 // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read |
1834 // at this position in a symbol (see static asserts in | 1843 // at this position in a symbol (see static asserts in |
1835 // type-feedback-vector.h). | 1844 // type-feedback-vector.h). |
1836 Label check_allocation_site; | 1845 Label check_allocation_site; |
1837 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset)); | 1846 __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset)); |
1838 __ j(equal, &done, Label::kFar); | 1847 __ j(equal, &done, Label::kFar); |
1839 __ CompareRoot(rcx, Heap::kmegamorphic_symbolRootIndex); | 1848 __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex); |
1840 __ j(equal, &done, Label::kFar); | 1849 __ j(equal, &done, Label::kFar); |
1841 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 1850 __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset), |
1842 Heap::kWeakCellMapRootIndex); | 1851 Heap::kWeakCellMapRootIndex); |
1843 __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site); | 1852 __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site); |
1844 | 1853 |
1845 // If the weak cell is cleared, we have a new chance to become monomorphic. | 1854 // If the weak cell is cleared, we have a new chance to become monomorphic. |
1846 __ CheckSmi(FieldOperand(rcx, WeakCell::kValueOffset)); | 1855 __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset)); |
1847 __ j(equal, &initialize); | 1856 __ j(equal, &initialize); |
1848 __ jmp(&megamorphic); | 1857 __ jmp(&megamorphic); |
1849 | 1858 |
1850 if (!FLAG_pretenuring_call_new) { | 1859 if (!FLAG_pretenuring_call_new) { |
1851 __ bind(&check_allocation_site); | 1860 __ bind(&check_allocation_site); |
1852 // If we came here, we need to see if we are the array function. | 1861 // If we came here, we need to see if we are the array function. |
1853 // If we didn't have a matching function, and we didn't find the megamorph | 1862 // If we didn't have a matching function, and we didn't find the megamorph |
1854 // sentinel, then we have in the slot either some other function or an | 1863 // sentinel, then we have in the slot either some other function or an |
1855 // AllocationSite. | 1864 // AllocationSite. |
1856 __ CompareRoot(FieldOperand(rcx, 0), Heap::kAllocationSiteMapRootIndex); | 1865 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex); |
1857 __ j(not_equal, &miss); | 1866 __ j(not_equal, &miss); |
1858 | 1867 |
1859 // Make sure the function is the Array() function | 1868 // Make sure the function is the Array() function |
1860 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); | 1869 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11); |
1861 __ cmpp(rdi, rcx); | 1870 __ cmpp(rdi, r11); |
1862 __ j(not_equal, &megamorphic); | 1871 __ j(not_equal, &megamorphic); |
1863 __ jmp(&done); | 1872 __ jmp(&done); |
1864 } | 1873 } |
1865 | 1874 |
1866 __ bind(&miss); | 1875 __ bind(&miss); |
1867 | 1876 |
1868 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 1877 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
1869 // megamorphic. | 1878 // megamorphic. |
1870 __ CompareRoot(rcx, Heap::kuninitialized_symbolRootIndex); | 1879 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex); |
1871 __ j(equal, &initialize); | 1880 __ j(equal, &initialize); |
1872 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 1881 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
1873 // write-barrier is needed. | 1882 // write-barrier is needed. |
1874 __ bind(&megamorphic); | 1883 __ bind(&megamorphic); |
1875 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), | 1884 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), |
1876 TypeFeedbackVector::MegamorphicSentinel(isolate)); | 1885 TypeFeedbackVector::MegamorphicSentinel(isolate)); |
1877 __ jmp(&done); | 1886 __ jmp(&done); |
1878 | 1887 |
1879 // An uninitialized cache is patched with the function or sentinel to | 1888 // An uninitialized cache is patched with the function or sentinel to |
1880 // indicate the ElementsKind if function is the Array constructor. | 1889 // indicate the ElementsKind if function is the Array constructor. |
1881 __ bind(&initialize); | 1890 __ bind(&initialize); |
1882 | 1891 |
1883 if (!FLAG_pretenuring_call_new) { | 1892 if (!FLAG_pretenuring_call_new) { |
1884 // Make sure the function is the Array() function | 1893 // Make sure the function is the Array() function |
1885 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); | 1894 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11); |
1886 __ cmpp(rdi, rcx); | 1895 __ cmpp(rdi, r11); |
1887 __ j(not_equal, ¬_array_function); | 1896 __ j(not_equal, ¬_array_function); |
1888 | 1897 |
1889 CreateAllocationSiteStub create_stub(isolate); | 1898 CreateAllocationSiteStub create_stub(isolate); |
1890 CallStubInRecordCallTarget(masm, &create_stub); | 1899 CallStubInRecordCallTarget(masm, &create_stub, is_super); |
1891 __ jmp(&done_no_smi_convert); | 1900 __ jmp(&done_no_smi_convert); |
1892 | 1901 |
1893 __ bind(¬_array_function); | 1902 __ bind(¬_array_function); |
1894 } | 1903 } |
1895 | 1904 |
1896 CreateWeakCellStub create_stub(isolate); | 1905 CreateWeakCellStub create_stub(isolate); |
1897 CallStubInRecordCallTarget(masm, &create_stub); | 1906 CallStubInRecordCallTarget(masm, &create_stub, is_super); |
1898 __ jmp(&done_no_smi_convert); | 1907 __ jmp(&done_no_smi_convert); |
1899 | 1908 |
1900 __ bind(&done); | 1909 __ bind(&done); |
1901 __ Integer32ToSmi(rdx, rdx); | 1910 __ Integer32ToSmi(rdx, rdx); |
1902 | 1911 |
1903 __ bind(&done_no_smi_convert); | 1912 __ bind(&done_no_smi_convert); |
1904 } | 1913 } |
1905 | 1914 |
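GenerateRecordCallTarget implements a small state machine over the feedback slot. A hypothetical sketch of its transitions (illustrative types, not V8's; it omits the cleared-WeakCell re-initialization path and the pretenuring flag):

```cpp
#include <cassert>

// Illustrative states: the real slot holds a WeakCell, an AllocationSite,
// or one of two sentinel symbols (uninitialized / megamorphic).
enum class SlotState { kUninitialized, kMonomorphic, kAllocationSite, kMegamorphic };

struct FeedbackSlot {
  SlotState state = SlotState::kUninitialized;
  const void* target = nullptr;  // stands in for the weakly-held JSFunction
};

// Mirrors GenerateRecordCallTarget's transitions: the first call initializes
// the slot (Array() gets an AllocationSite instead of a WeakCell), a matching
// hit changes nothing, and any mismatch collapses the slot to megamorphic.
void RecordCallTarget(FeedbackSlot& slot, const void* fn, bool is_array_fn) {
  switch (slot.state) {
    case SlotState::kUninitialized:
      slot.state = is_array_fn ? SlotState::kAllocationSite
                               : SlotState::kMonomorphic;
      slot.target = fn;
      break;
    case SlotState::kMonomorphic:
    case SlotState::kAllocationSite:
      if (slot.target != fn) {
        slot.state = SlotState::kMegamorphic;  // sentinel; terminal state
        slot.target = nullptr;
      }
      break;
    case SlotState::kMegamorphic:
      break;
  }
}

int main() {
  FeedbackSlot slot;
  int f, g;  // distinct addresses model distinct functions
  RecordCallTarget(slot, &f, false);
  assert(slot.state == SlotState::kMonomorphic);
  RecordCallTarget(slot, &f, false);  // hit: state unchanged
  RecordCallTarget(slot, &g, false);  // monomorphic miss: goes megamorphic
  assert(slot.state == SlotState::kMegamorphic);
  return 0;
}
```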
1906 | 1915 |
1907 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | 1916 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { |
(...skipping 129 matching lines...) | |
2037 // rdi : constructor function | 2046 // rdi : constructor function |
2038 Label slow, non_function_call; | 2047 Label slow, non_function_call; |
2039 | 2048 |
2040 // Check that function is not a smi. | 2049 // Check that function is not a smi. |
2041 __ JumpIfSmi(rdi, &non_function_call); | 2050 __ JumpIfSmi(rdi, &non_function_call); |
2042 // Check that function is a JSFunction. | 2051 // Check that function is a JSFunction. |
2043 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11); | 2052 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11); |
2044 __ j(not_equal, &slow); | 2053 __ j(not_equal, &slow); |
2045 | 2054 |
2046 if (RecordCallTarget()) { | 2055 if (RecordCallTarget()) { |
2047 if (IsSuperConstructorCall()) { | 2056 GenerateRecordCallTarget(masm, IsSuperConstructorCall()); |
2048 __ Push(rcx); | |
2049 } | |
2050 GenerateRecordCallTarget(masm); | |
2051 if (IsSuperConstructorCall()) { | |
2052 __ Pop(rcx); | |
2053 } | |
2054 | 2057 |
2055 __ SmiToInteger32(rdx, rdx); | 2058 __ SmiToInteger32(rdx, rdx); |
2056 if (FLAG_pretenuring_call_new) { | 2059 if (FLAG_pretenuring_call_new) { |
2057 // Put the AllocationSite from the feedback vector into ebx. | 2060 // Put the AllocationSite from the feedback vector into ebx. |
2058 // By adding kPointerSize we encode that we know the AllocationSite | 2061 // By adding kPointerSize we encode that we know the AllocationSite |
2059 // entry is at the feedback vector slot given by rdx + 1. | 2062 // entry is at the feedback vector slot given by rdx + 1. |
2060 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size, | 2063 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size, |
2061 FixedArray::kHeaderSize + kPointerSize)); | 2064 FixedArray::kHeaderSize + kPointerSize)); |
2062 } else { | 2065 } else { |
2063 Label feedback_register_initialized; | 2066 Label feedback_register_initialized; |
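The pretenuring path above depends on a fixed layout: the AllocationSite sits in the slot immediately after the call-target slot, hence the extra kPointerSize in the operand ("rdx + 1"). A sketch of that addressing under assumed constants (FieldOperand also compensates for the heap-object tag, omitted here):

```cpp
#include <cstdio>

// Illustrative constants, not V8's actual header layout.
constexpr long kPointerSize = 8;            // x64
constexpr long kFixedArrayHeaderSize = 16;  // stand-in for FixedArray::kHeaderSize

// Byte offset of feedback slot `index` from the vector's start, matching
// FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize).
long SlotOffset(long index) {
  return kFixedArrayHeaderSize + index * kPointerSize;
}

int main() {
  long slot = 5;  // untagged slot index, i.e. rdx after SmiToInteger32
  // With --pretenuring-call-new the AllocationSite lives in the next slot.
  printf("call target at vector+%ld, AllocationSite at vector+%ld\n",
         SlotOffset(slot), SlotOffset(slot) + kPointerSize);
  return 0;
}
```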
(...skipping 3368 matching lines...) | |
5432 kStackSpace, nullptr, return_value_operand, NULL); | 5435 kStackSpace, nullptr, return_value_operand, NULL); |
5433 } | 5436 } |
5434 | 5437 |
5435 | 5438 |
5436 #undef __ | 5439 #undef __ |
5437 | 5440 |
5438 } // namespace internal | 5441 } // namespace internal |
5439 } // namespace v8 | 5442 } // namespace v8 |
5440 | 5443 |
5441 #endif // V8_TARGET_ARCH_X64 | 5444 #endif // V8_TARGET_ARCH_X64 |