Chromium Code Reviews

Unified Diff: src/x64/code-stubs-x64.cc

Issue 1688283003: [Interpreter] Implements calls through CallICStub in the interpreter. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fixes comments I missed in the last patch. Created 4 years, 10 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_X64

 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
 #include "src/ic/handler-compiler.h"
(...skipping 1734 matching lines...)
   __ bind(&non_function);
   __ movp(rdx, rdi);
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }


 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // rdi - function
   // rdx - slot id
   // rbx - vector
+  // rax - number of arguments if argc_in_register() is true.
   // rcx - allocation site (loaded from vector[slot]).
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
   __ cmpp(rdi, r8);
   __ j(not_equal, miss);

-  __ movp(rax, Immediate(arg_count()));
-
   // Increment the call count for monomorphic function calls.
   __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                  FixedArray::kHeaderSize + kPointerSize),
                     Smi::FromInt(CallICNexus::kCallCountIncrement));

   __ movp(rbx, rcx);
   __ movp(rdx, rdi);
-  ArrayConstructorStub stub(masm->isolate(), arg_count());
-  __ TailCallStub(&stub);
+  if (argc_in_register()) {
+    ArrayConstructorStub stub(masm->isolate());
+    __ TailCallStub(&stub);
+  } else {
+    ArrayConstructorStub stub(masm->isolate(), arg_count());
+    __ TailCallStub(&stub);
+  }
 }


 void CallICStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- rdi - function
   // -- rdx - slot id
   // -- rbx - vector
+  // -- rax - number of arguments if argc_in_register() is true.
   // -----------------------------------
   Isolate* isolate = masm->isolate();
   Label extra_checks_or_miss, call, call_function;
-  int argc = arg_count();
-  StackArgumentsAccessor args(rsp, argc);
-  ParameterCount actual(argc);
+  if (!argc_in_register()) {
+    int argc = arg_count();
+    __ Set(rax, argc);
+  }

   // The checks. First, does rdi match the recorded monomorphic target?
   __ SmiToInteger32(rdx, rdx);
   __ movp(rcx,
           FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

   // We don't know that we have a weak cell. We might have a private symbol
   // or an AllocationSite, but the memory is safe to examine.
   // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
   // FixedArray.
(...skipping 13 matching lines...)
   // The compare above could have been a SMI/SMI comparison. Guard against this
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(rdi, &extra_checks_or_miss);

   // Increment the call count for monomorphic function calls.
   __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                  FixedArray::kHeaderSize + kPointerSize),
                     Smi::FromInt(CallICNexus::kCallCountIncrement));

   __ bind(&call_function);
-  __ Set(rax, argc);
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
           RelocInfo::CODE_TARGET);

   __ bind(&extra_checks_or_miss);
   Label uninitialized, miss, not_allocation_site;

   __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
   __ j(equal, &call);

(...skipping 18 matching lines...)

   // We are going megamorphic. If the feedback is a JSFunction, it is fine
   // to handle it here. More complex cases are dealt with in the runtime.
   __ AssertNotSmi(rcx);
   __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
   __ j(not_equal, &miss);
   __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
           TypeFeedbackVector::MegamorphicSentinel(isolate));

   __ bind(&call);
-  __ Set(rax, argc);
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);

   __ bind(&uninitialized);

   // We are going monomorphic, provided we actually have a JSFunction.
   __ JumpIfSmi(rdi, &miss);

   // Goto miss case if we do not have a function.
   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
(...skipping 16 matching lines...)
                                  FixedArray::kHeaderSize + kPointerSize),
                     Smi::FromInt(CallICNexus::kCallCountIncrement));

   // Store the function. Use a stub since we need a frame for allocation.
   // rbx - vector
   // rdx - slot (needs to be in smi form)
   // rdi - function
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(isolate);

mvstanton 2016/02/15 11:13:45 Wow, is it just a bug that rax wasn't being saved?
mythria 2016/02/17 11:02:48 Yes :(
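A minimal sketch of the pattern the added lines below follow (not part of the patch; it assumes only the x64 MacroAssembler helpers already used in this file): rax holds the raw int32 argument count and CreateWeakCellStub can allocate, so the count is tagged as a Smi before being pushed, presumably so every stack slot in the internal frame holds a GC-safe tagged value, and is untagged again after the call.

  __ Integer32ToSmi(rax, rax);  // raw int32 count -> Smi (tagged, GC-safe)
  __ Push(rax);                 // preserve across the stub call
  __ CallStub(&create_stub);    // may allocate / trigger GC
  __ Pop(rax);                  // restore the saved count
  __ SmiToInteger32(rax, rax);  // Smi -> raw int32 count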
+    __ Integer32ToSmi(rax, rax);
+    __ Push(rax);
     __ Integer32ToSmi(rdx, rdx);
     __ Push(rdi);
+
     __ CallStub(&create_stub);
+
     __ Pop(rdi);
+    __ Pop(rax);
+    __ SmiToInteger32(rax, rax);
   }

   __ jmp(&call_function);

   // We are here because tracing is on or we encountered a MISS case we can't
   // handle here.
   __ bind(&miss);
   GenerateMiss(masm);

   __ jmp(&call);

   // Unreachable
   __ int3();
 }


 void CallICStub::GenerateMiss(MacroAssembler* masm) {
   FrameScope scope(masm, StackFrame::INTERNAL);

+  // Store the number of arguments to be used later.
+  __ Integer32ToSmi(rax, rax);
+  __ Push(rax);
+
   // Push the receiver and the function and feedback info.
   __ Push(rdi);
   __ Push(rbx);
   __ Integer32ToSmi(rdx, rdx);
   __ Push(rdx);

   // Call the entry.
   __ CallRuntime(Runtime::kCallIC_Miss);

   // Move result to edi and exit the internal frame.
   __ movp(rdi, rax);
+  // rdi, rbx, rdx are arguments to CallIC_Miss. They will be popped by
+  // Runtime_CallIC_Miss.
+  __ Pop(rax);
+  __ SmiToInteger32(rax, rax);
 }


 bool CEntryStub::NeedsImmovableCode() {
   return false;
 }


 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
(...skipping 3604 matching lines...)
                             NULL);
 }


 #undef __

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_X64