Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(409)

Side by Side Diff: src/ppc/code-stubs-ppc.cc

Issue 2356483002: PPC/s390: Record call counts also for megamorphic calls. (Closed)
Patch Set: PPC/s390: Record call counts also for megamorphic calls. Created 4 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are viewable only by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | src/s390/code-stubs-s390.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_PPC 5 #if V8_TARGET_ARCH_PPC
6 6
7 #include "src/code-stubs.h" 7 #include "src/code-stubs.h"
8 #include "src/api-arguments.h" 8 #include "src/api-arguments.h"
9 #include "src/base/bits.h" 9 #include "src/base/bits.h"
10 #include "src/bootstrapper.h" 10 #include "src/bootstrapper.h"
(...skipping 1843 matching lines...) Expand 10 before | Expand all | Expand 10 after
1854 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); 1854 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1855 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); 1855 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
1856 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); 1856 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
1857 __ JumpToJSEntry(ip); 1857 __ JumpToJSEntry(ip);
1858 1858
1859 __ bind(&non_function); 1859 __ bind(&non_function);
1860 __ mr(r6, r4); 1860 __ mr(r6, r4);
1861 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 1861 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1862 } 1862 }
1863 1863
1864 // Note: feedback_vector and slot are clobbered after the call.
1865 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
1866 Register slot, Register temp) {
1867 const int count_offset = FixedArray::kHeaderSize + kPointerSize;
1868 __ SmiToPtrArrayOffset(temp, slot);
1869 __ add(feedback_vector, feedback_vector, temp);
1870 __ LoadP(slot, FieldMemOperand(feedback_vector, count_offset));
1871 __ AddSmiLiteral(slot, slot, Smi::FromInt(1), temp);
1872 __ StoreP(slot, FieldMemOperand(feedback_vector, count_offset), temp);
1873 }
1864 1874
1865 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { 1875 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
1866 // r4 - function 1876 // r4 - function
1867 // r6 - slot id 1877 // r6 - slot id
1868 // r5 - vector 1878 // r5 - vector
1869 // r7 - allocation site (loaded from vector[slot]) 1879 // r7 - allocation site (loaded from vector[slot])
1870 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); 1880 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
1871 __ cmp(r4, r8); 1881 __ cmp(r4, r8);
1872 __ bne(miss); 1882 __ bne(miss);
1873 1883
1874 __ mov(r3, Operand(arg_count())); 1884 __ mov(r3, Operand(arg_count()));
1875 1885
1876 // Increment the call count for monomorphic function calls. 1886 // Increment the call count for monomorphic function calls.
1877 const int count_offset = FixedArray::kHeaderSize + kPointerSize; 1887 IncrementCallCount(masm, r5, r6, r0);
1878 __ SmiToPtrArrayOffset(r8, r6);
1879 __ add(r5, r5, r8);
1880 __ LoadP(r6, FieldMemOperand(r5, count_offset));
1881 __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
1882 __ StoreP(r6, FieldMemOperand(r5, count_offset), r0);
1883 1888
1884 __ mr(r5, r7); 1889 __ mr(r5, r7);
1885 __ mr(r6, r4); 1890 __ mr(r6, r4);
1886 ArrayConstructorStub stub(masm->isolate(), arg_count()); 1891 ArrayConstructorStub stub(masm->isolate(), arg_count());
1887 __ TailCallStub(&stub); 1892 __ TailCallStub(&stub);
1888 } 1893 }
1889 1894
1890 1895
1891 void CallICStub::Generate(MacroAssembler* masm) { 1896 void CallICStub::Generate(MacroAssembler* masm) {
1892 // r4 - function 1897 // r4 - function
1893 // r6 - slot id (Smi) 1898 // r6 - slot id (Smi)
1894 // r5 - vector 1899 // r5 - vector
1895 Label extra_checks_or_miss, call, call_function; 1900 Label extra_checks_or_miss, call, call_function, call_count_incremented;
1896 int argc = arg_count(); 1901 int argc = arg_count();
1897 ParameterCount actual(argc); 1902 ParameterCount actual(argc);
1898 1903
1899 // The checks. First, does r4 match the recorded monomorphic target? 1904 // The checks. First, does r4 match the recorded monomorphic target?
1900 __ SmiToPtrArrayOffset(r9, r6); 1905 __ SmiToPtrArrayOffset(r9, r6);
1901 __ add(r9, r5, r9); 1906 __ add(r9, r5, r9);
1902 __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize)); 1907 __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize));
1903 1908
1904 // We don't know that we have a weak cell. We might have a private symbol 1909 // We don't know that we have a weak cell. We might have a private symbol
1905 // or an AllocationSite, but the memory is safe to examine. 1910 // or an AllocationSite, but the memory is safe to examine.
(...skipping 10 matching lines...) Expand all
1916 WeakCell::kValueOffset == Symbol::kHashFieldSlot); 1921 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
1917 1922
1918 __ LoadP(r8, FieldMemOperand(r7, WeakCell::kValueOffset)); 1923 __ LoadP(r8, FieldMemOperand(r7, WeakCell::kValueOffset));
1919 __ cmp(r4, r8); 1924 __ cmp(r4, r8);
1920 __ bne(&extra_checks_or_miss); 1925 __ bne(&extra_checks_or_miss);
1921 1926
1922 // The compare above could have been a SMI/SMI comparison. Guard against this 1927 // The compare above could have been a SMI/SMI comparison. Guard against this
1923 // convincing us that we have a monomorphic JSFunction. 1928 // convincing us that we have a monomorphic JSFunction.
1924 __ JumpIfSmi(r4, &extra_checks_or_miss); 1929 __ JumpIfSmi(r4, &extra_checks_or_miss);
1925 1930
1931 __ bind(&call_function);
1932
1926 // Increment the call count for monomorphic function calls. 1933 // Increment the call count for monomorphic function calls.
1927 const int count_offset = FixedArray::kHeaderSize + kPointerSize; 1934 IncrementCallCount(masm, r5, r6, r0);
1928 __ LoadP(r6, FieldMemOperand(r9, count_offset));
1929 __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
1930 __ StoreP(r6, FieldMemOperand(r9, count_offset), r0);
1931 1935
1932 __ bind(&call_function);
1933 __ mov(r3, Operand(argc)); 1936 __ mov(r3, Operand(argc));
1934 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), 1937 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
1935 tail_call_mode()), 1938 tail_call_mode()),
1936 RelocInfo::CODE_TARGET); 1939 RelocInfo::CODE_TARGET);
1937 1940
1938 __ bind(&extra_checks_or_miss); 1941 __ bind(&extra_checks_or_miss);
1939 Label uninitialized, miss, not_allocation_site; 1942 Label uninitialized, miss, not_allocation_site;
1940 1943
1941 __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex); 1944 __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex);
1942 __ beq(&call); 1945 __ beq(&call);
(...skipping 19 matching lines...) Expand all
1962 1965
1963 // We are going megamorphic. If the feedback is a JSFunction, it is fine 1966 // We are going megamorphic. If the feedback is a JSFunction, it is fine
1964 // to handle it here. More complex cases are dealt with in the runtime. 1967 // to handle it here. More complex cases are dealt with in the runtime.
1965 __ AssertNotSmi(r7); 1968 __ AssertNotSmi(r7);
1966 __ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE); 1969 __ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE);
1967 __ bne(&miss); 1970 __ bne(&miss);
1968 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); 1971 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
1969 __ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0); 1972 __ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0);
1970 1973
1971 __ bind(&call); 1974 __ bind(&call);
1975
1976 // Increment the call count for megamorphic function calls.
1977 IncrementCallCount(masm, r5, r6, r0);
1978
1979 __ bind(&call_count_incremented);
1972 __ mov(r3, Operand(argc)); 1980 __ mov(r3, Operand(argc));
1973 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), 1981 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
1974 RelocInfo::CODE_TARGET); 1982 RelocInfo::CODE_TARGET);
1975 1983
1976 __ bind(&uninitialized); 1984 __ bind(&uninitialized);
1977 1985
1978 // We are going monomorphic, provided we actually have a JSFunction. 1986 // We are going monomorphic, provided we actually have a JSFunction.
1979 __ JumpIfSmi(r4, &miss); 1987 __ JumpIfSmi(r4, &miss);
1980 1988
1981 // Goto miss case if we do not have a function. 1989 // Goto miss case if we do not have a function.
1982 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); 1990 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE);
1983 __ bne(&miss); 1991 __ bne(&miss);
1984 1992
1985 // Make sure the function is not the Array() function, which requires special 1993 // Make sure the function is not the Array() function, which requires special
1986 // behavior on MISS. 1994 // behavior on MISS.
1987 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7); 1995 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7);
1988 __ cmp(r4, r7); 1996 __ cmp(r4, r7);
1989 __ beq(&miss); 1997 __ beq(&miss);
1990 1998
1991 // Make sure the function belongs to the same native context. 1999 // Make sure the function belongs to the same native context.
1992 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kContextOffset)); 2000 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kContextOffset));
1993 __ LoadP(r7, ContextMemOperand(r7, Context::NATIVE_CONTEXT_INDEX)); 2001 __ LoadP(r7, ContextMemOperand(r7, Context::NATIVE_CONTEXT_INDEX));
1994 __ LoadP(ip, NativeContextMemOperand()); 2002 __ LoadP(ip, NativeContextMemOperand());
1995 __ cmp(r7, ip); 2003 __ cmp(r7, ip);
1996 __ bne(&miss); 2004 __ bne(&miss);
1997 2005
1998 // Initialize the call counter.
1999 __ LoadSmiLiteral(r8, Smi::FromInt(1));
2000 __ StoreP(r8, FieldMemOperand(r9, count_offset), r0);
2001
2002 // Store the function. Use a stub since we need a frame for allocation. 2006 // Store the function. Use a stub since we need a frame for allocation.
2003 // r5 - vector 2007 // r5 - vector
2004 // r6 - slot 2008 // r6 - slot
2005 // r4 - function 2009 // r4 - function
2006 { 2010 {
2007 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 2011 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2008 CreateWeakCellStub create_stub(masm->isolate()); 2012 CreateWeakCellStub create_stub(masm->isolate());
2013 __ Push(r5);
2014 __ Push(r6);
2009 __ Push(cp, r4); 2015 __ Push(cp, r4);
2010 __ CallStub(&create_stub); 2016 __ CallStub(&create_stub);
2011 __ Pop(cp, r4); 2017 __ Pop(cp, r4);
2018 __ Pop(r6);
2019 __ Pop(r5);
2012 } 2020 }
2013 2021
2014 __ b(&call_function); 2022 __ b(&call_function);
2015 2023
2016 // We are here because tracing is on or we encountered a MISS case we can't 2024 // We are here because tracing is on or we encountered a MISS case we can't
2017 // handle here. 2025 // handle here.
2018 __ bind(&miss); 2026 __ bind(&miss);
2019 GenerateMiss(masm); 2027 GenerateMiss(masm);
2020 2028
2021 __ b(&call); 2029 __ b(&call_count_incremented);
2022 } 2030 }
2023 2031
2024 2032
2025 void CallICStub::GenerateMiss(MacroAssembler* masm) { 2033 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2026 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 2034 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2027 2035
2028 // Push the function and feedback info. 2036 // Push the function and feedback info.
2029 __ Push(r4, r5, r6); 2037 __ Push(r4, r5, r6);
2030 2038
2031 // Call the entry. 2039 // Call the entry.
(...skipping 3398 matching lines...) Expand 10 before | Expand all | Expand 10 after
5430 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); 5438 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
5431 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, 5439 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
5432 kStackUnwindSpace, NULL, return_value_operand, NULL); 5440 kStackUnwindSpace, NULL, return_value_operand, NULL);
5433 } 5441 }
5434 5442
5435 #undef __ 5443 #undef __
5436 } // namespace internal 5444 } // namespace internal
5437 } // namespace v8 5445 } // namespace v8
5438 5446
5439 #endif // V8_TARGET_ARCH_PPC 5447 #endif // V8_TARGET_ARCH_PPC
OLDNEW
« no previous file with comments | « no previous file | src/s390/code-stubs-s390.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698