Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(321)

Side by Side Diff: src/mips/code-stubs-mips.cc

Issue 2412453005: [stubs] Refactor the CallICStub to pass the number of arguments. (Closed)
Patch Set: Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/interface-descriptors.cc ('k') | src/mips/interface-descriptors-mips.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS 5 #if V8_TARGET_ARCH_MIPS
6 6
7 #include "src/code-stubs.h" 7 #include "src/code-stubs.h"
8 #include "src/api-arguments.h" 8 #include "src/api-arguments.h"
9 #include "src/base/bits.h" 9 #include "src/base/bits.h"
10 #include "src/bootstrapper.h" 10 #include "src/bootstrapper.h"
(...skipping 1899 matching lines...) Expand 10 before | Expand all | Expand 10 after
1910 // Note: feedback_vector and slot are clobbered after the call. 1910 // Note: feedback_vector and slot are clobbered after the call.
1911 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, 1911 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
1912 Register slot) { 1912 Register slot) {
1913 __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize); 1913 __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize);
1914 __ lw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); 1914 __ lw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
1915 __ Addu(slot, slot, Operand(Smi::FromInt(1))); 1915 __ Addu(slot, slot, Operand(Smi::FromInt(1)));
1916 __ sw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); 1916 __ sw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
1917 } 1917 }
1918 1918
1919 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { 1919 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
1920 // a0 - number of arguments
1920 // a1 - function 1921 // a1 - function
1921 // a3 - slot id 1922 // a3 - slot id
1922 // a2 - vector 1923 // a2 - vector
1923 // t0 - loaded from vector[slot] 1924 // t0 - loaded from vector[slot]
1924 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at); 1925 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at);
1925 __ Branch(miss, ne, a1, Operand(at)); 1926 __ Branch(miss, ne, a1, Operand(at));
1926 1927
1927 // Increment the call count for monomorphic function calls. 1928 // Increment the call count for monomorphic function calls.
1928 IncrementCallCount(masm, a2, a3); 1929 IncrementCallCount(masm, a2, a3);
1929 1930
1930 __ mov(a2, t0); 1931 __ mov(a2, t0);
1931 __ mov(a3, a1); 1932 __ mov(a3, a1);
1932 __ li(a0, Operand(arg_count()));
1933 ArrayConstructorStub stub(masm->isolate()); 1933 ArrayConstructorStub stub(masm->isolate());
1934 __ TailCallStub(&stub); 1934 __ TailCallStub(&stub);
1935 } 1935 }
1936 1936
1937 1937
1938 void CallICStub::Generate(MacroAssembler* masm) { 1938 void CallICStub::Generate(MacroAssembler* masm) {
1939 // a0 - number of arguments
1939 // a1 - function 1940 // a1 - function
1940 // a3 - slot id (Smi) 1941 // a3 - slot id (Smi)
1941 // a2 - vector 1942 // a2 - vector
1942 Label extra_checks_or_miss, call, call_function, call_count_incremented; 1943 Label extra_checks_or_miss, call, call_function, call_count_incremented;
1943 int argc = arg_count();
1944 ParameterCount actual(argc);
1945 1944
1946 // The checks. First, does a1 match the recorded monomorphic target? 1945 // The checks. First, does a1 match the recorded monomorphic target?
1947 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize); 1946 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize);
1948 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); 1947 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));
1949 1948
1950 // We don't know that we have a weak cell. We might have a private symbol 1949 // We don't know that we have a weak cell. We might have a private symbol
1951 // or an AllocationSite, but the memory is safe to examine. 1950 // or an AllocationSite, but the memory is safe to examine.
1952 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to 1951 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
1953 // FixedArray. 1952 // FixedArray.
1954 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) 1953 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
(...skipping 13 matching lines...) Expand all
1968 // convincing us that we have a monomorphic JSFunction. 1967 // convincing us that we have a monomorphic JSFunction.
1969 __ JumpIfSmi(a1, &extra_checks_or_miss); 1968 __ JumpIfSmi(a1, &extra_checks_or_miss);
1970 1969
1971 __ bind(&call_function); 1970 __ bind(&call_function);
1972 1971
1973 // Increment the call count for monomorphic function calls. 1972 // Increment the call count for monomorphic function calls.
1974 IncrementCallCount(masm, a2, a3); 1973 IncrementCallCount(masm, a2, a3);
1975 1974
1976 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), 1975 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
1977 tail_call_mode()), 1976 tail_call_mode()),
1978 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg), 1977 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg));
1979 USE_DELAY_SLOT);
1980 __ li(a0, Operand(argc)); // In delay slot.
1981 1978
1982 __ bind(&extra_checks_or_miss); 1979 __ bind(&extra_checks_or_miss);
1983 Label uninitialized, miss, not_allocation_site; 1980 Label uninitialized, miss, not_allocation_site;
1984 1981
1985 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); 1982 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
1986 __ Branch(&call, eq, t0, Operand(at)); 1983 __ Branch(&call, eq, t0, Operand(at));
1987 1984
1988 // Verify that t0 contains an AllocationSite 1985 // Verify that t0 contains an AllocationSite
1989 __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset)); 1986 __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset));
1990 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); 1987 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
(...skipping 20 matching lines...) Expand all
2011 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize); 2008 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize);
2012 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); 2009 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
2013 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); 2010 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
2014 2011
2015 __ bind(&call); 2012 __ bind(&call);
2016 IncrementCallCount(masm, a2, a3); 2013 IncrementCallCount(masm, a2, a3);
2017 2014
2018 __ bind(&call_count_incremented); 2015 __ bind(&call_count_incremented);
2019 2016
2020 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), 2017 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
2021 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg), 2018 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg));
2022 USE_DELAY_SLOT);
2023 __ li(a0, Operand(argc)); // In delay slot.
2024 2019
2025 __ bind(&uninitialized); 2020 __ bind(&uninitialized);
2026 2021
2027 // We are going monomorphic, provided we actually have a JSFunction. 2022 // We are going monomorphic, provided we actually have a JSFunction.
2028 __ JumpIfSmi(a1, &miss); 2023 __ JumpIfSmi(a1, &miss);
2029 2024
2030 // Go to the miss case if we do not have a function. 2025 // Go to the miss case if we do not have a function.
2031 __ GetObjectType(a1, t0, t0); 2026 __ GetObjectType(a1, t0, t0);
2032 __ Branch(&miss, ne, t0, Operand(JS_FUNCTION_TYPE)); 2027 __ Branch(&miss, ne, t0, Operand(JS_FUNCTION_TYPE));
2033 2028
2034 // Make sure the function is not the Array() function, which requires special 2029 // Make sure the function is not the Array() function, which requires special
2035 // behavior on MISS. 2030 // behavior on MISS.
2036 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t0); 2031 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t0);
2037 __ Branch(&miss, eq, a1, Operand(t0)); 2032 __ Branch(&miss, eq, a1, Operand(t0));
2038 2033
2039 // Make sure the function belongs to the same native context. 2034 // Make sure the function belongs to the same native context.
2040 __ lw(t0, FieldMemOperand(a1, JSFunction::kContextOffset)); 2035 __ lw(t0, FieldMemOperand(a1, JSFunction::kContextOffset));
2041 __ lw(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX)); 2036 __ lw(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX));
2042 __ lw(t1, NativeContextMemOperand()); 2037 __ lw(t1, NativeContextMemOperand());
2043 __ Branch(&miss, ne, t0, Operand(t1)); 2038 __ Branch(&miss, ne, t0, Operand(t1));
2044 2039
2045 // Store the function. Use a stub since we need a frame for allocation. 2040 // Store the function. Use a stub since we need a frame for allocation.
2046 // a2 - vector 2041 // a2 - vector
2047 // a3 - slot 2042 // a3 - slot
2048 // a1 - function 2043 // a1 - function
2049 { 2044 {
2050 FrameScope scope(masm, StackFrame::INTERNAL); 2045 FrameScope scope(masm, StackFrame::INTERNAL);
2051 CreateWeakCellStub create_stub(masm->isolate()); 2046 CreateWeakCellStub create_stub(masm->isolate());
2047 __ SmiTag(a0);
2048 __ Push(a0);
2052 __ Push(a2, a3); 2049 __ Push(a2, a3);
2053 __ Push(cp, a1); 2050 __ Push(cp, a1);
2054 __ CallStub(&create_stub); 2051 __ CallStub(&create_stub);
2055 __ Pop(cp, a1); 2052 __ Pop(cp, a1);
2056 __ Pop(a2, a3); 2053 __ Pop(a2, a3);
2054 __ Pop(a0);
2055 __ SmiUntag(a0);
2057 } 2056 }
2058 2057
2059 __ Branch(&call_function); 2058 __ Branch(&call_function);
2060 2059
2061 // We are here because tracing is on or we encountered a MISS case we can't 2060 // We are here because tracing is on or we encountered a MISS case we can't
2062 // handle here. 2061 // handle here.
2063 __ bind(&miss); 2062 __ bind(&miss);
2064 GenerateMiss(masm); 2063 GenerateMiss(masm);
2065 2064
2066 __ Branch(&call_count_incremented); 2065 __ Branch(&call_count_incremented);
2067 } 2066 }
2068 2067
2069 2068
2070 void CallICStub::GenerateMiss(MacroAssembler* masm) { 2069 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2071 FrameScope scope(masm, StackFrame::INTERNAL); 2070 FrameScope scope(masm, StackFrame::INTERNAL);
2072 2071
2072 // Preserve the number of arguments as Smi.
2073 __ SmiTag(a0);
2074 __ Push(a0);
2075
2073 // Push the receiver and the function and feedback info. 2076 // Push the receiver and the function and feedback info.
2074 __ Push(a1, a2, a3); 2077 __ Push(a1, a2, a3);
2075 2078
2076 // Call the entry. 2079 // Call the entry.
2077 __ CallRuntime(Runtime::kCallIC_Miss); 2080 __ CallRuntime(Runtime::kCallIC_Miss);
2078 2081
2079 // Move result to a1 and exit the internal frame. 2082 // Move result to a1 and exit the internal frame.
2080 __ mov(a1, v0); 2083 __ mov(a1, v0);
2084
2085 // Restore number of arguments.
2086 __ Pop(a0);
2087 __ SmiUntag(a0);
2081 } 2088 }
2082 2089
2083 2090
2084 // StringCharCodeAtGenerator. 2091 // StringCharCodeAtGenerator.
2085 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 2092 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2086 DCHECK(!t0.is(index_)); 2093 DCHECK(!t0.is(index_));
2087 DCHECK(!t0.is(result_)); 2094 DCHECK(!t0.is(result_));
2088 DCHECK(!t0.is(object_)); 2095 DCHECK(!t0.is(object_));
2089 if (check_mode_ == RECEIVER_IS_UNKNOWN) { 2096 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2090 // If the receiver is a smi trigger the non-string case. 2097 // If the receiver is a smi trigger the non-string case.
(...skipping 2984 matching lines...) Expand 10 before | Expand all | Expand 10 after
5075 kStackUnwindSpace, kInvalidStackOffset, 5082 kStackUnwindSpace, kInvalidStackOffset,
5076 return_value_operand, NULL); 5083 return_value_operand, NULL);
5077 } 5084 }
5078 5085
5079 #undef __ 5086 #undef __
5080 5087
5081 } // namespace internal 5088 } // namespace internal
5082 } // namespace v8 5089 } // namespace v8
5083 5090
5084 #endif // V8_TARGET_ARCH_MIPS 5091 #endif // V8_TARGET_ARCH_MIPS
OLDNEW
« no previous file with comments | « src/interface-descriptors.cc ('k') | src/mips/interface-descriptors-mips.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698