Chromium Code Reviews

Unified Diff: src/arm64/code-stubs-arm64.cc

Issue 2412453005: [stubs] Refactor the CallICStub to pass the number of arguments. (Closed)
Patch Set: Created 4 years, 2 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_ARM64

 #include "src/code-stubs.h"
 #include "src/api-arguments.h"
 #include "src/bootstrapper.h"
 #include "src/codegen.h"
(...skipping 1979 matching lines...)
   __ Add(feedback_vector, feedback_vector,
          Operand::UntagSmiAndScale(slot, kPointerSizeLog2));
   __ Add(feedback_vector, feedback_vector,
          Operand(FixedArray::kHeaderSize + kPointerSize));
   __ Ldr(slot, FieldMemOperand(feedback_vector, 0));
   __ Add(slot, slot, Operand(Smi::FromInt(1)));
   __ Str(slot, FieldMemOperand(feedback_vector, 0));
 }

 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
+  // x0 - number of arguments
   // x1 - function
   // x3 - slot id
   // x2 - vector
   // x4 - allocation site (loaded from vector[slot])
   Register function = x1;
   Register feedback_vector = x2;
   Register index = x3;
   Register allocation_site = x4;
   Register scratch = x5;

   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch);
   __ Cmp(function, scratch);
   __ B(ne, miss);

   // Increment the call count for monomorphic function calls.
   IncrementCallCount(masm, feedback_vector, index);

   // Set up arguments for the array constructor stub.
   Register allocation_site_arg = feedback_vector;
   Register new_target_arg = index;
   __ Mov(allocation_site_arg, allocation_site);
   __ Mov(new_target_arg, function);
-  __ Mov(x0, Operand(arg_count()));
   ArrayConstructorStub stub(masm->isolate());
   __ TailCallStub(&stub);
 }


 void CallICStub::Generate(MacroAssembler* masm) {
   ASM_LOCATION("CallICStub");

+  // x0 - number of arguments
   // x1 - function
   // x3 - slot id (Smi)
   // x2 - vector
   Label extra_checks_or_miss, call, call_function, call_count_incremented;
-  int argc = arg_count();
-  ParameterCount actual(argc);

   Register function = x1;
   Register feedback_vector = x2;
   Register index = x3;

   // The checks. First, does x1 match the recorded monomorphic target?
   __ Add(x4, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));

(...skipping 17 matching lines...)

   // The compare above could have been a SMI/SMI comparison. Guard against this
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(function, &extra_checks_or_miss);

   __ Bind(&call_function);

   // Increment the call count for monomorphic function calls.
   IncrementCallCount(masm, feedback_vector, index);

-  __ Mov(x0, argc);
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
           RelocInfo::CODE_TARGET);

   __ bind(&extra_checks_or_miss);
   Label uninitialized, miss, not_allocation_site;

   __ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &call);

   __ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset));
(...skipping 20 matching lines...)
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ LoadRoot(x5, Heap::kmegamorphic_symbolRootIndex);
   __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));

   __ Bind(&call);

   // Increment the call count for megamorphic function calls.
   IncrementCallCount(masm, feedback_vector, index);

   __ Bind(&call_count_incremented);
-  __ Mov(x0, argc);
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);

   __ bind(&uninitialized);

   // We are going monomorphic, provided we actually have a JSFunction.
   __ JumpIfSmi(function, &miss);

   // Goto miss case if we do not have a function.
   __ JumpIfNotObjectType(function, x5, x5, JS_FUNCTION_TYPE, &miss);

   // Make sure the function is not the Array() function, which requires special
   // behavior on MISS.
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, x5);
   __ Cmp(function, x5);
   __ B(eq, &miss);

   // Make sure the function belongs to the same native context.
   __ Ldr(x4, FieldMemOperand(function, JSFunction::kContextOffset));
   __ Ldr(x4, ContextMemOperand(x4, Context::NATIVE_CONTEXT_INDEX));
   __ Ldr(x5, NativeContextMemOperand());
   __ Cmp(x4, x5);
   __ B(ne, &miss);

   // Store the function. Use a stub since we need a frame for allocation.
   // x2 - vector
   // x3 - slot
   // x1 - function
+  // x0 - number of arguments
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(masm->isolate());
+    __ SmiTag(x0);
+    __ Push(x0);
     __ Push(feedback_vector, index);

     __ Push(cp, function);
     __ CallStub(&create_stub);
     __ Pop(cp, function);

     __ Pop(feedback_vector, index);
+    __ Pop(x0);
+    __ SmiUntag(x0);
   }

   __ B(&call_function);

   // We are here because tracing is on or we encountered a MISS case we can't
   // handle here.
   __ bind(&miss);
   GenerateMiss(masm);

   // The runtime increments the call count in the vector for us.
   __ B(&call_count_incremented);
 }


 void CallICStub::GenerateMiss(MacroAssembler* masm) {
   ASM_LOCATION("CallICStub[Miss]");

   FrameScope scope(masm, StackFrame::INTERNAL);

+  // Preserve the number of arguments as Smi.
+  __ SmiTag(x0);
+
   // Push the receiver and the function and feedback info.
-  __ Push(x1, x2, x3);
+  __ Push(x0, x1, x2, x3);

   // Call the entry.
   __ CallRuntime(Runtime::kCallIC_Miss);

   // Move result to x1 and exit the internal frame.
   __ Mov(x1, x0);
+
+  // Restore number of arguments.
+  __ Pop(x0);
+  __ SmiUntag(x0);
 }


 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   // If the receiver is a smi trigger the non-string case.
   if (check_mode_ == RECEIVER_IS_UNKNOWN) {
     __ JumpIfSmi(object_, receiver_not_string_);

     // Fetch the instance type of the receiver into result register.
     __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
(...skipping 3028 matching lines...)
                            kStackUnwindSpace, NULL, spill_offset,
                            return_value_operand, NULL);
 }

 #undef __

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_ARM64
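A note on the SmiTag/Push … Pop/SmiUntag pairs this patch adds around the CreateWeakCellStub call and in GenerateMiss: x0 now carries the argument count as a raw integer, and both calls can allocate, so anything spilled to the stack across them must look like a tagged value to the GC. Below is a minimal sketch of the idea in plain C++ rather than V8 code; kSmiShift, SmiTag, and SmiUntag model the arm64 macro-assembler operations under the assumption of V8's 64-bit Smi layout (32-bit payload in the upper word), and the names are illustrative, not V8's actual declarations.

#include <cassert>
#include <cstdint>

// Illustrative model of the 64-bit Smi encoding on arm64: the integer
// payload lives in the upper 32 bits, leaving the low tag bit clear.
constexpr int kSmiShift = 32;

uint64_t SmiTag(uint32_t value) {
  // What __ SmiTag(x0) does: shift the raw count into the Smi payload bits.
  return static_cast<uint64_t>(value) << kSmiShift;
}

uint32_t SmiUntag(uint64_t smi) {
  // What __ SmiUntag(x0) does: a single logical shift right recovers the raw value.
  return static_cast<uint32_t>(smi >> kSmiShift);
}

int main() {
  uint32_t argc = 3;                  // raw argument count held in x0
  uint64_t spilled = SmiTag(argc);    // tagged before __ Push(x0)
  assert((spilled & 1) == 0);         // low bit clear: a stack scan reads a Smi
  assert(SmiUntag(spilled) == argc);  // restored after __ Pop(x0)
  return 0;
}

Because the payload sits entirely in the upper word, the saved count is a valid small integer rather than something a stack scan could mistake for a heap pointer, and the save/restore costs only two instructions on each side.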