Chromium Code Reviews

Side by Side Diff: src/arm64/code-stubs-arm64.cc

Issue 2670843002: [stubs] Also port the CallICStub to CSA. (Closed)
Patch Set: Introduce FullCodeGenerator::IntFromSlot. Created 3 years, 10 months ago
// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
(...skipping 1953 matching lines...)
  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
  __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x4);

  __ Bind(&non_function);
  __ Mov(x3, function);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

(The block below, from IncrementCallCount through CallICStub::GenerateMiss, appears only on the old side of the diff: it is removed by this patch, which ports the CallICStub to CSA.)

// Note: feedback_vector and slot are clobbered after the call.
static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
                               Register slot) {
  __ Add(feedback_vector, feedback_vector,
         Operand::UntagSmiAndScale(slot, kPointerSizeLog2));
  __ Add(feedback_vector, feedback_vector,
         Operand(FixedArray::kHeaderSize + kPointerSize));
  __ Ldr(slot, FieldMemOperand(feedback_vector, 0));
  __ Add(slot, slot, Operand(Smi::FromInt(1)));
  __ Str(slot, FieldMemOperand(feedback_vector, 0));
}
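For readers skimming the review, here is a minimal standalone sketch of the arithmetic that IncrementCallCount encodes. It is not part of the patch; it assumes V8's 64-bit Smi representation on arm64 (payload in the upper 32 bits) and uses illustrative stand-ins for kPointerSize and FixedArray::kHeaderSize. The call count sits one pointer past the feedback entry at vector[slot], and adding Smi::FromInt(1) to the tagged count bumps the untagged value by one. (FieldMemOperand additionally compensates for the heap-object tag, which the sketch ignores.)

// Illustrative only; the constants below are stand-ins, not the V8 definitions.
#include <cstdint>
#include <cstdio>

constexpr int kSmiShiftSketch = 32;              // arm64 Smi payload lives in the high word
constexpr int64_t kPointerSizeSketch = 8;        // stand-in for kPointerSize
constexpr int64_t kFixedArrayHeaderSketch = 16;  // stand-in for FixedArray::kHeaderSize

constexpr int64_t SmiTag(int64_t value) { return value << kSmiShiftSketch; }
constexpr int64_t SmiUntag(int64_t smi) { return smi >> kSmiShiftSketch; }

// Mirrors the two Adds above: untag-and-scale the slot, then skip the header
// plus one more pointer, because the call count is stored at vector[slot + 1].
constexpr int64_t CallCountByteOffset(int64_t slot_smi) {
  return SmiUntag(slot_smi) * kPointerSizeSketch + kFixedArrayHeaderSketch +
         kPointerSizeSketch;
}

int main() {
  int64_t count = SmiTag(41);
  count += SmiTag(1);  // what Add(slot, slot, Operand(Smi::FromInt(1))) does to the tagged word
  std::printf("offset for slot 3: %lld bytes, count: %lld\n",
              static_cast<long long>(CallCountByteOffset(SmiTag(3))),
              static_cast<long long>(SmiUntag(count)));  // 48 bytes, 42
  return 0;
}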

void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // x0 - number of arguments
  // x1 - function
  // x3 - slot id
  // x2 - vector
  // x4 - allocation site (loaded from vector[slot])
  Register function = x1;
  Register feedback_vector = x2;
  Register index = x3;
  Register allocation_site = x4;
  Register scratch = x5;

  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch);
  __ Cmp(function, scratch);
  __ B(ne, miss);

  // Increment the call count for monomorphic function calls.
  IncrementCallCount(masm, feedback_vector, index);

  // Set up arguments for the array constructor stub.
  Register allocation_site_arg = feedback_vector;
  Register new_target_arg = index;
  __ Mov(allocation_site_arg, allocation_site);
  __ Mov(new_target_arg, function);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
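A brief aside on the guard at the top of HandleArrayCase: the Array() fast path applies only when the callee is the canonical Array function held in the native context; anything else falls through to the miss label. A simplified, self-contained rendering of that check follows (hypothetical types, not the V8 API). Afterwards the stub reuses the already-clobbered vector and slot registers as the AllocationSite and new-target arguments for ArrayConstructorStub.

// Hypothetical stand-in for the native context; slot ARRAY_FUNCTION_INDEX in
// the real context holds the canonical Array constructor.
struct NativeContextSketch {
  const void* array_function;
};

// Mirrors LoadNativeContextSlot + Cmp + B(ne, miss): take the fast path only
// for the one canonical Array function.
inline bool IsCanonicalArrayCall(const NativeContextSketch& context,
                                 const void* callee) {
  return context.array_function == callee;
}

int main() {
  const int array_fn = 0, other_fn = 0;
  NativeContextSketch context{&array_fn};
  return IsCanonicalArrayCall(context, &array_fn) &&      // fast path
                 !IsCanonicalArrayCall(context, &other_fn)  // would branch to miss
             ? 0
             : 1;
}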


void CallICStub::Generate(MacroAssembler* masm) {
  ASM_LOCATION("CallICStub");

  // x0 - number of arguments
  // x1 - function
  // x3 - slot id (Smi)
  // x2 - vector
  Label extra_checks_or_miss, call, call_function, call_count_incremented;

  Register function = x1;
  Register feedback_vector = x2;
  Register index = x3;

  // The checks. First, does x1 match the recorded monomorphic target?
  __ Add(x4, feedback_vector,
         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
  __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ Ldr(x5, FieldMemOperand(x4, WeakCell::kValueOffset));
  __ Cmp(x5, function);
  __ B(ne, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against this
  // convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(function, &extra_checks_or_miss);

  __ Bind(&call_function);

  // Increment the call count for monomorphic function calls.
  IncrementCallCount(masm, feedback_vector, index);

  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  __ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &call);

  __ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &not_allocation_site);

  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    __ jmp(&miss);
  }

  // TODO(mvstanton): the code below is effectively disabled. Investigate.
  __ JumpIfRoot(x4, Heap::kuninitialized_symbolRootIndex, &miss);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(x4);
  __ JumpIfNotObjectType(x4, x5, x5, JS_FUNCTION_TYPE, &miss);
  __ Add(x4, feedback_vector,
         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
  __ LoadRoot(x5, Heap::kmegamorphic_symbolRootIndex);
  __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));

  __ Bind(&call);

  // Increment the call count for megamorphic function calls.
  IncrementCallCount(masm, feedback_vector, index);

  __ Bind(&call_count_incremented);
  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(function, &miss);

  // Goto miss case if we do not have a function.
  __ JumpIfNotObjectType(function, x5, x5, JS_FUNCTION_TYPE, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, x5);
  __ Cmp(function, x5);
  __ B(eq, &miss);

  // Make sure the function belongs to the same native context.
  __ Ldr(x4, FieldMemOperand(function, JSFunction::kContextOffset));
  __ Ldr(x4, ContextMemOperand(x4, Context::NATIVE_CONTEXT_INDEX));
  __ Ldr(x5, NativeContextMemOperand());
  __ Cmp(x4, x5);
  __ B(ne, &miss);

  // Store the function. Use a stub since we need a frame for allocation.
  // x2 - vector
  // x3 - slot
  // x1 - function
  // x0 - number of arguments
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    CreateWeakCellStub create_stub(masm->isolate());
    __ SmiTag(x0);
    __ Push(x0);
    __ Push(feedback_vector, index);

    __ Push(cp, function);
    __ CallStub(&create_stub);
    __ Pop(cp, function);

    __ Pop(feedback_vector, index);
    __ Pop(x0);
    __ SmiUntag(x0);
  }

  __ B(&call_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  // The runtime increments the call count in the vector for us.
  __ B(&call_count_incremented);
}
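The single Ldr through WeakCell::kValueOffset in the fast path above is only sound because all three object kinds that can occupy the feedback slot keep their interesting field at the same offset, which the STATIC_ASSERTs pin down. Below is a self-contained illustration of that compile-time pattern, using hypothetical layouts rather than the real V8 object headers. The subsequent JumpIfSmi on the function then guards the remaining corner case called out in the comment: a Smi-vs-Smi compare matching by accident.

#include <cstddef>

// Hypothetical stand-ins; the real objects are WeakCell, AllocationSite and
// Symbol, whose value/transition-info/hash fields share one offset.
struct WeakCellSketch       { void* map; void* value; };            // JSFunction or Smi(0)
struct AllocationSiteSketch { void* map; void* transition_info; };  // Smi or FixedArray
struct SymbolSketch         { void* map; void* hash_field; };       // never matches a JSFunction

static_assert(offsetof(WeakCellSketch, value) ==
                  offsetof(AllocationSiteSketch, transition_info) &&
              offsetof(AllocationSiteSketch, transition_info) ==
                  offsetof(SymbolSketch, hash_field),
              "one load must read the same slot whichever shape is present");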


void CallICStub::GenerateMiss(MacroAssembler* masm) {
  ASM_LOCATION("CallICStub[Miss]");

  FrameScope scope(masm, StackFrame::INTERNAL);

  // Preserve the number of arguments as Smi.
  __ SmiTag(x0);

  // Push the number of arguments, the function, and the feedback info.
  __ Push(x0, x1, x2, x3);

  // Call the entry.
  __ CallRuntime(Runtime::kCallIC_Miss);

  // Move result to x1 and exit the internal frame.
  __ Mov(x1, x0);

  // Restore number of arguments.
  __ Pop(x0);
  __ SmiUntag(x0);
}
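One detail of GenerateMiss worth calling out: the raw argument count is Smi-tagged before it is pushed and untagged after the pop, so every pushed stack slot holds a valid tagged value while Runtime::kCallIC_Miss runs, which keeps the frame safely scannable if a GC is triggered during the call. A standalone sketch of why that round trip is lossless under the 64-bit Smi layout assumed earlier (again illustrative, not V8 code):

#include <cassert>
#include <cstdint>

constexpr int kSmiShiftSketch = 32;
constexpr int64_t SmiTag(int64_t v) { return v << kSmiShiftSketch; }
constexpr int64_t SmiUntag(int64_t s) { return s >> kSmiShiftSketch; }

int main() {
  // Push-as-Smi / pop-and-untag preserves any 32-bit argument count exactly.
  for (int64_t argc : {0, 1, 7, 0x7fffffff}) {
    assert(SmiUntag(SmiTag(argc)) == argc);
  }
  return 0;
}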


void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi trigger the non-string case.
  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
    __ JumpIfSmi(object_, receiver_not_string_);

    // Fetch the instance type of the receiver into result register.
    __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
    __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));

    // If the receiver is not a string trigger the non-string case.
(...skipping 2282 matching lines...)
      kStackUnwindSpace, NULL, spill_offset,
      return_value_operand, NULL);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64