Chromium Code Reviews

Diff: src/x64/code-stubs-x64.cc

Issue 866493003: Retry "Use a WeakCell in the CallIC type vector." (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: REBASE. Created 5 years, 10 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #if V8_TARGET_ARCH_X64

 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
(...skipping 2097 matching lines...)
       FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
   Label extra_checks_or_miss, slow_start;
   Label slow, non_function, wrap, cont;
   Label have_js_function;
   int argc = arg_count();
   StackArgumentsAccessor args(rsp, argc);
   ParameterCount actual(argc);

   // The checks. First, does rdi match the recorded monomorphic target?
   __ SmiToInteger32(rdx, rdx);
-  __ cmpp(rdi, FieldOperand(rbx, rdx, times_pointer_size,
-                            FixedArray::kHeaderSize));
+  __ movp(rcx,
+          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
+
+  // We don't know that we have a weak cell. We might have a private symbol
+  // or an AllocationSite, but the memory is safe to examine.
+  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
+  // FixedArray.
+  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
+  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
+  // computed, meaning that it can't appear to be a pointer. If the low bit is
+  // 0, then hash is computed, but the 0 bit prevents the field from appearing
+  // to be a pointer.
+  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
+  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
+                    WeakCell::kValueOffset &&
+                WeakCell::kValueOffset == Symbol::kHashFieldSlot);
+
+  __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
   __ j(not_equal, &extra_checks_or_miss);

+  // The compare above could have been a SMI/SMI comparison. Guard against this
+  // convincing us that we have a monomorphic JSFunction.
+  __ JumpIfSmi(rdi, &extra_checks_or_miss);
+
   __ bind(&have_js_function);
   if (CallAsMethod()) {
     EmitContinueIfStrictOrNative(masm, &cont);

     // Load the receiver from the stack.
     __ movp(rax, args.GetReceiverOperand());

     __ JumpIfSmi(rax, &wrap);

     __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
     __ j(below, &wrap);

     __ bind(&cont);
   }

   __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());

   __ bind(&slow);
   EmitSlowCase(isolate, masm, &args, argc, &non_function);

   if (CallAsMethod()) {
     __ bind(&wrap);
     EmitWrapCase(masm, &args, &cont);
   }

   __ bind(&extra_checks_or_miss);
   Label uninitialized, miss;

-  __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
-                            FixedArray::kHeaderSize));
   __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
   __ j(equal, &slow_start);

   // The following cases attempt to handle MISS cases without going to the
   // runtime.
   if (FLAG_trace_ic) {
     __ jmp(&miss);
   }

   __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
(...skipping 22 matching lines...)

   // Make sure the function is not the Array() function, which requires special
   // behavior on MISS.
   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
   __ cmpp(rdi, rcx);
   __ j(equal, &miss);

   // Update stats.
   __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1));

-  // Store the function.
-  __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
-          rdi);
-
-  // Update the write barrier.
-  __ movp(rax, rdi);
-  __ RecordWriteArray(rbx, rax, rdx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
-                      OMIT_SMI_CHECK);
+  // Store the function. Use a stub since we need a frame for allocation.
+  // rbx - vector
+  // rdx - slot (needs to be in smi form)
+  // rdi - function
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    CreateWeakCellStub create_stub(isolate);
+
+    __ Integer32ToSmi(rdx, rdx);
+    __ Push(rdi);
+    __ CallStub(&create_stub);
+    __ Pop(rdi);
+  }
+
   __ jmp(&have_js_function);

   // We are here because tracing is on or we encountered a MISS case we can't
   // handle here.
   __ bind(&miss);
   GenerateMiss(masm);

   // the slow case
   __ bind(&slow_start);
   // Check that function is not a smi.
(...skipping 41 matching lines...)
 }


 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
+  CreateWeakCellStub::GenerateAheadOfTime(isolate);
   BinaryOpICStub::GenerateAheadOfTime(isolate);
   BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
 }


 void CodeStub::GenerateFPStubs(Isolate* isolate) {
 }


 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
(...skipping 2763 matching lines...)
   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                            kStackSpace, nullptr, return_value_operand, NULL);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
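
Note on the new fast-path check: the rewritten prologue loads the feedback-vector slot once, reads the word that the STATIC_ASSERTs prove sits at the same offset in a WeakCell, an AllocationSite, or a Symbol, compares it against the callee in rdi, and then rejects a Smi/Smi match. The following stand-alone sketch (plain C++, not V8 source; Slot, MonomorphicHit, and the literal pointer values are illustrative stand-ins) mirrors that logic:

  #include <cstdint>
  #include <iostream>

  using Word = uintptr_t;

  // V8 tags Smis with a low bit of 0 and heap objects with a low bit of 1.
  constexpr bool IsSmi(Word w) { return (w & 1) == 0; }

  // Stand-in for the feedback-vector slot: the STATIC_ASSERTs in the diff
  // guarantee that WeakCell::kValueOffset, AllocationSite::kTransitionInfoOffset
  // and Symbol::kHashFieldSlot all name the same word, so the stub can load it
  // without first checking the slot object's type.
  struct Slot { Word first_word; };

  // What the new code computes before falling through to have_js_function.
  bool MonomorphicHit(const Slot& slot, Word callee) {
    if (slot.first_word != callee) return false;  // cmpp; j(not_equal, &extra_checks_or_miss)
    if (IsSmi(callee)) return false;              // JumpIfSmi(rdi, &extra_checks_or_miss)
    return true;
  }

  int main() {
    const Word fn = 0x1001;  // pretend tagged heap pointer (low bit set)
    std::cout << MonomorphicHit({fn}, fn) << "\n"       // 1: cached target matches
              << MonomorphicHit({0}, 0) << "\n"         // 0: cleared WeakCell (Smi 0) must not count
              << MonomorphicHit({fn}, 0x2001) << "\n";  // 0: different callee -> extra checks
  }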
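Note on the new store path: instead of writing rdi straight into the FixedArray and emitting a write barrier, the IC now calls CreateWeakCellStub inside an internal frame, because allocating the WeakCell may trigger GC; hence the FrameScope, the re-tagging of the slot index as a Smi, and the push/pop of rdi around the call. The sketch below is a rough model using std::weak_ptr rather than anything from V8, with made-up names, but it shows why the weak cell is the point of this CL: the diff's own comments say a cleared cell holds Smi(0), so the feedback vector no longer keeps the cached function alive, and a collected function simply reads back as a miss.

  #include <iostream>
  #include <memory>
  #include <vector>

  struct JSFunctionLike { const char* name; };

  struct WeakCellLike {
    std::weak_ptr<JSFunctionLike> value;  // models the word at WeakCell::kValueOffset
  };

  int main() {
    std::vector<WeakCellLike> feedback_vector(1);  // one CallIC slot

    {
      auto fn = std::make_shared<JSFunctionLike>(JSFunctionLike{"callee"});
      feedback_vector[0].value = fn;  // what the CreateWeakCellStub path records
      std::cout << !feedback_vector[0].value.expired() << "\n";  // 1: monomorphic hit possible
    }  // fn dies; only the weak cell referenced it

    // After collection the cell reads as cleared (Smi(0) in V8), so the IC
    // misses instead of keeping a dead function reachable.
    std::cout << feedback_vector[0].value.expired() << "\n";  // 1: cleared
  }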
