OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_IA32 |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 2227 matching lines...) |
2238 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2238 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
2239 const int generic_offset = | 2239 const int generic_offset = |
2240 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 2240 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
2241 Label extra_checks_or_miss, slow_start; | 2241 Label extra_checks_or_miss, slow_start; |
2242 Label slow, non_function, wrap, cont; | 2242 Label slow, non_function, wrap, cont; |
2243 Label have_js_function; | 2243 Label have_js_function; |
2244 int argc = arg_count(); | 2244 int argc = arg_count(); |
2245 ParameterCount actual(argc); | 2245 ParameterCount actual(argc); |
2246 | 2246 |
2247 // The checks. First, does edi match the recorded monomorphic target? | 2247 // The checks. First, does edi match the recorded monomorphic target? |
2248 __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size, | 2248 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, |
2249 FixedArray::kHeaderSize)); | 2249 FixedArray::kHeaderSize)); |
| 2250 |
| 2251 // We don't know that we have a weak cell. We might have a private symbol |
| 2252 // or an AllocationSite, but the memory is safe to examine. |
| 2253 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
| 2254 // FixedArray. |
| 2255 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) |
| 2256 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not |
| 2257 // computed, meaning that it can't appear to be a pointer. If the low bit is |
| 2258 // 0, then hash is computed, but the 0 bit prevents the field from appearing |
| 2259 // to be a pointer. |
| 2260 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); |
| 2261 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == |
| 2262 WeakCell::kValueOffset && |
| 2263 WeakCell::kValueOffset == Symbol::kHashFieldSlot); |
| 2264 |
| 2265 __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset)); |
2250 __ j(not_equal, &extra_checks_or_miss); | 2266 __ j(not_equal, &extra_checks_or_miss); |
2251 | 2267 |
| 2268 // The compare above could have been a SMI/SMI comparison. Guard against this |
| 2269 // convincing us that we have a monomorphic JSFunction. |
| 2270 __ JumpIfSmi(edi, &extra_checks_or_miss); |
| 2271 |
2252 __ bind(&have_js_function); | 2272 __ bind(&have_js_function); |
2253 if (CallAsMethod()) { | 2273 if (CallAsMethod()) { |
2254 EmitContinueIfStrictOrNative(masm, &cont); | 2274 EmitContinueIfStrictOrNative(masm, &cont); |
2255 | 2275 |
2256 // Load the receiver from the stack. | 2276 // Load the receiver from the stack. |
2257 __ mov(eax, Operand(esp, (argc + 1) * kPointerSize)); | 2277 __ mov(eax, Operand(esp, (argc + 1) * kPointerSize)); |
2258 | 2278 |
2259 __ JumpIfSmi(eax, &wrap); | 2279 __ JumpIfSmi(eax, &wrap); |
2260 | 2280 |
2261 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); | 2281 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); |
2262 __ j(below, &wrap); | 2282 __ j(below, &wrap); |
2263 | 2283 |
2264 __ bind(&cont); | 2284 __ bind(&cont); |
2265 } | 2285 } |
2266 | 2286 |
2267 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); | 2287 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); |
2268 | 2288 |
2269 __ bind(&slow); | 2289 __ bind(&slow); |
2270 EmitSlowCase(isolate, masm, argc, &non_function); | 2290 EmitSlowCase(isolate, masm, argc, &non_function); |
2271 | 2291 |
2272 if (CallAsMethod()) { | 2292 if (CallAsMethod()) { |
2273 __ bind(&wrap); | 2293 __ bind(&wrap); |
2274 EmitWrapCase(masm, argc, &cont); | 2294 EmitWrapCase(masm, argc, &cont); |
2275 } | 2295 } |
2276 | 2296 |
2277 __ bind(&extra_checks_or_miss); | 2297 __ bind(&extra_checks_or_miss); |
2278 Label uninitialized, miss; | 2298 Label uninitialized, miss; |
2279 | 2299 |
2280 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, | |
2281 FixedArray::kHeaderSize)); | |
2282 __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate))); | 2300 __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate))); |
2283 __ j(equal, &slow_start); | 2301 __ j(equal, &slow_start); |
2284 | 2302 |
2285 // The following cases attempt to handle MISS cases without going to the | 2303 // The following cases attempt to handle MISS cases without going to the |
2286 // runtime. | 2304 // runtime. |
2287 if (FLAG_trace_ic) { | 2305 if (FLAG_trace_ic) { |
2288 __ jmp(&miss); | 2306 __ jmp(&miss); |
2289 } | 2307 } |
2290 | 2308 |
2291 __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate))); | 2309 __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate))); |
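Note on the slot check added above (new lines 2248–2270): the load from FixedArray::kHeaderSize happens before we know what kind of object occupies the feedback slot, and the STATIC_ASSERTs are what make the follow-up read of WeakCell::kValueOffset safe. Below is a minimal, self-contained C++ sketch of that offset-aliasing idea; the struct layouts are hypothetical stand-ins, not V8's real object definitions.

    #include <cstddef>
    #include <cstdint>

    // Hypothetical stand-ins for the three object kinds the slot may hold; the
    // real V8 layouts are richer, but the trick only needs the relevant field
    // of each to live at the same offset.
    struct WeakCell       { uintptr_t value;           };  // JSFunction or Smi(0)
    struct AllocationSite { uintptr_t transition_info; };  // Smi or FixedArray pointer
    struct Symbol         { uintptr_t hash_field;      };  // never looks like a pointer

    // Mirrors the STATIC_ASSERTs in the stub: because the offsets agree, reading
    // "the value field" through any of these views touches the same word, so the
    // cmp against WeakCell::kValueOffset never reads past a smaller object or
    // misinterprets unrelated memory.
    static_assert(offsetof(WeakCell, value) == offsetof(AllocationSite, transition_info),
                  "slot field offsets must agree");
    static_assert(offsetof(WeakCell, value) == offsetof(Symbol, hash_field),
                  "slot field offsets must agree");
    static_assert(sizeof(WeakCell) >= sizeof(uintptr_t),
                  "WeakCell must cover the field being read");

The JumpIfSmi at new line 2270 then covers the remaining hole the comment mentions: if edi held a Smi whose bits happened to equal the word loaded from the slot, the compare alone could spuriously report a monomorphic JSFunction.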
(...skipping 23 matching lines...) |
2315 | 2333 |
2316 // Make sure the function is not the Array() function, which requires special | 2334 // Make sure the function is not the Array() function, which requires special |
2317 // behavior on MISS. | 2335 // behavior on MISS. |
2318 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); | 2336 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); |
2319 __ cmp(edi, ecx); | 2337 __ cmp(edi, ecx); |
2320 __ j(equal, &miss); | 2338 __ j(equal, &miss); |
2321 | 2339 |
2322 // Update stats. | 2340 // Update stats. |
2323 __ add(FieldOperand(ebx, with_types_offset), Immediate(Smi::FromInt(1))); | 2341 __ add(FieldOperand(ebx, with_types_offset), Immediate(Smi::FromInt(1))); |
2324 | 2342 |
2325 // Store the function. | 2343 // Store the function. Use a stub since we need a frame for allocation. |
2326 __ mov( | 2344 // ebx - vector |
2327 FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize), | 2345 // edx - slot |
2328 edi); | 2346 // edi - function |
| 2347 { |
| 2348 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2349 CreateWeakCellStub create_stub(isolate); |
| 2350 __ push(edi); |
| 2351 __ CallStub(&create_stub); |
| 2352 __ pop(edi); |
| 2353 } |
2329 | 2354 |
2330 // Update the write barrier. | |
2331 __ mov(eax, edi); | |
2332 __ RecordWriteArray(ebx, eax, edx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | |
2333 OMIT_SMI_CHECK); | |
2334 __ jmp(&have_js_function); | 2355 __ jmp(&have_js_function); |
2335 | 2356 |
2336 // We are here because tracing is on or we encountered a MISS case we can't | 2357 // We are here because tracing is on or we encountered a MISS case we can't |
2337 // handle here. | 2358 // handle here. |
2338 __ bind(&miss); | 2359 __ bind(&miss); |
2339 GenerateMiss(masm); | 2360 GenerateMiss(masm); |
2340 | 2361 |
2341 // the slow case | 2362 // the slow case |
2342 __ bind(&slow_start); | 2363 __ bind(&slow_start); |
2343 | 2364 |
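For context on the CreateWeakCellStub call above (new lines 2347–2353): the patch replaces the direct store of edi into the feedback slot plus RecordWriteArray (old lines 2326–2333) with a stub call inside an internal frame, so the slot ends up holding a WeakCell that wraps the function rather than the function itself, presumably so the vector no longer keeps the function strongly alive (per the comment at new line 2255, the WeakCell's value can read as Smi(0)). A rough, non-V8 sketch of that semantic difference, using std::weak_ptr purely as an analogy for a clearable WeakCell:

    #include <cassert>
    #include <memory>

    struct JSFunctionStub {};  // hypothetical stand-in for a JSFunction

    int main() {
      auto function = std::make_shared<JSFunctionStub>();

      // Old scheme (deleted old lines 2326-2333): the slot referenced the
      // function directly, so the feedback vector kept it alive.
      std::shared_ptr<JSFunctionStub> strong_slot = function;

      // New scheme: the slot holds a weak reference; once the function dies
      // elsewhere, the slot reads as cleared (analogous to
      // WeakCell::kValueOffset holding Smi(0)).
      std::weak_ptr<JSFunctionStub> weak_slot = function;

      strong_slot.reset();
      function.reset();
      assert(weak_slot.expired());  // the vector no longer pins the function
      return 0;
    }

The FrameScope/push/pop around the stub call is there because, as the comment says, the stub needs a frame for allocation; edi is live across the call, so it is saved and restored explicitly.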
(...skipping 42 matching lines...) |
2386 } | 2407 } |
2387 | 2408 |
2388 | 2409 |
2389 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2410 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
2390 CEntryStub::GenerateAheadOfTime(isolate); | 2411 CEntryStub::GenerateAheadOfTime(isolate); |
2391 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2412 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
2392 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2413 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
2393 // It is important that the store buffer overflow stubs are generated first. | 2414 // It is important that the store buffer overflow stubs are generated first. |
2394 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 2415 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
2395 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 2416 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 2417 CreateWeakCellStub::GenerateAheadOfTime(isolate); |
2396 BinaryOpICStub::GenerateAheadOfTime(isolate); | 2418 BinaryOpICStub::GenerateAheadOfTime(isolate); |
2397 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 2419 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
2398 } | 2420 } |
2399 | 2421 |
2400 | 2422 |
2401 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 2423 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
2402 // Generate if not already in cache. | 2424 // Generate if not already in cache. |
2403 CEntryStub(isolate, 1, kSaveFPRegs).GetCode(); | 2425 CEntryStub(isolate, 1, kSaveFPRegs).GetCode(); |
2404 isolate->set_fp_stubs_generated(true); | 2426 isolate->set_fp_stubs_generated(true); |
2405 } | 2427 } |
(...skipping 2677 matching lines...) |
5083 ApiParameterOperand(2), kStackSpace, nullptr, | 5105 ApiParameterOperand(2), kStackSpace, nullptr, |
5084 Operand(ebp, 7 * kPointerSize), NULL); | 5106 Operand(ebp, 7 * kPointerSize), NULL); |
5085 } | 5107 } |
5086 | 5108 |
5087 | 5109 |
5088 #undef __ | 5110 #undef __ |
5089 | 5111 |
5090 } } // namespace v8::internal | 5112 } } // namespace v8::internal |
5091 | 5113 |
5092 #endif // V8_TARGET_ARCH_IA32 | 5114 #endif // V8_TARGET_ARCH_IA32 |