OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_IA32 |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 2227 matching lines...) |
2238 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2238 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
2239 const int generic_offset = | 2239 const int generic_offset = |
2240 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 2240 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
2241 Label extra_checks_or_miss, slow_start; | 2241 Label extra_checks_or_miss, slow_start; |
2242 Label slow, non_function, wrap, cont; | 2242 Label slow, non_function, wrap, cont; |
2243 Label have_js_function; | 2243 Label have_js_function; |
2244 int argc = arg_count(); | 2244 int argc = arg_count(); |
2245 ParameterCount actual(argc); | 2245 ParameterCount actual(argc); |
2246 | 2246 |
2247 // The checks. First, does edi match the recorded monomorphic target? | 2247 // The checks. First, does edi match the recorded monomorphic target? |
2248 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, | 2248 __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size, |
2249 FixedArray::kHeaderSize)); | 2249 FixedArray::kHeaderSize)); |
2250 | |
2251 // We don't know that we have a weak cell. We might have a private symbol | |
2252 // or an AllocationSite, but the memory is safe to examine. | |
2253 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | |
2254 // FixedArray. | |
2255 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | |
2256 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | |
2257 // computed, meaning that it can't appear to be a pointer. If the low bit is | |
2258 // 0, then hash is computed, but the 0 bit prevents the field from appearing | |
2259 // to be a pointer. | |
2260 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | |
2261 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | |
2262 WeakCell::kValueOffset && | |
2263 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | |
2264 | |
2265 __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset)); | |
2266 __ j(not_equal, &extra_checks_or_miss); | 2250 __ j(not_equal, &extra_checks_or_miss); |
2267 | 2251 |
2268 // The compare above could have been a SMI/SMI comparison. Guard against this | |
2269 // convincing us that we have a monomorphic JSFunction. | |
2270 __ JumpIfSmi(edi, &extra_checks_or_miss); | |
2271 | |
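A note on the addressing used in the slot load/compare above, for readers less used to ia32 assembler: edx carries the feedback slot index as a Smi (the value shifted left by one), so scaling it by times_half_pointer_size (a factor of 2) yields the same byte offset as scaling the untagged index by the 4-byte pointer size. Below is a minimal standalone sketch of that arithmetic, using assumed constants rather than the real V8 headers:

// Sketch only: the constants here are assumed for illustration, not taken from V8.
#include <cassert>
#include <cstdint>

constexpr int kPointerSize = 4;           // ia32
constexpr int kFixedArrayHeaderSize = 8;  // assumed header size for the sketch

// A Smi stores the integer shifted left by one; the low (tag) bit is 0.
constexpr int32_t SmiFromInt(int32_t value) { return value << 1; }

// FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize)
// addresses ebx + edx * 2 + kHeaderSize - kHeapObjectTag; ignoring the heap
// object tag, the offset of a slot is:
constexpr int32_t SlotOffset(int32_t smi_slot_index) {
  return kFixedArrayHeaderSize + smi_slot_index * (kPointerSize / 2);
}

int main() {
  // Scaling the Smi by kPointerSize / 2 equals scaling the untagged index by
  // kPointerSize, so slot 3 lands three pointers past the header.
  assert(SlotOffset(SmiFromInt(3)) == kFixedArrayHeaderSize + 3 * kPointerSize);
  return 0;
}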
2272 __ bind(&have_js_function); | 2252 __ bind(&have_js_function); |
2273 if (CallAsMethod()) { | 2253 if (CallAsMethod()) { |
2274 EmitContinueIfStrictOrNative(masm, &cont); | 2254 EmitContinueIfStrictOrNative(masm, &cont); |
2275 | 2255 |
2276 // Load the receiver from the stack. | 2256 // Load the receiver from the stack. |
2277 __ mov(eax, Operand(esp, (argc + 1) * kPointerSize)); | 2257 __ mov(eax, Operand(esp, (argc + 1) * kPointerSize)); |
2278 | 2258 |
2279 __ JumpIfSmi(eax, &wrap); | 2259 __ JumpIfSmi(eax, &wrap); |
2280 | 2260 |
2281 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); | 2261 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); |
2282 __ j(below, &wrap); | 2262 __ j(below, &wrap); |
2283 | 2263 |
2284 __ bind(&cont); | 2264 __ bind(&cont); |
2285 } | 2265 } |
2286 | 2266 |
2287 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); | 2267 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); |
2288 | 2268 |
2289 __ bind(&slow); | 2269 __ bind(&slow); |
2290 EmitSlowCase(isolate, masm, argc, &non_function); | 2270 EmitSlowCase(isolate, masm, argc, &non_function); |
2291 | 2271 |
2292 if (CallAsMethod()) { | 2272 if (CallAsMethod()) { |
2293 __ bind(&wrap); | 2273 __ bind(&wrap); |
2294 EmitWrapCase(masm, argc, &cont); | 2274 EmitWrapCase(masm, argc, &cont); |
2295 } | 2275 } |
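The CallAsMethod branch above encodes the sloppy-mode receiver rule: strict and native callees take the receiver as-is, while other callees get a primitive receiver boxed via the &wrap path. A compact restatement under assumed predicates (the enum and checks below are placeholders for the JumpIfSmi / CmpObjectType tests, not V8 types):

// Illustrative restatement of the receiver handling; types are placeholders.
enum class ReceiverKind { kSmi, kPrimitiveHeapObject, kSpecObject };

// Returns true when control should reach &wrap, i.e. the receiver must be
// boxed into a wrapper object before invoking the callee.
bool NeedsReceiverWrap(bool callee_is_strict_or_native, ReceiverKind receiver) {
  if (callee_is_strict_or_native) return false;  // EmitContinueIfStrictOrNative
  // JumpIfSmi and the FIRST_SPEC_OBJECT_TYPE comparison both lead to &wrap.
  return receiver != ReceiverKind::kSpecObject;
}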
2296 | 2276 |
2297 __ bind(&extra_checks_or_miss); | 2277 __ bind(&extra_checks_or_miss); |
2298 Label uninitialized, miss; | 2278 Label uninitialized, miss; |
2299 | 2279 |
| 2280 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, |
| 2281 FixedArray::kHeaderSize)); |
2300 __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate))); | 2282 __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate))); |
2301 __ j(equal, &slow_start); | 2283 __ j(equal, &slow_start); |
2302 | 2284 |
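Together with the monomorphic fast path above, the feedback slot behaves like a small state machine: uninitialized, then monomorphic on a single recorded target, then megamorphic once that assumption fails. The sketch below is a simplified reconstruction of that lifecycle, not the exact logic of the elided lines (Array() feedback, which uses an AllocationSite, is left out):

// Illustrative model of a call-IC feedback slot; sentinels and transitions
// are simplified and do not reproduce the elided V8 code exactly.
struct JSFunction;  // opaque for the sketch

enum class SlotKind { kUninitialized, kMonomorphic, kMegamorphic };

struct FeedbackSlot {
  SlotKind kind = SlotKind::kUninitialized;
  const JSFunction* target = nullptr;  // valid only while monomorphic

  // Record a call to `callee`; the return value says whether the fast
  // (monomorphic) path may be used for it afterwards.
  bool Record(const JSFunction* callee) {
    switch (kind) {
      case SlotKind::kUninitialized:
        kind = SlotKind::kMonomorphic;
        target = callee;
        return true;
      case SlotKind::kMonomorphic:
        if (target == callee) return true;
        kind = SlotKind::kMegamorphic;  // a second target was seen
        target = nullptr;
        return false;
      case SlotKind::kMegamorphic:
        return false;  // always take the generic call path
    }
    return false;
  }
};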
2303 // The following cases attempt to handle MISS cases without going to the | 2285 // The following cases attempt to handle MISS cases without going to the |
2304 // runtime. | 2286 // runtime. |
2305 if (FLAG_trace_ic) { | 2287 if (FLAG_trace_ic) { |
2306 __ jmp(&miss); | 2288 __ jmp(&miss); |
2307 } | 2289 } |
2308 | 2290 |
2309 __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate))); | 2291 __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate))); |
(...skipping 23 matching lines...) |
2333 | 2315 |
2334 // Make sure the function is not the Array() function, which requires special | 2316 // Make sure the function is not the Array() function, which requires special |
2335 // behavior on MISS. | 2317 // behavior on MISS. |
2336 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); | 2318 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); |
2337 __ cmp(edi, ecx); | 2319 __ cmp(edi, ecx); |
2338 __ j(equal, &miss); | 2320 __ j(equal, &miss); |
2339 | 2321 |
2340 // Update stats. | 2322 // Update stats. |
2341 __ add(FieldOperand(ebx, with_types_offset), Immediate(Smi::FromInt(1))); | 2323 __ add(FieldOperand(ebx, with_types_offset), Immediate(Smi::FromInt(1))); |
2342 | 2324 |
2343 // Store the function. Use a stub since we need a frame for allocation. | 2325 // Store the function. |
2344 // ebx - vector | 2326 __ mov( |
2345 // edx - slot | 2327 FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize), |
2346 // edi - function | 2328 edi); |
2347 { | |
2348 FrameScope scope(masm, StackFrame::INTERNAL); | |
2349 CreateWeakCellStub create_stub(isolate); | |
2350 __ push(edi); | |
2351 __ CallStub(&create_stub); | |
2352 __ pop(edi); | |
2353 } | |
2354 | 2329 |
| 2330 // Update the write barrier. |
| 2331 __ mov(eax, edi); |
| 2332 __ RecordWriteArray(ebx, eax, edx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, |
| 2333 OMIT_SMI_CHECK); |
2355 __ jmp(&have_js_function); | 2334 __ jmp(&have_js_function); |
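Because the new code stores edi straight into the feedback vector instead of going through a stub, it has to tell the GC about the pointer it just wrote; that is what the RecordWriteArray call does. What follows is only a conceptual sketch of a remembered-set style write barrier under invented names (HeapObject, InNewSpace, g_remembered_set), not V8's actual implementation:

// Conceptual sketch of a remembered-set write barrier; names are illustrative.
#include <cstdint>
#include <unordered_set>

struct HeapObject { uintptr_t address; };

// Hypothetical young-generation range, for the sketch only.
bool InNewSpace(const HeapObject& obj) {
  constexpr uintptr_t kNewSpaceStart = 0x10000000, kNewSpaceEnd = 0x20000000;
  return obj.address >= kNewSpaceStart && obj.address < kNewSpaceEnd;
}

// Hypothetical remembered set: old-space slots that point into new space.
std::unordered_set<HeapObject**> g_remembered_set;

// Store `value` into an old-space array slot and record the slot if it now
// points into new space, so a minor GC can find it without scanning all of
// old space (this is what EMIT_REMEMBERED_SET asks for, conceptually).
void WriteBarrieredStore(HeapObject** slot, HeapObject* value) {
  *slot = value;
  if (value != nullptr && InNewSpace(*value)) {
    g_remembered_set.insert(slot);
  }
}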
2356 | 2335 |
2357 // We are here because tracing is on or we encountered a MISS case we can't | 2336 // We are here because tracing is on or we encountered a MISS case we can't |
2358 // handle here. | 2337 // handle here. |
2359 __ bind(&miss); | 2338 __ bind(&miss); |
2360 GenerateMiss(masm); | 2339 GenerateMiss(masm); |
2361 | 2340 |
2362 // the slow case | 2341 // the slow case |
2363 __ bind(&slow_start); | 2342 __ bind(&slow_start); |
2364 | 2343 |
(...skipping 42 matching lines...) |
2407 } | 2386 } |
2408 | 2387 |
2409 | 2388 |
2410 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2389 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
2411 CEntryStub::GenerateAheadOfTime(isolate); | 2390 CEntryStub::GenerateAheadOfTime(isolate); |
2412 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2391 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
2413 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2392 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
2414 // It is important that the store buffer overflow stubs are generated first. | 2393 // It is important that the store buffer overflow stubs are generated first. |
2415 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 2394 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
2416 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 2395 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
2417 CreateWeakCellStub::GenerateAheadOfTime(isolate); | |
2418 BinaryOpICStub::GenerateAheadOfTime(isolate); | 2396 BinaryOpICStub::GenerateAheadOfTime(isolate); |
2419 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 2397 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
2420 } | 2398 } |
2421 | 2399 |
2422 | 2400 |
2423 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 2401 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
2424 // Generate if not already in cache. | 2402 // Generate if not already in cache. |
2425 CEntryStub(isolate, 1, kSaveFPRegs).GetCode(); | 2403 CEntryStub(isolate, 1, kSaveFPRegs).GetCode(); |
2426 isolate->set_fp_stubs_generated(true); | 2404 isolate->set_fp_stubs_generated(true); |
2427 } | 2405 } |
(...skipping 2675 matching lines...) |
5103 ApiParameterOperand(2), kStackSpace, nullptr, | 5081 ApiParameterOperand(2), kStackSpace, nullptr, |
5104 Operand(ebp, 7 * kPointerSize), NULL); | 5082 Operand(ebp, 7 * kPointerSize), NULL); |
5105 } | 5083 } |
5106 | 5084 |
5107 | 5085 |
5108 #undef __ | 5086 #undef __ |
5109 | 5087 |
5110 } } // namespace v8::internal | 5088 } } // namespace v8::internal |
5111 | 5089 |
5112 #endif // V8_TARGET_ARCH_IA32 | 5090 #endif // V8_TARGET_ARCH_IA32 |