| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2345 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2356 __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs, | 2356 __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs, |
| 2357 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 2357 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 2358 __ pop(edx); | 2358 __ pop(edx); |
| 2359 __ pop(ebx); | 2359 __ pop(ebx); |
| 2360 __ pop(edi); | 2360 __ pop(edi); |
| 2361 | 2361 |
| 2362 __ bind(&done); | 2362 __ bind(&done); |
| 2363 } | 2363 } |
| 2364 | 2364 |
| 2365 | 2365 |
| 2366 static void GenericCallHelper(MacroAssembler* masm, | 2366 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 2367 const CallIC::State& state, | 2367 // ebx : feedback vector |
| 2368 bool wrap_and_call = false) { | 2368 // edx : (only if ebx is not the megamorphic symbol) slot in feedback |
| 2369 // vector (Smi) |
| 2369 // edi : the function to call | 2370 // edi : the function to call |
| 2370 | |
| 2371 // wrap_and_call can only be true if we are compiling a monomorphic method. | |
| 2372 ASSERT(!(wrap_and_call && state.IsGeneric())); | |
| 2373 ASSERT(!wrap_and_call || state.CallAsMethod()); | |
| 2374 Isolate* isolate = masm->isolate(); | 2371 Isolate* isolate = masm->isolate(); |
| 2375 Label slow, non_function, wrap, cont; | 2372 Label slow, non_function, wrap, cont; |
| 2376 | 2373 |
| 2377 if (state.IsGeneric()) { | 2374 if (NeedsChecks()) { |
| 2378 // Check that the function really is a JavaScript function. | 2375 // Check that the function really is a JavaScript function. |
| 2379 __ JumpIfSmi(edi, &non_function); | 2376 __ JumpIfSmi(edi, &non_function); |
| 2380 | 2377 |
| 2381 // Goto slow case if we do not have a function. | 2378 // Goto slow case if we do not have a function. |
| 2382 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); | 2379 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); |
| 2383 __ j(not_equal, &slow); | 2380 __ j(not_equal, &slow); |
| 2381 |
| 2382 if (RecordCallTarget()) { |
| 2383 GenerateRecordCallTarget(masm); |
| 2384 // Type information was updated. Because we may call Array, which |
| 2385 // expects either undefined or an AllocationSite in ebx we need |
| 2386 // to set ebx to undefined. |
| 2387 __ mov(ebx, Immediate(isolate->factory()->undefined_value())); |
| 2388 } |
| 2384 } | 2389 } |
| 2385 | 2390 |
| 2386 // Fast-case: Just invoke the function. | 2391 // Fast-case: Just invoke the function. |
| 2387 int argc = state.arg_count(); | 2392 ParameterCount actual(argc_); |
| 2388 ParameterCount actual(argc); | |
| 2389 | 2393 |
| 2390 if (state.CallAsMethod()) { | 2394 if (CallAsMethod()) { |
| 2391 if (state.IsGeneric()) { | 2395 if (NeedsChecks()) { |
| 2392 // Do not transform the receiver for strict mode functions. | 2396 // Do not transform the receiver for strict mode functions. |
| 2393 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | 2397 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); |
| 2394 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset), | 2398 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset), |
| 2395 1 << SharedFunctionInfo::kStrictModeBitWithinByte); | 2399 1 << SharedFunctionInfo::kStrictModeBitWithinByte); |
| 2396 __ j(not_equal, &cont); | 2400 __ j(not_equal, &cont); |
| 2397 | 2401 |
| 2398 // Do not transform the receiver for natives (shared already in ecx). | 2402 // Do not transform the receiver for natives (shared already in ecx). |
| 2399 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset), | 2403 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset), |
| 2400 1 << SharedFunctionInfo::kNativeBitWithinByte); | 2404 1 << SharedFunctionInfo::kNativeBitWithinByte); |
| 2401 __ j(not_equal, &cont); | 2405 __ j(not_equal, &cont); |
| 2402 } | 2406 } |
| 2403 | 2407 |
| 2404 if (state.IsGeneric() || state.IsSloppy() || wrap_and_call) { | 2408 // Load the receiver from the stack. |
| 2405 // Load the receiver from the stack. | 2409 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize)); |
| 2406 __ mov(eax, Operand(esp, (argc + 1) * kPointerSize)); | |
| 2407 | 2410 |
| 2408 if (state.IsGeneric()) { | 2411 if (NeedsChecks()) { |
| 2409 __ JumpIfSmi(eax, &wrap); | 2412 __ JumpIfSmi(eax, &wrap); |
| 2410 | 2413 |
| 2411 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); | 2414 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); |
| 2412 __ j(below, &wrap); | 2415 __ j(below, &wrap); |
| 2413 } else { | 2416 } else { |
| 2414 __ jmp(&wrap); | 2417 __ jmp(&wrap); |
| 2415 } | |
| 2416 } | 2418 } |
| 2417 | 2419 |
| 2418 __ bind(&cont); | 2420 __ bind(&cont); |
| 2419 } | 2421 } |
| 2420 | 2422 |
| 2421 if (state.ArgumentsMustMatch()) { | 2423 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 2422 __ InvokeFunction(edi, actual, actual, JUMP_FUNCTION, NullCallWrapper()); | |
| 2423 } else { | |
| 2424 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); | |
| 2425 } | |
| 2426 | 2424 |
| 2427 if (state.IsGeneric()) { | 2425 if (NeedsChecks()) { |
| 2428 // Slow-case: Non-function called. | 2426 // Slow-case: Non-function called. |
| 2429 __ bind(&slow); | 2427 __ bind(&slow); |
| 2428 if (RecordCallTarget()) { |
| 2429 // If there is a call target cache, mark it megamorphic in the |
| 2430 // non-function case. MegamorphicSentinel is an immortal immovable |
| 2431 // object (megamorphic symbol) so no write barrier is needed. |
| 2432 __ mov(FieldOperand(ebx, edx, times_half_pointer_size, |
| 2433 FixedArray::kHeaderSize), |
| 2434 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); |
| 2435 } |
| 2430 // Check for function proxy. | 2436 // Check for function proxy. |
| 2431 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); | 2437 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); |
| 2432 __ j(not_equal, &non_function); | 2438 __ j(not_equal, &non_function); |
| 2433 __ pop(ecx); | 2439 __ pop(ecx); |
| 2434 __ push(edi); // put proxy as additional argument under return address | 2440 __ push(edi); // put proxy as additional argument under return address |
| 2435 __ push(ecx); | 2441 __ push(ecx); |
| 2436 __ Move(eax, Immediate(argc + 1)); | 2442 __ Move(eax, Immediate(argc_ + 1)); |
| 2437 __ Move(ebx, Immediate(0)); | 2443 __ Move(ebx, Immediate(0)); |
| 2438 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); | 2444 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); |
| 2439 { | 2445 { |
| 2440 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); | 2446 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); |
| 2441 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 2447 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
| 2442 } | 2448 } |
| 2443 | 2449 |
| 2444 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 2450 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
| 2445 // of the original receiver from the call site). | 2451 // of the original receiver from the call site). |
| 2446 __ bind(&non_function); | 2452 __ bind(&non_function); |
| 2447 __ mov(Operand(esp, (argc + 1) * kPointerSize), edi); | 2453 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); |
| 2448 __ Move(eax, Immediate(argc)); | 2454 __ Move(eax, Immediate(argc_)); |
| 2449 __ Move(ebx, Immediate(0)); | 2455 __ Move(ebx, Immediate(0)); |
| 2450 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); | 2456 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); |
| 2451 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); | 2457 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); |
| 2452 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 2458 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
| 2453 } | 2459 } |
| 2454 | 2460 |
| 2455 if (state.CallAsMethod()) { | 2461 if (CallAsMethod()) { |
| 2456 __ bind(&wrap); | 2462 __ bind(&wrap); |
| 2457 | |
| 2458 if (!state.IsGeneric() && !wrap_and_call) { | |
| 2459 // Do not transform the receiver for natives (shared already in ecx). | |
| 2460 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | |
| 2461 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset), | |
| 2462 1 << SharedFunctionInfo::kNativeBitWithinByte); | |
| 2463 __ j(not_equal, &cont); | |
| 2464 } | |
| 2465 | |
| 2466 // Wrap the receiver and patch it back onto the stack. | 2463 // Wrap the receiver and patch it back onto the stack. |
| 2467 { FrameScope frame_scope(masm, StackFrame::INTERNAL); | 2464 { FrameScope frame_scope(masm, StackFrame::INTERNAL); |
| 2468 __ push(edi); | 2465 __ push(edi); |
| 2469 __ push(eax); | 2466 __ push(eax); |
| 2470 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 2467 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 2471 __ pop(edi); | 2468 __ pop(edi); |
| 2472 } | 2469 } |
| 2473 __ mov(Operand(esp, (argc + 1) * kPointerSize), eax); | 2470 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax); |
| 2474 __ jmp(&cont); | 2471 __ jmp(&cont); |
| 2475 } | 2472 } |
| 2476 } | 2473 } |
| 2477 | 2474 |
| 2478 | 2475 |
| 2479 void CallFunctionStub::Generate(MacroAssembler* masm) { | |
| 2480 // edi : the function to call | |
| 2481 | |
| 2482 // GenericCallHelper expresses its options in terms of CallIC::State. | |
| 2483 CallIC::CallType call_type = CallAsMethod() ? | |
| 2484 CallIC::METHOD : CallIC::FUNCTION; | |
| 2485 | |
| 2486 if (NeedsChecks()) { | |
| 2487 GenericCallHelper(masm, | |
| 2488 CallIC::State::SlowCallState( | |
| 2489 argc_, | |
| 2490 call_type)); | |
| 2491 } else { | |
| 2492 GenericCallHelper(masm, | |
| 2493 CallIC::State::MonomorphicCallState( | |
| 2494 argc_, | |
| 2495 call_type, | |
| 2496 CallIC::ARGUMENTS_COUNT_UNKNOWN, | |
| 2497 SLOPPY), | |
| 2498 true); | |
| 2499 } | |
| 2500 } | |
| 2501 | |
| 2502 | |
| 2503 void CallConstructStub::Generate(MacroAssembler* masm) { | 2476 void CallConstructStub::Generate(MacroAssembler* masm) { |
| 2504 // eax : number of arguments | 2477 // eax : number of arguments |
| 2505 // ebx : feedback vector | 2478 // ebx : feedback vector |
| 2506 // edx : (only if ebx is not the megamorphic symbol) slot in feedback | 2479 // edx : (only if ebx is not the megamorphic symbol) slot in feedback |
| 2507 // vector (Smi) | 2480 // vector (Smi) |
| 2508 // edi : constructor function | 2481 // edi : constructor function |
| 2509 Label slow, non_function_call; | 2482 Label slow, non_function_call; |
| 2510 | 2483 |
| 2511 // Check that function is not a smi. | 2484 // Check that function is not a smi. |
| 2512 __ JumpIfSmi(edi, &non_function_call); | 2485 __ JumpIfSmi(edi, &non_function_call); |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2561 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 2534 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
| 2562 __ bind(&do_call); | 2535 __ bind(&do_call); |
| 2563 // Set expected number of arguments to zero (not changing eax). | 2536 // Set expected number of arguments to zero (not changing eax). |
| 2564 __ Move(ebx, Immediate(0)); | 2537 __ Move(ebx, Immediate(0)); |
| 2565 Handle<Code> arguments_adaptor = | 2538 Handle<Code> arguments_adaptor = |
| 2566 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 2539 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 2567 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET); | 2540 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET); |
| 2568 } | 2541 } |
| 2569 | 2542 |
| 2570 | 2543 |
| 2571 void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) { | |
| 2572 GenericCallHelper(masm, | |
| 2573 CallIC::State::MonomorphicCallState( | |
| 2574 state_.arg_count(), | |
| 2575 state_.call_type(), | |
| 2576 state_.argument_check(), | |
| 2577 state_.strict_mode())); | |
| 2578 } | |
| 2579 | |
| 2580 | |
| 2581 void CallICStub::GenerateSlowCall(MacroAssembler* masm) { | |
| 2582 GenericCallHelper(masm, | |
| 2583 CallIC::State::SlowCallState( | |
| 2584 state_.arg_count(), | |
| 2585 state_.call_type())); | |
| 2586 } | |
| 2587 | |
| 2588 | |
| 2589 void CallICStub::Generate(MacroAssembler* masm) { | |
| 2590 // edi - function | |
| 2591 // ebx - vector | |
| 2592 // edx - slot id | |
| 2593 Isolate* isolate = masm->isolate(); | |
| 2594 Label extra_checks_or_miss, slow; | |
| 2595 | |
| 2596 // The checks. First, does edi match the recorded monomorphic target? | |
| 2597 __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size, | |
| 2598 FixedArray::kHeaderSize)); | |
| 2599 __ j(not_equal, &extra_checks_or_miss); | |
| 2600 | |
| 2601 GenerateMonomorphicCall(masm); | |
| 2602 | |
| 2603 __ bind(&extra_checks_or_miss); | |
| 2604 if (IsGeneric()) { | |
| 2605 Label miss_uninit; | |
| 2606 | |
| 2607 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, | |
| 2608 FixedArray::kHeaderSize)); | |
| 2609 __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); | |
| 2610 __ j(equal, &slow); | |
| 2611 __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate))); | |
| 2612 __ j(equal, &miss_uninit); | |
| 2613 // If we get here, go from monomorphic to megamorphic. Don't bother missing; | |
| 2614 // just update. | |
| 2615 __ mov(FieldOperand(ebx, edx, times_half_pointer_size, | |
| 2616 FixedArray::kHeaderSize), | |
| 2617 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); | |
| 2618 __ jmp(&slow); | |
| 2619 | |
| 2620 __ bind(&miss_uninit); | |
| 2621 } | |
| 2622 | |
| 2623 GenerateMiss(masm); | |
| 2624 | |
| 2625 // the slow case | |
| 2626 __ bind(&slow); | |
| 2627 GenerateSlowCall(masm); | |
| 2628 } | |
| 2629 | |
| 2630 | |
| 2631 void CallICStub::GenerateMiss(MacroAssembler* masm) { | |
| 2632 // Get the receiver of the function from the stack; 1 ~ return address. | |
| 2633 __ mov(ecx, Operand(esp, (state_.arg_count() + 1) * kPointerSize)); | |
| 2634 | |
| 2635 { | |
| 2636 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 2637 | |
| 2638 // Push the receiver and the function and feedback info. | |
| 2639 __ push(ecx); | |
| 2640 __ push(edi); | |
| 2641 __ push(ebx); | |
| 2642 __ push(edx); | |
| 2643 | |
| 2644 // Call the entry. | |
| 2645 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), | |
| 2646 masm->isolate()); | |
| 2647 __ CallExternalReference(miss, 4); | |
| 2648 | |
| 2649 // Move result to edi and exit the internal frame. | |
| 2650 __ mov(edi, eax); | |
| 2651 } | |
| 2652 } | |
| 2653 | |
| 2654 | |
| 2655 bool CEntryStub::NeedsImmovableCode() { | 2544 bool CEntryStub::NeedsImmovableCode() { |
| 2656 return false; | 2545 return false; |
| 2657 } | 2546 } |
| 2658 | 2547 |
| 2659 | 2548 |
| 2660 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2549 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 2661 CEntryStub::GenerateAheadOfTime(isolate); | 2550 CEntryStub::GenerateAheadOfTime(isolate); |
| 2662 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2551 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 2663 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2552 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 2664 // It is important that the store buffer overflow stubs are generated first. | 2553 // It is important that the store buffer overflow stubs are generated first. |
| (...skipping 2808 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5473 Operand(ebp, 7 * kPointerSize), | 5362 Operand(ebp, 7 * kPointerSize), |
| 5474 NULL); | 5363 NULL); |
| 5475 } | 5364 } |
| 5476 | 5365 |
| 5477 | 5366 |
| 5478 #undef __ | 5367 #undef __ |
| 5479 | 5368 |
| 5480 } } // namespace v8::internal | 5369 } } // namespace v8::internal |
| 5481 | 5370 |
| 5482 #endif // V8_TARGET_ARCH_IA32 | 5371 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |