Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(369)

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 224903005: Reland "create a function call IC" (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Comments. Created 6 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ia32/builtins-ia32.cc ('k') | src/ia32/debug-ia32.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 2345 matching lines...) Expand 10 before | Expand all | Expand 10 after
2356 __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs, 2356 __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs,
2357 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 2357 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2358 __ pop(edx); 2358 __ pop(edx);
2359 __ pop(ebx); 2359 __ pop(ebx);
2360 __ pop(edi); 2360 __ pop(edi);
2361 2361
2362 __ bind(&done); 2362 __ bind(&done);
2363 } 2363 }
2364 2364
2365 2365
2366 void CallFunctionStub::Generate(MacroAssembler* masm) { 2366 static void GenericCallHelper(MacroAssembler* masm,
2367 // ebx : feedback vector 2367 const CallIC::State& state,
2368 // edx : (only if ebx is not the megamorphic symbol) slot in feedback 2368 bool wrap_and_call = false) {
2369 // vector (Smi)
2370 // edi : the function to call 2369 // edi : the function to call
2370
2371 // wrap_and_call can only be true if we are compiling a monomorphic method.
2372 ASSERT(!(wrap_and_call && state.IsGeneric()));
2373 ASSERT(!wrap_and_call || state.CallAsMethod());
2371 Isolate* isolate = masm->isolate(); 2374 Isolate* isolate = masm->isolate();
2372 Label slow, non_function, wrap, cont; 2375 Label slow, non_function, wrap, cont;
2373 2376
2374 if (NeedsChecks()) { 2377 if (state.IsGeneric()) {
2375 // Check that the function really is a JavaScript function. 2378 // Check that the function really is a JavaScript function.
2376 __ JumpIfSmi(edi, &non_function); 2379 __ JumpIfSmi(edi, &non_function);
2377 2380
2378 // Goto slow case if we do not have a function. 2381 // Goto slow case if we do not have a function.
2379 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); 2382 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2380 __ j(not_equal, &slow); 2383 __ j(not_equal, &slow);
2381
2382 if (RecordCallTarget()) {
2383 GenerateRecordCallTarget(masm);
2384 // Type information was updated. Because we may call Array, which
2385 // expects either undefined or an AllocationSite in ebx we need
2386 // to set ebx to undefined.
2387 __ mov(ebx, Immediate(isolate->factory()->undefined_value()));
2388 }
2389 } 2384 }
2390 2385
2391 // Fast-case: Just invoke the function. 2386 // Fast-case: Just invoke the function.
2392 ParameterCount actual(argc_); 2387 int argc = state.arg_count();
2388 ParameterCount actual(argc);
2393 2389
2394 if (CallAsMethod()) { 2390 if (state.CallAsMethod()) {
2395 if (NeedsChecks()) { 2391 if (state.IsGeneric()) {
2396 // Do not transform the receiver for strict mode functions. 2392 // Do not transform the receiver for strict mode functions.
2397 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 2393 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2398 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset), 2394 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
2399 1 << SharedFunctionInfo::kStrictModeBitWithinByte); 2395 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
2400 __ j(not_equal, &cont); 2396 __ j(not_equal, &cont);
2401 2397
2402 // Do not transform the receiver for natives (shared already in ecx). 2398 // Do not transform the receiver for natives (shared already in ecx).
2403 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset), 2399 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
2404 1 << SharedFunctionInfo::kNativeBitWithinByte); 2400 1 << SharedFunctionInfo::kNativeBitWithinByte);
2405 __ j(not_equal, &cont); 2401 __ j(not_equal, &cont);
2406 } 2402 }
2407 2403
2408 // Load the receiver from the stack. 2404 if (state.IsGeneric() || state.IsSloppy() || wrap_and_call) {
2409 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize)); 2405 // Load the receiver from the stack.
2406 __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
2410 2407
2411 if (NeedsChecks()) { 2408 if (state.IsGeneric()) {
2412 __ JumpIfSmi(eax, &wrap); 2409 __ JumpIfSmi(eax, &wrap);
2413 2410
2414 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); 2411 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2415 __ j(below, &wrap); 2412 __ j(below, &wrap);
2416 } else { 2413 } else {
2417 __ jmp(&wrap); 2414 __ jmp(&wrap);
2415 }
2418 } 2416 }
2419 2417
2420 __ bind(&cont); 2418 __ bind(&cont);
2421 } 2419 }
2422 2420
2423 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); 2421 if (state.ArgumentsMustMatch()) {
2422 __ InvokeFunction(edi, actual, actual, JUMP_FUNCTION, NullCallWrapper());
2423 } else {
2424 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
2425 }
2424 2426
2425 if (NeedsChecks()) { 2427 if (state.IsGeneric()) {
2426 // Slow-case: Non-function called. 2428 // Slow-case: Non-function called.
2427 __ bind(&slow); 2429 __ bind(&slow);
2428 if (RecordCallTarget()) {
2429 // If there is a call target cache, mark it megamorphic in the
2430 // non-function case. MegamorphicSentinel is an immortal immovable
2431 // object (megamorphic symbol) so no write barrier is needed.
2432 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2433 FixedArray::kHeaderSize),
2434 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2435 }
2436 // Check for function proxy. 2430 // Check for function proxy.
2437 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); 2431 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2438 __ j(not_equal, &non_function); 2432 __ j(not_equal, &non_function);
2439 __ pop(ecx); 2433 __ pop(ecx);
2440 __ push(edi); // put proxy as additional argument under return address 2434 __ push(edi); // put proxy as additional argument under return address
2441 __ push(ecx); 2435 __ push(ecx);
2442 __ Move(eax, Immediate(argc_ + 1)); 2436 __ Move(eax, Immediate(argc + 1));
2443 __ Move(ebx, Immediate(0)); 2437 __ Move(ebx, Immediate(0));
2444 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); 2438 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
2445 { 2439 {
2446 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); 2440 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2447 __ jmp(adaptor, RelocInfo::CODE_TARGET); 2441 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2448 } 2442 }
2449 2443
2450 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 2444 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2451 // of the original receiver from the call site). 2445 // of the original receiver from the call site).
2452 __ bind(&non_function); 2446 __ bind(&non_function);
2453 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); 2447 __ mov(Operand(esp, (argc + 1) * kPointerSize), edi);
2454 __ Move(eax, Immediate(argc_)); 2448 __ Move(eax, Immediate(argc));
2455 __ Move(ebx, Immediate(0)); 2449 __ Move(ebx, Immediate(0));
2456 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); 2450 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
2457 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); 2451 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2458 __ jmp(adaptor, RelocInfo::CODE_TARGET); 2452 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2459 } 2453 }
2460 2454
2461 if (CallAsMethod()) { 2455 if (state.CallAsMethod()) {
2462 __ bind(&wrap); 2456 __ bind(&wrap);
2457
2458 if (!state.IsGeneric() && !wrap_and_call) {
2459 // Do not transform the receiver for natives (shared already in ecx).
2460 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2461 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
2462 1 << SharedFunctionInfo::kNativeBitWithinByte);
2463 __ j(not_equal, &cont);
2464 }
2465
2463 // Wrap the receiver and patch it back onto the stack. 2466 // Wrap the receiver and patch it back onto the stack.
2464 { FrameScope frame_scope(masm, StackFrame::INTERNAL); 2467 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2465 __ push(edi); 2468 __ push(edi);
2466 __ push(eax); 2469 __ push(eax);
2467 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 2470 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2468 __ pop(edi); 2471 __ pop(edi);
2469 } 2472 }
2470 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax); 2473 __ mov(Operand(esp, (argc + 1) * kPointerSize), eax);
2471 __ jmp(&cont); 2474 __ jmp(&cont);
2472 } 2475 }
2473 } 2476 }
2474 2477
2475 2478
2479 void CallFunctionStub::Generate(MacroAssembler* masm) {
2480 // edi : the function to call
2481
 2482 // GenericCallHelper expresses its options in terms of CallIC::State.
2483 CallIC::CallType call_type = CallAsMethod() ?
2484 CallIC::METHOD : CallIC::FUNCTION;
2485
2486 if (NeedsChecks()) {
2487 GenericCallHelper(masm,
2488 CallIC::State::SlowCallState(
2489 argc_,
2490 call_type));
2491 } else {
2492 GenericCallHelper(masm,
2493 CallIC::State::MonomorphicCallState(
2494 argc_,
2495 call_type,
2496 CallIC::ARGUMENTS_COUNT_UNKNOWN,
2497 SLOPPY),
2498 true);
2499 }
2500 }
2501
2502
2476 void CallConstructStub::Generate(MacroAssembler* masm) { 2503 void CallConstructStub::Generate(MacroAssembler* masm) {
2477 // eax : number of arguments 2504 // eax : number of arguments
2478 // ebx : feedback vector 2505 // ebx : feedback vector
2479 // edx : (only if ebx is not the megamorphic symbol) slot in feedback 2506 // edx : (only if ebx is not the megamorphic symbol) slot in feedback
2480 // vector (Smi) 2507 // vector (Smi)
2481 // edi : constructor function 2508 // edi : constructor function
2482 Label slow, non_function_call; 2509 Label slow, non_function_call;
2483 2510
2484 // Check that function is not a smi. 2511 // Check that function is not a smi.
2485 __ JumpIfSmi(edi, &non_function_call); 2512 __ JumpIfSmi(edi, &non_function_call);
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
2534 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); 2561 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2535 __ bind(&do_call); 2562 __ bind(&do_call);
2536 // Set expected number of arguments to zero (not changing eax). 2563 // Set expected number of arguments to zero (not changing eax).
2537 __ Move(ebx, Immediate(0)); 2564 __ Move(ebx, Immediate(0));
2538 Handle<Code> arguments_adaptor = 2565 Handle<Code> arguments_adaptor =
2539 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 2566 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2540 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET); 2567 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
2541 } 2568 }
2542 2569
2543 2570
2571 void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) {
2572 GenericCallHelper(masm,
2573 CallIC::State::MonomorphicCallState(
2574 state_.arg_count(),
2575 state_.call_type(),
2576 state_.argument_check(),
2577 state_.strict_mode()));
2578 }
2579
2580
2581 void CallICStub::GenerateSlowCall(MacroAssembler* masm) {
2582 GenericCallHelper(masm,
2583 CallIC::State::SlowCallState(
2584 state_.arg_count(),
2585 state_.call_type()));
2586 }
2587
2588
2589 void CallICStub::Generate(MacroAssembler* masm) {
2590 // edi - function
2591 // ebx - vector
2592 // edx - slot id
2593 Isolate* isolate = masm->isolate();
2594 Label extra_checks_or_miss, slow;
2595
2596 // The checks. First, does edi match the recorded monomorphic target?
2597 __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
2598 FixedArray::kHeaderSize));
2599 __ j(not_equal, &extra_checks_or_miss);
2600
2601 GenerateMonomorphicCall(masm);
2602
2603 __ bind(&extra_checks_or_miss);
2604 if (IsGeneric()) {
2605 Label miss_uninit;
2606
2607 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2608 FixedArray::kHeaderSize));
2609 __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2610 __ j(equal, &slow);
2611 __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
2612 __ j(equal, &miss_uninit);
 2613 // If we get here, go from monomorphic to megamorphic. Don't bother missing;
 2614 // just update.
2615 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2616 FixedArray::kHeaderSize),
2617 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2618 __ jmp(&slow);
2619
2620 __ bind(&miss_uninit);
2621 }
2622
2623 GenerateMiss(masm);
2624
2625 // the slow case
2626 __ bind(&slow);
2627 GenerateSlowCall(masm);
2628 }
2629
2630
2631 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2632 // Get the receiver of the function from the stack; 1 ~ return address.
2633 __ mov(ecx, Operand(esp, (state_.arg_count() + 1) * kPointerSize));
2634
2635 {
2636 FrameScope scope(masm, StackFrame::INTERNAL);
2637
2638 // Push the receiver and the function and feedback info.
2639 __ push(ecx);
2640 __ push(edi);
2641 __ push(ebx);
2642 __ push(edx);
2643
2644 // Call the entry.
2645 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
2646 masm->isolate());
2647 __ CallExternalReference(miss, 4);
2648
2649 // Move result to edi and exit the internal frame.
2650 __ mov(edi, eax);
2651 }
2652 }
2653
2654
2544 bool CEntryStub::NeedsImmovableCode() { 2655 bool CEntryStub::NeedsImmovableCode() {
2545 return false; 2656 return false;
2546 } 2657 }
2547 2658
2548 2659
2549 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 2660 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2550 CEntryStub::GenerateAheadOfTime(isolate); 2661 CEntryStub::GenerateAheadOfTime(isolate);
2551 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 2662 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2552 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 2663 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2553 // It is important that the store buffer overflow stubs are generated first. 2664 // It is important that the store buffer overflow stubs are generated first.
(...skipping 2808 matching lines...) Expand 10 before | Expand all | Expand 10 after
5362 Operand(ebp, 7 * kPointerSize), 5473 Operand(ebp, 7 * kPointerSize),
5363 NULL); 5474 NULL);
5364 } 5475 }
5365 5476
5366 5477
5367 #undef __ 5478 #undef __
5368 5479
5369 } } // namespace v8::internal 5480 } } // namespace v8::internal
5370 5481
5371 #endif // V8_TARGET_ARCH_IA32 5482 #endif // V8_TARGET_ARCH_IA32
OLDNEW
« no previous file with comments | « src/ia32/builtins-ia32.cc ('k') | src/ia32/debug-ia32.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698