Chromium Code Reviews

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 247373002: CallICStub with a "never patch" approach until customization. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE + code size multiplier. Created 6 years, 7 months ago
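
The patch factors the shared call tails out of CallFunctionStub (EmitContinueIfStrictOrNative, EmitSlowCase, EmitWrapCase) and adds a CallICStub that consults the type feedback vector rather than patching generated code. As a reading aid, here is a minimal sketch of the feedback-slot state machine that CallICStub::Generate encodes in assembly below; the C++ names (SlotState, FeedbackSlot, CallPath, DispatchCall) are illustrative assumptions for this sketch, not V8 API.

// Illustrative sketch only (assumed names, not V8 code): the feedback-slot
// state machine that the new CallICStub implements. The stub code itself is
// never patched; only the feedback slot changes state.
enum class SlotState { kUninitialized, kMonomorphic, kMegamorphic };

struct FeedbackSlot {
  SlotState state = SlotState::kUninitialized;
  const void* target = nullptr;  // recorded call target when monomorphic
};

enum class CallPath { kFastMonomorphic, kGenericSlow, kRuntimeMiss };

CallPath DispatchCall(FeedbackSlot& slot, const void* callee, bool trace_ic) {
  // Monomorphic hit: the recorded target matches the callee, so take the
  // fast path (label have_js_function in the stub).
  if (slot.state == SlotState::kMonomorphic && slot.target == callee)
    return CallPath::kFastMonomorphic;

  // extra_checks_or_miss:
  if (slot.state == SlotState::kMegamorphic)
    return CallPath::kGenericSlow;   // slow_start: generic call sequence
  if (slot.state == SlotState::kUninitialized)
    return CallPath::kRuntimeMiss;   // miss: let the runtime record a target

  // A second, different target was seen. Unless IC tracing wants to observe
  // the transition, go megamorphic in place and skip the runtime entirely.
  if (trace_ic)
    return CallPath::kRuntimeMiss;
  slot.state = SlotState::kMegamorphic;
  slot.target = nullptr;
  return CallPath::kGenericSlow;
}

In the diff itself, the monomorphic comparison is the cmp of edi against FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize), the two sentinel values are TypeFeedbackInfo::MegamorphicSentinel and TypeFeedbackInfo::UninitializedSentinel, and the tracing escape hatch is FLAG_trace_ic.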
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "v8.h"
6
7 #if V8_TARGET_ARCH_IA32
8
9 #include "bootstrapper.h"
10 #include "code-stubs.h"
(...skipping 2285 matching lines...)
2296 __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs,
2297 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2298 __ pop(edx);
2299 __ pop(ebx);
2300 __ pop(edi);
2301
2302 __ bind(&done);
2303 }
2304
2305
2306 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
2307 // Do not transform the receiver for strict mode functions.
2308 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2309 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
2310 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
2311 __ j(not_equal, cont);
2312
2313 // Do not transform the receiver for natives (shared already in ecx).
2314 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
2315 1 << SharedFunctionInfo::kNativeBitWithinByte);
2316 __ j(not_equal, cont);
2317 }
2318
2319
2320 static void EmitSlowCase(Isolate* isolate,
2321 MacroAssembler* masm,
2322 int argc,
2323 Label* non_function) {
2324 // Check for function proxy.
2325 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2326 __ j(not_equal, non_function);
2327 __ pop(ecx);
2328 __ push(edi); // put proxy as additional argument under return address
2329 __ push(ecx);
2330 __ Move(eax, Immediate(argc + 1));
2331 __ Move(ebx, Immediate(0));
2332 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
2333 {
2334 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2335 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2336 }
2337
2338 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2339 // of the original receiver from the call site).
2340 __ bind(non_function);
2341 __ mov(Operand(esp, (argc + 1) * kPointerSize), edi);
2342 __ Move(eax, Immediate(argc));
2343 __ Move(ebx, Immediate(0));
2344 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
2345 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2346 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2347 }
2348
2349
2350 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
2351 // Wrap the receiver and patch it back onto the stack.
2352 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2353 __ push(edi);
2354 __ push(eax);
2355 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2356 __ pop(edi);
2357 }
2358 __ mov(Operand(esp, (argc + 1) * kPointerSize), eax);
2359 __ jmp(cont);
2360 }
2361
2362
2306 void CallFunctionStub::Generate(MacroAssembler* masm) {
2307 // ebx : feedback vector
2308 // edx : (only if ebx is not the megamorphic symbol) slot in feedback
2309 // vector (Smi)
2310 // edi : the function to call
2311 Label slow, non_function, wrap, cont;
2312
2313 if (NeedsChecks()) {
2314 // Check that the function really is a JavaScript function.
2315 __ JumpIfSmi(edi, &non_function);
2316
2317 // Goto slow case if we do not have a function.
2318 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2319 __ j(not_equal, &slow);
2320
2321 if (RecordCallTarget()) {
2322 GenerateRecordCallTarget(masm);
2323 // Type information was updated. Because we may call Array, which
2324 // expects either undefined or an AllocationSite in ebx we need
2325 // to set ebx to undefined.
2326 __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
2327 }
2328 }
2329
2330 // Fast-case: Just invoke the function.
2331 ParameterCount actual(argc_);
2332
2333 if (CallAsMethod()) {
2334 if (NeedsChecks()) {
2335 // Do not transform the receiver for strict mode functions.
2336 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2337 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
2338 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
2339 __ j(not_equal, &cont);
2340
2341 // Do not transform the receiver for natives (shared already in ecx).
2342 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
2343 1 << SharedFunctionInfo::kNativeBitWithinByte);
2344 __ j(not_equal, &cont);
2381 EmitContinueIfStrictOrNative(masm, &cont);
2345 }
2346
2347 // Load the receiver from the stack.
2348 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
2349
2350 if (NeedsChecks()) {
2351 __ JumpIfSmi(eax, &wrap);
2352
2353 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2354 __ j(below, &wrap);
2355 } else {
2356 __ jmp(&wrap);
2357 }
2358
2359 __ bind(&cont);
2360 }
2361
2362 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
2363
2364 if (NeedsChecks()) {
2365 // Slow-case: Non-function called.
2366 __ bind(&slow);
2367 if (RecordCallTarget()) {
2368 // If there is a call target cache, mark it megamorphic in the
2369 // non-function case. MegamorphicSentinel is an immortal immovable
2370 // object (megamorphic symbol) so no write barrier is needed.
2371 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2372 FixedArray::kHeaderSize),
2373 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
2374 }
2375 // Check for function proxy.
2376 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2377 __ j(not_equal, &non_function);
2378 __ pop(ecx);
2379 __ push(edi); // put proxy as additional argument under return address
2380 __ push(ecx);
2381 __ Move(eax, Immediate(argc_ + 1));
2382 __ Move(ebx, Immediate(0));
2383 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
2384 {
2385 Handle<Code> adaptor =
2386 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2387 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2388 }
2389
2390 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2391 // of the original receiver from the call site).
2392 __ bind(&non_function);
2393 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
2394 __ Move(eax, Immediate(argc_));
2395 __ Move(ebx, Immediate(0));
2396 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
2397 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
2398 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2404 // (non_function is bound in EmitSlowCase)
2405 EmitSlowCase(isolate(), masm, argc_, &non_function);
2399 }
2400
2401 if (CallAsMethod()) {
2402 __ bind(&wrap);
2403 // Wrap the receiver and patch it back onto the stack.
2404 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2405 __ push(edi);
2406 __ push(eax);
2407 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2408 __ pop(edi);
2409 }
2410 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
2411 __ jmp(&cont);
2410 EmitWrapCase(masm, argc_, &cont);
2412 }
2413 }
2414
2415
2416 void CallConstructStub::Generate(MacroAssembler* masm) {
2417 // eax : number of arguments
2418 // ebx : feedback vector
2419 // edx : (only if ebx is not the megamorphic symbol) slot in feedback
2420 // vector (Smi)
2421 // edi : constructor function
(...skipping 52 matching lines...)
2474 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2475 __ bind(&do_call);
2476 // Set expected number of arguments to zero (not changing eax).
2477 __ Move(ebx, Immediate(0));
2478 Handle<Code> arguments_adaptor =
2479 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2480 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
2481 }
2482
2483
2483 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
2484 __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2485 __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
2486 __ mov(vector, FieldOperand(vector,
2487 SharedFunctionInfo::kFeedbackVectorOffset));
2488 }
2489
2490
2491 void CallICStub::Generate(MacroAssembler* masm) {
2492 // edi - function
2493 // edx - slot id
2494 Isolate* isolate = masm->isolate();
2495 Label extra_checks_or_miss, slow_start;
2496 Label slow, non_function, wrap, cont;
2497 Label have_js_function;
2498 int argc = state_.arg_count();
2499 ParameterCount actual(argc);
2500
2501 EmitLoadTypeFeedbackVector(masm, ebx);
2502
2503 // The checks. First, does edi match the recorded monomorphic target?
2504 __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
2505 FixedArray::kHeaderSize));
2506 __ j(not_equal, &extra_checks_or_miss);
2507
2508 __ bind(&have_js_function);
2509 if (state_.CallAsMethod()) {
2510 EmitContinueIfStrictOrNative(masm, &cont);
2511
2512 // Load the receiver from the stack.
2513 __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
2514
2515 __ JumpIfSmi(eax, &wrap);
2516
2517 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2518 __ j(below, &wrap);
2519
2520 __ bind(&cont);
2521 }
2522
2523 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
2524
2525 __ bind(&slow);
2526 EmitSlowCase(isolate, masm, argc, &non_function);
2527
2528 if (state_.CallAsMethod()) {
2529 __ bind(&wrap);
2530 EmitWrapCase(masm, argc, &cont);
2531 }
2532
2533 __ bind(&extra_checks_or_miss);
2534 Label miss;
2535
2536 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2537 FixedArray::kHeaderSize));
2538 __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2539 __ j(equal, &slow_start);
2540 __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
2541 __ j(equal, &miss);
2542
2543 if (!FLAG_trace_ic) {
2544 // We are going megamorphic, and we don't want to visit the runtime.
2545 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2546 FixedArray::kHeaderSize),
2547 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2548 __ jmp(&slow_start);
2549 }
2550
2551 // We are here because tracing is on or we are going monomorphic.
2552 __ bind(&miss);
2553 GenerateMiss(masm);
2554
2555 // the slow case
2556 __ bind(&slow_start);
2557
2558 // Check that the function really is a JavaScript function.
2559 __ JumpIfSmi(edi, &non_function);
2560
2561 // Goto slow case if we do not have a function.
2562 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2563 __ j(not_equal, &slow);
2564 __ jmp(&have_js_function);
2565
2566 // Unreachable
2567 __ int3();
2568 }
2569
2570
2571 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2572 // Get the receiver of the function from the stack; 1 ~ return address.
2573 __ mov(ecx, Operand(esp, (state_.arg_count() + 1) * kPointerSize));
2574
2575 {
2576 FrameScope scope(masm, StackFrame::INTERNAL);
2577
2578 // Push the receiver and the function and feedback info.
2579 __ push(ecx);
2580 __ push(edi);
2581 __ push(ebx);
2582 __ push(edx);
2583
2584 // Call the entry.
2585 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
2586 masm->isolate());
2587 __ CallExternalReference(miss, 4);
2588
2589 // Move result to edi and exit the internal frame.
2590 __ mov(edi, eax);
2591 }
2592 }
2593
2594
2484 bool CEntryStub::NeedsImmovableCode() {
2485 return false;
2486 }
2487
2488
2489 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2490 CEntryStub::GenerateAheadOfTime(isolate);
2491 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2492 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2493 // It is important that the store buffer overflow stubs are generated first.
(...skipping 2529 matching lines...)
5023 Operand(ebp, 7 * kPointerSize),
5024 NULL);
5025 }
5026
5027
5028 #undef __
5029
5030 } } // namespace v8::internal
5031
5032 #endif // V8_TARGET_ARCH_IA32