Chromium Code Reviews

Unified Diff: src/x64/code-stubs-x64.cc

Issue 224903005: Reland "create a function call IC" (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Comments. Created 6 years, 8 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 2179 matching lines...)
@@ -2190,136 +2190,161 @@
   __ Pop(rbx);
   __ Pop(rdi);

   __ bind(&done);
   __ Integer32ToSmi(rdx, rdx);

   __ bind(&done_no_smi_convert);
 }


-void CallFunctionStub::Generate(MacroAssembler* masm) {
-  // rbx : feedback vector
-  // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
-  //       vector (Smi)
+static void GenericCallHelper(MacroAssembler* masm,
+                              const CallIC::State& state,
+                              bool wrap_and_call = false) {
   // rdi : the function to call
+
+  // wrap_and_call can only be true if we are compiling a monomorphic method.
+  ASSERT(!(wrap_and_call && state.IsGeneric()));
+  ASSERT(!wrap_and_call || state.CallAsMethod());
   Isolate* isolate = masm->isolate();
   Label slow, non_function, wrap, cont;
-  StackArgumentsAccessor args(rsp, argc_);
+  int argc = state.arg_count();
+  StackArgumentsAccessor args(rsp, argc);

-  if (NeedsChecks()) {
+  if (state.IsGeneric()) {
     // Check that the function really is a JavaScript function.
     __ JumpIfSmi(rdi, &non_function);

     // Goto slow case if we do not have a function.
     __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
     __ j(not_equal, &slow);
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-      // Type information was updated. Because we may call Array, which
-      // expects either undefined or an AllocationSite in rbx we need
-      // to set rbx to undefined.
-      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
-    }
   }

   // Fast-case: Just invoke the function.
-  ParameterCount actual(argc_);
+  ParameterCount actual(argc);

-  if (CallAsMethod()) {
-    if (NeedsChecks()) {
+  if (state.CallAsMethod()) {
+    if (state.IsGeneric()) {
       // Do not transform the receiver for strict mode functions.
       __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
       __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset),
                Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
       __ j(not_equal, &cont);

       // Do not transform the receiver for natives.
       // SharedFunctionInfo is already loaded into rcx.
       __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
                Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
       __ j(not_equal, &cont);
     }

-
-    // Load the receiver from the stack.
-    __ movp(rax, args.GetReceiverOperand());
-
-    if (NeedsChecks()) {
-      __ JumpIfSmi(rax, &wrap);
-
-      __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
-      __ j(below, &wrap);
-    } else {
-      __ jmp(&wrap);
+    if (state.IsGeneric() || state.IsSloppy() || wrap_and_call) {
+      // Load the receiver from the stack.
+      __ movp(rax, args.GetReceiverOperand());
+
+      if (state.IsGeneric()) {
+        __ JumpIfSmi(rax, &wrap);
+
+        __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
+        __ j(below, &wrap);
+      } else {
+        __ jmp(&wrap);
+      }
     }

     __ bind(&cont);
   }
-  __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());

-  if (NeedsChecks()) {
+  if (state.ArgumentsMustMatch()) {
+    __ InvokeFunction(rdi, actual, actual, JUMP_FUNCTION, NullCallWrapper());
+  } else {
+    __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
+  }
+
+  if (state.IsGeneric()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case. MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      __ SmiToInteger32(rdx, rdx);
-      __ Move(FieldOperand(rbx, rdx, times_pointer_size,
-                           FixedArray::kHeaderSize),
-              TypeFeedbackInfo::MegamorphicSentinel(isolate));
-      __ Integer32ToSmi(rdx, rdx);
-    }
     // Check for function proxy.
     __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
     __ j(not_equal, &non_function);
     __ PopReturnAddressTo(rcx);
     __ Push(rdi);  // put proxy as additional argument under return address
     __ PushReturnAddressFrom(rcx);
-    __ Set(rax, argc_ + 1);
+    __ Set(rax, argc + 1);
     __ Set(rbx, 0);
     __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
     {
       Handle<Code> adaptor =
           masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ jmp(adaptor, RelocInfo::CODE_TARGET);
     }

     // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
     // of the original receiver from the call site).
     __ bind(&non_function);
     __ movp(args.GetReceiverOperand(), rdi);
-    __ Set(rax, argc_);
+    __ Set(rax, argc);
     __ Set(rbx, 0);
     __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
     Handle<Code> adaptor =
         isolate->builtins()->ArgumentsAdaptorTrampoline();
     __ Jump(adaptor, RelocInfo::CODE_TARGET);
   }

-  if (CallAsMethod()) {
+  if (state.CallAsMethod()) {
     __ bind(&wrap);
+
+    if (!state.IsGeneric() && !wrap_and_call) {
+      // Do not transform the receiver for natives.
+      __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+      __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
+               Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
+      __ j(not_equal, &cont);
+    }
+
     // Wrap the receiver and patch it back onto the stack.
     { FrameScope frame_scope(masm, StackFrame::INTERNAL);
       __ Push(rdi);
       __ Push(rax);
       __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
       __ Pop(rdi);
     }
     __ movp(args.GetReceiverOperand(), rax);
     __ jmp(&cont);
   }
 }


+void CallFunctionStub::Generate(MacroAssembler* masm) {
+  // rdi : the function to call
+
+  // GenericCallHelper expresses its options in terms of CallIC::State.
+  CallIC::CallType call_type = CallAsMethod() ?
+      CallIC::METHOD : CallIC::FUNCTION;
+
+  if (NeedsChecks()) {
+    GenericCallHelper(masm,
+                      CallIC::State::SlowCallState(
+                          argc_,
+                          call_type));
+  } else {
+    GenericCallHelper(masm,
+                      CallIC::State::MonomorphicCallState(
+                          argc_,
+                          call_type,
+                          CallIC::ARGUMENTS_COUNT_UNKNOWN,
+                          SLOPPY),
+                      true);
+  }
+}
+
+
 void CallConstructStub::Generate(MacroAssembler* masm) {
   // rax : number of arguments
   // rbx : feedback vector
   // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
   //       vector (Smi)
   // rdi : constructor function
   Label slow, non_function_call;

   // Check that function is not a smi.
   __ JumpIfSmi(rdi, &non_function_call);
(...skipping 46 matching lines...)
@@ -2372,20 +2397,106 @@
   __ bind(&non_function_call);
   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
   __ bind(&do_call);
   // Set expected number of arguments to zero (not changing rax).
   __ Set(rbx, 0);
   __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }


+void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) {
+  GenericCallHelper(masm,
+                    CallIC::State::MonomorphicCallState(
+                        state_.arg_count(),
+                        state_.call_type(),
+                        state_.argument_check(),
+                        state_.strict_mode()));
+}
+
+
+void CallICStub::GenerateSlowCall(MacroAssembler* masm) {
+  GenericCallHelper(masm,
+                    CallIC::State::SlowCallState(
+                        state_.arg_count(),
+                        state_.call_type()));
+}
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // rdi - function
+  // rbx - vector
+  // rdx - slot id
+  Isolate* isolate = masm->isolate();
+  Label extra_checks_or_miss, slow;
+
+  // The checks. First, does rdi match the recorded monomorphic target?
+  __ SmiToInteger32(rdx, rdx);
+  __ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size,
+                            FixedArray::kHeaderSize));
+  __ j(not_equal, &extra_checks_or_miss);
+
+  GenerateMonomorphicCall(masm);
+
+  __ bind(&extra_checks_or_miss);
+  if (IsGeneric()) {
+    Label miss_uninit;
+
+    __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
+                              FixedArray::kHeaderSize));
+    __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
+    __ j(equal, &slow);
+    __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
+    __ j(equal, &miss_uninit);
+    // If we get here, go from monomorphic to megamorphic. Don't bother
+    // missing; just update.
+    __ Move(FieldOperand(rbx, rdx, times_pointer_size,
+                         FixedArray::kHeaderSize),
+            TypeFeedbackInfo::MegamorphicSentinel(isolate));
+    __ jmp(&slow);
+
+    __ bind(&miss_uninit);
+  }
+
+  GenerateMiss(masm);
+
+  // The slow case.
+  __ bind(&slow);
+  GenerateSlowCall(masm);
+}
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ movp(rcx, Operand(rsp, (state_.arg_count() + 1) * kPointerSize));
+
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver, the function, and the feedback info.
+    __ Push(rcx);
+    __ Push(rdi);
+    __ Push(rbx);
+    __ Integer32ToSmi(rdx, rdx);
+    __ Push(rdx);
+
+    // Call the entry.
+    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                               masm->isolate());
+    __ CallExternalReference(miss, 4);
+
+    // Move the result to rdi and exit the internal frame.
+    __ movp(rdi, rax);
+  }
+}
+
+
 bool CEntryStub::NeedsImmovableCode() {
   return false;
 }


 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
(...skipping 2832 matching lines...)
                              return_value_operand,
                              NULL);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
