Chromium Code Reviews

Unified diff: src/x64/code-stubs-x64.cc

Issue 172523002: Create a function call IC (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE. Created 6 years, 8 months ago
@@ -1,10 +1,10 @@
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 2179 matching lines...)
@@ -2190,136 +2190,149 @@
   __ Pop(rbx);
   __ Pop(rdi);
 
   __ bind(&done);
   __ Integer32ToSmi(rdx, rdx);
 
   __ bind(&done_no_smi_convert);
 }
 
 
-void CallFunctionStub::Generate(MacroAssembler* masm) {
-  // rbx : feedback vector
-  // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
-  //       vector (Smi)
+static void GenericCallHelper(MacroAssembler* masm,
+                              const CallIC::State& state) {
   // rdi : the function to call
   Isolate* isolate = masm->isolate();
   Label slow, non_function, wrap, cont;
-  StackArgumentsAccessor args(rsp, argc_);
+  int argc = state.arg_count();
+  StackArgumentsAccessor args(rsp, argc);
 
-  if (NeedsChecks()) {
+  if (state.IsGeneric()) {
     // Check that the function really is a JavaScript function.
     __ JumpIfSmi(rdi, &non_function);
 
     // Goto slow case if we do not have a function.
     __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
     __ j(not_equal, &slow);
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-      // Type information was updated. Because we may call Array, which
-      // expects either undefined or an AllocationSite in rbx we need
-      // to set rbx to undefined.
-      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
-    }
   }
 
   // Fast-case: Just invoke the function.
-  ParameterCount actual(argc_);
+  ParameterCount actual(argc);
 
-  if (CallAsMethod()) {
-    if (NeedsChecks()) {
+  if (state.CallAsMethod()) {
+    if (state.IsGeneric()) {
       // Do not transform the receiver for strict mode functions.
       __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
       __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset),
                Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
       __ j(not_equal, &cont);
 
       // Do not transform the receiver for natives.
       // SharedFunctionInfo is already loaded into rcx.
       __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
                Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
       __ j(not_equal, &cont);
     }
 
-
+    if (state.IsSloppy()) {
       // Load the receiver from the stack.
       __ movp(rax, args.GetReceiverOperand());
-
-    if (NeedsChecks()) {
       __ JumpIfSmi(rax, &wrap);
 
       __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
       __ j(below, &wrap);
-    } else {
-      __ jmp(&wrap);
     }
 
     __ bind(&cont);
   }
-  __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
 
-  if (NeedsChecks()) {
+  if (state.ArgumentsMustMatch()) {
+    __ InvokeFunction(rdi, actual, actual, JUMP_FUNCTION, NullCallWrapper());
+  } else {
+    __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
+  }
+
+  if (state.IsGeneric()) {
    // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case. MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      __ SmiToInteger32(rdx, rdx);
-      __ Move(FieldOperand(rbx, rdx, times_pointer_size,
-                           FixedArray::kHeaderSize),
-              TypeFeedbackInfo::MegamorphicSentinel(isolate));
-      __ Integer32ToSmi(rdx, rdx);
-    }
     // Check for function proxy.
     __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
     __ j(not_equal, &non_function);
     __ PopReturnAddressTo(rcx);
     __ Push(rdi);  // put proxy as additional argument under return address
     __ PushReturnAddressFrom(rcx);
-    __ Set(rax, argc_ + 1);
+    __ Set(rax, argc + 1);
     __ Set(rbx, 0);
     __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
     {
       Handle<Code> adaptor =
           masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ jmp(adaptor, RelocInfo::CODE_TARGET);
     }
 
     // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
     // of the original receiver from the call site).
     __ bind(&non_function);
     __ movp(args.GetReceiverOperand(), rdi);
-    __ Set(rax, argc_);
+    __ Set(rax, argc);
     __ Set(rbx, 0);
     __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
     Handle<Code> adaptor =
         isolate->builtins()->ArgumentsAdaptorTrampoline();
     __ Jump(adaptor, RelocInfo::CODE_TARGET);
   }
 
-  if (CallAsMethod()) {
+  if (state.CallAsMethod() && state.IsSloppy()) {
     __ bind(&wrap);
+
+    if (!state.IsGeneric()) {
+      // Do not transform the receiver for natives.
+      __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+      __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
+               Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
+    }
+
     // Wrap the receiver and patch it back onto the stack.
     { FrameScope frame_scope(masm, StackFrame::INTERNAL);
       __ Push(rdi);
       __ Push(rax);
       __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
       __ Pop(rdi);
     }
     __ movp(args.GetReceiverOperand(), rax);
     __ jmp(&cont);
   }
 }
 
 
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+  // rdi : the function to call
+
+  // GenericCallHelper expresses its options in terms of CallIC::State.
+  CallIC::CallType call_type = CallAsMethod() ?
+      CallIC::METHOD : CallIC::FUNCTION;
+
+  if (NeedsChecks()) {
+    GenericCallHelper(masm,
+                      CallIC::State::SlowCallState(
+                          argc_,
+                          call_type));
+  } else {
+    GenericCallHelper(masm,
+                      CallIC::State::MonomorphicCallState(
+                          argc_,
+                          call_type,
+                          CallIC::ARGUMENTS_COUNT_UNKNOWN,
+                          SLOPPY));
+  }
+}
+
+
 void CallConstructStub::Generate(MacroAssembler* masm) {
   // rax : number of arguments
   // rbx : feedback vector
   // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
   //       vector (Smi)
   // rdi : constructor function
   Label slow, non_function_call;
 
   // Check that function is not a smi.
   __ JumpIfSmi(rdi, &non_function_call);
(...skipping 46 matching lines...)
@@ -2372,20 +2385,106 @@
   __ bind(&non_function_call);
   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
   __ bind(&do_call);
   // Set expected number of arguments to zero (not changing rax).
   __ Set(rbx, 0);
   __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }
 
 
+void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) {
+  GenericCallHelper(masm,
+                    CallIC::State::MonomorphicCallState(
+                        state_.arg_count(),
+                        state_.call_type(),
+                        state_.argument_check(),
+                        state_.strict_mode()));
+}
+
+
+void CallICStub::GenerateSlowCall(MacroAssembler* masm) {
+  GenericCallHelper(masm,
+                    CallIC::State::SlowCallState(
+                        state_.arg_count(),
+                        state_.call_type()));
+}
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // rdi - function
+  // rbx - vector
+  // rdx - slot id
+  Isolate* isolate = masm->isolate();
+  Label extra_checks_or_miss, slow;
+
+  // The checks. First, does rdi match the recorded monomorphic target?
+  __ SmiToInteger32(rdx, rdx);
+  __ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size,
+                            FixedArray::kHeaderSize));
+  __ j(not_equal, &extra_checks_or_miss);
+
+  GenerateMonomorphicCall(masm);
+
+  __ bind(&extra_checks_or_miss);
+  if (IsGeneric()) {
+    Label miss_uninit;
+
+    __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
+                              FixedArray::kHeaderSize));
+    __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
+    __ j(equal, &slow);
+    __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
+    __ j(equal, &miss_uninit);
+    // If we get here, go from monomorphic to megamorphic. Don't bother
+    // missing, just update.
+    __ Move(FieldOperand(rbx, rdx, times_pointer_size,
+                         FixedArray::kHeaderSize),
+            TypeFeedbackInfo::MegamorphicSentinel(isolate));
+    __ jmp(&slow);
+
+    __ bind(&miss_uninit);
+  }
+
+  GenerateMiss(masm);
+
+  // The slow case.
+  __ bind(&slow);
+  GenerateSlowCall(masm);
+}
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ movp(rcx, Operand(rsp, (state_.arg_count() + 1) * kPointerSize));
+
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver and the function and feedback info.
+    __ Push(rcx);
+    __ Push(rdi);
+    __ Push(rbx);
+    __ Integer32ToSmi(rdx, rdx);
+    __ Push(rdx);
+
+    // Call the entry.
+    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                               masm->isolate());
+    __ CallExternalReference(miss, 4);
+
+    // Move result to rdi and exit the internal frame.
+    __ movp(rdi, rax);
+  }
+}
+
+
 bool CEntryStub::NeedsImmovableCode() {
   return false;
 }
 
 
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
(...skipping 2832 matching lines...)
@@ -5224,10 +5323,10 @@
       return_value_operand,
       NULL);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64