| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2179 matching lines...) |
| 2190 __ Pop(rbx); | 2190 __ Pop(rbx); |
| 2191 __ Pop(rdi); | 2191 __ Pop(rdi); |
| 2192 | 2192 |
| 2193 __ bind(&done); | 2193 __ bind(&done); |
| 2194 __ Integer32ToSmi(rdx, rdx); | 2194 __ Integer32ToSmi(rdx, rdx); |
| 2195 | 2195 |
| 2196 __ bind(&done_no_smi_convert); | 2196 __ bind(&done_no_smi_convert); |
| 2197 } | 2197 } |
| 2198 | 2198 |
| 2199 | 2199 |
| 2200 static void GenericCallHelper(MacroAssembler* masm, | 2200 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 2201 const CallIC::State& state, | 2201 // rbx : feedback vector |
| 2202 bool wrap_and_call = false) { | 2202 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback |
| 2203 // vector (Smi) |
| 2203 // rdi : the function to call | 2204 // rdi : the function to call |
| 2204 | |
| 2205 // wrap_and_call can only be true if we are compiling a monomorphic method. | |
| 2206 ASSERT(!(wrap_and_call && state.IsGeneric())); | |
| 2207 ASSERT(!wrap_and_call || state.CallAsMethod()); | |
| 2208 Isolate* isolate = masm->isolate(); | 2205 Isolate* isolate = masm->isolate(); |
| 2209 Label slow, non_function, wrap, cont; | 2206 Label slow, non_function, wrap, cont; |
| 2210 int argc = state.arg_count(); | 2207 StackArgumentsAccessor args(rsp, argc_); |
| 2211 StackArgumentsAccessor args(rsp, argc); | |
| 2212 | 2208 |
| 2213 if (state.IsGeneric()) { | 2209 if (NeedsChecks()) { |
| 2214 // Check that the function really is a JavaScript function. | 2210 // Check that the function really is a JavaScript function. |
| 2215 __ JumpIfSmi(rdi, &non_function); | 2211 __ JumpIfSmi(rdi, &non_function); |
| 2216 | 2212 |
| 2217 // Go to the slow case if we do not have a function. | 2213 // Go to the slow case if we do not have a function. |
| 2218 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 2214 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 2219 __ j(not_equal, &slow); | 2215 __ j(not_equal, &slow); |
| 2216 |
| 2217 if (RecordCallTarget()) { |
| 2218 GenerateRecordCallTarget(masm); |
| 2219 // Type information was updated. Because we may call Array, which |
| 2220 // expects either undefined or an AllocationSite in rbx, we need |
| 2221 // to set rbx to undefined. |
| 2222 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 2223 } |
| 2220 } | 2224 } |
| 2221 | 2225 |
| 2222 // Fast-case: Just invoke the function. | 2226 // Fast-case: Just invoke the function. |
| 2223 ParameterCount actual(argc); | 2227 ParameterCount actual(argc_); |
| 2224 | 2228 |
| 2225 if (state.CallAsMethod()) { | 2229 if (CallAsMethod()) { |
| 2226 if (state.IsGeneric()) { | 2230 if (NeedsChecks()) { |
| 2227 // Do not transform the receiver for strict mode functions. | 2231 // Do not transform the receiver for strict mode functions. |
| 2228 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 2232 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 2229 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset), | 2233 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset), |
| 2230 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); | 2234 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 2231 __ j(not_equal, &cont); | 2235 __ j(not_equal, &cont); |
| 2232 | 2236 |
| 2233 // Do not transform the receiver for natives. | 2237 // Do not transform the receiver for natives. |
| 2234 // SharedFunctionInfo is already loaded into rcx. | 2238 // SharedFunctionInfo is already loaded into rcx. |
| 2235 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), | 2239 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), |
| 2236 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); | 2240 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); |
| 2237 __ j(not_equal, &cont); | 2241 __ j(not_equal, &cont); |
| 2238 } | 2242 } |
| 2239 | 2243 |
| 2240 if (state.IsGeneric() || state.IsSloppy() || wrap_and_call) { | |
| 2241 // Load the receiver from the stack. | |
| 2242 __ movp(rax, args.GetReceiverOperand()); | |
| 2243 | 2244 |
| 2244 if (state.IsGeneric()) { | 2245 // Load the receiver from the stack. |
| 2245 __ JumpIfSmi(rax, &wrap); | 2246 __ movp(rax, args.GetReceiverOperand()); |
| 2246 | 2247 |
| 2247 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); | 2248 if (NeedsChecks()) { |
| 2248 __ j(below, &wrap); | 2249 __ JumpIfSmi(rax, &wrap); |
| 2249 } else { | 2250 |
| 2250 __ jmp(&wrap); | 2251 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); |
| 2251 } | 2252 __ j(below, &wrap); |
| 2253 } else { |
| 2254 __ jmp(&wrap); |
| 2252 } | 2255 } |
| 2253 | 2256 |
| 2254 __ bind(&cont); | 2257 __ bind(&cont); |
| 2255 } | 2258 } |
| 2259 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 2256 | 2260 |
| 2257 if (state.ArgumentsMustMatch()) { | 2261 if (NeedsChecks()) { |
| 2258 __ InvokeFunction(rdi, actual, actual, JUMP_FUNCTION, NullCallWrapper()); | |
| 2259 } else { | |
| 2260 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); | |
| 2261 } | |
| 2262 | |
| 2263 if (state.IsGeneric()) { | |
| 2264 // Slow-case: Non-function called. | 2262 // Slow-case: Non-function called. |
| 2265 __ bind(&slow); | 2263 __ bind(&slow); |
| 2264 if (RecordCallTarget()) { |
| 2265 // If there is a call target cache, mark it megamorphic in the |
| 2266 // non-function case. MegamorphicSentinel is an immortal immovable |
| 2267 // object (megamorphic symbol), so no write barrier is needed. |
| 2268 __ SmiToInteger32(rdx, rdx); |
| 2269 __ Move(FieldOperand(rbx, rdx, times_pointer_size, |
| 2270 FixedArray::kHeaderSize), |
| 2271 TypeFeedbackInfo::MegamorphicSentinel(isolate)); |
| 2272 __ Integer32ToSmi(rdx, rdx); |
| 2273 } |
| 2266 // Check for function proxy. | 2274 // Check for function proxy. |
| 2267 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); | 2275 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); |
| 2268 __ j(not_equal, &non_function); | 2276 __ j(not_equal, &non_function); |
| 2269 __ PopReturnAddressTo(rcx); | 2277 __ PopReturnAddressTo(rcx); |
| 2270 __ Push(rdi); // put proxy as additional argument under return address | 2278 __ Push(rdi); // put proxy as additional argument under return address |
| 2271 __ PushReturnAddressFrom(rcx); | 2279 __ PushReturnAddressFrom(rcx); |
| 2272 __ Set(rax, argc + 1); | 2280 __ Set(rax, argc_ + 1); |
| 2273 __ Set(rbx, 0); | 2281 __ Set(rbx, 0); |
| 2274 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | 2282 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
| 2275 { | 2283 { |
| 2276 Handle<Code> adaptor = | 2284 Handle<Code> adaptor = |
| 2277 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 2285 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 2278 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 2286 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
| 2279 } | 2287 } |
| 2280 | 2288 |
| 2281 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 2289 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
| 2282 // of the original receiver from the call site). | 2290 // of the original receiver from the call site). |
| 2283 __ bind(&non_function); | 2291 __ bind(&non_function); |
| 2284 __ movp(args.GetReceiverOperand(), rdi); | 2292 __ movp(args.GetReceiverOperand(), rdi); |
| 2285 __ Set(rax, argc); | 2293 __ Set(rax, argc_); |
| 2286 __ Set(rbx, 0); | 2294 __ Set(rbx, 0); |
| 2287 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); | 2295 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); |
| 2288 Handle<Code> adaptor = | 2296 Handle<Code> adaptor = |
| 2289 isolate->builtins()->ArgumentsAdaptorTrampoline(); | 2297 isolate->builtins()->ArgumentsAdaptorTrampoline(); |
| 2290 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 2298 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
| 2291 } | 2299 } |
| 2292 | 2300 |
| 2293 if (state.CallAsMethod()) { | 2301 if (CallAsMethod()) { |
| 2294 __ bind(&wrap); | 2302 __ bind(&wrap); |
| 2295 | |
| 2296 if (!state.IsGeneric() && !wrap_and_call) { | |
| 2297 // Do not transform the receiver for natives. | |
| 2298 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | |
| 2299 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), | |
| 2300 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); | |
| 2301 __ j(not_equal, &cont); | |
| 2302 } | |
| 2303 | |
| 2304 // Wrap the receiver and patch it back onto the stack. | 2303 // Wrap the receiver and patch it back onto the stack. |
| 2305 { FrameScope frame_scope(masm, StackFrame::INTERNAL); | 2304 { FrameScope frame_scope(masm, StackFrame::INTERNAL); |
| 2306 __ Push(rdi); | 2305 __ Push(rdi); |
| 2307 __ Push(rax); | 2306 __ Push(rax); |
| 2308 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 2307 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 2309 __ Pop(rdi); | 2308 __ Pop(rdi); |
| 2310 } | 2309 } |
| 2311 __ movp(args.GetReceiverOperand(), rax); | 2310 __ movp(args.GetReceiverOperand(), rax); |
| 2312 __ jmp(&cont); | 2311 __ jmp(&cont); |
| 2313 } | 2312 } |
| 2314 } | 2313 } |
| 2315 | 2314 |
| 2316 | 2315 |
| 2317 void CallFunctionStub::Generate(MacroAssembler* masm) { | |
| 2318 // rdi : the function to call | |
| 2319 | |
| 2320 // GenericCallHelper expresses its options in terms of CallIC::State. | |
| 2321 CallIC::CallType call_type = CallAsMethod() ? | |
| 2322 CallIC::METHOD : CallIC::FUNCTION; | |
| 2323 | |
| 2324 if (NeedsChecks()) { | |
| 2325 GenericCallHelper(masm, | |
| 2326 CallIC::State::SlowCallState( | |
| 2327 argc_, | |
| 2328 call_type)); | |
| 2329 } else { | |
| 2330 GenericCallHelper(masm, | |
| 2331 CallIC::State::MonomorphicCallState( | |
| 2332 argc_, | |
| 2333 call_type, | |
| 2334 CallIC::ARGUMENTS_COUNT_UNKNOWN, | |
| 2335 SLOPPY), | |
| 2336 true); | |
| 2337 } | |
| 2338 } | |
| 2339 | |
| 2340 | |
| 2341 void CallConstructStub::Generate(MacroAssembler* masm) { | 2316 void CallConstructStub::Generate(MacroAssembler* masm) { |
| 2342 // rax : number of arguments | 2317 // rax : number of arguments |
| 2343 // rbx : feedback vector | 2318 // rbx : feedback vector |
| 2344 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback | 2319 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback |
| 2345 // vector (Smi) | 2320 // vector (Smi) |
| 2346 // rdi : constructor function | 2321 // rdi : constructor function |
| 2347 Label slow, non_function_call; | 2322 Label slow, non_function_call; |
| 2348 | 2323 |
| 2349 // Check that function is not a smi. | 2324 // Check that function is not a smi. |
| 2350 __ JumpIfSmi(rdi, &non_function_call); | 2325 __ JumpIfSmi(rdi, &non_function_call); |
| (...skipping 46 matching lines...) |
| 2397 __ bind(&non_function_call); | 2372 __ bind(&non_function_call); |
| 2398 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 2373 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
| 2399 __ bind(&do_call); | 2374 __ bind(&do_call); |
| 2400 // Set expected number of arguments to zero (not changing rax). | 2375 // Set expected number of arguments to zero (not changing rax). |
| 2401 __ Set(rbx, 0); | 2376 __ Set(rbx, 0); |
| 2402 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 2377 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 2403 RelocInfo::CODE_TARGET); | 2378 RelocInfo::CODE_TARGET); |
| 2404 } | 2379 } |
| 2405 | 2380 |
| 2406 | 2381 |
| 2407 void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) { | |
| 2408 GenericCallHelper(masm, | |
| 2409 CallIC::State::MonomorphicCallState( | |
| 2410 state_.arg_count(), | |
| 2411 state_.call_type(), | |
| 2412 state_.argument_check(), | |
| 2413 state_.strict_mode())); | |
| 2414 } | |
| 2415 | |
| 2416 | |
| 2417 void CallICStub::GenerateSlowCall(MacroAssembler* masm) { | |
| 2418 GenericCallHelper(masm, | |
| 2419 CallIC::State::SlowCallState( | |
| 2420 state_.arg_count(), | |
| 2421 state_.call_type())); | |
| 2422 } | |
| 2423 | |
| 2424 | |
| 2425 void CallICStub::Generate(MacroAssembler* masm) { | |
| 2426 // rdi - function | |
| 2427 // rbx - vector | |
| 2428 // rdx - slot id | |
| 2429 Isolate* isolate = masm->isolate(); | |
| 2430 Label extra_checks_or_miss, slow; | |
| 2431 | |
| 2432 // The checks. First, does rdi match the recorded monomorphic target? | |
| 2433 __ SmiToInteger32(rdx, rdx); | |
| 2434 __ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size, | |
| 2435 FixedArray::kHeaderSize)); | |
| 2436 __ j(not_equal, &extra_checks_or_miss); | |
| 2437 | |
| 2438 GenerateMonomorphicCall(masm); | |
| 2439 | |
| 2440 __ bind(&extra_checks_or_miss); | |
| 2441 if (IsGeneric()) { | |
| 2442 Label miss_uninit; | |
| 2443 | |
| 2444 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, | |
| 2445 FixedArray::kHeaderSize)); | |
| 2446 __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate)); | |
| 2447 __ j(equal, &slow); | |
| 2448 __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate)); | |
| 2449 __ j(equal, &miss_uninit); | |
| 2450 // If we get here, go from monomorphic to megamorphic. Don't bother missing; | |
| 2451 // just update. | |
| 2452 __ Move(FieldOperand(rbx, rdx, times_pointer_size, | |
| 2453 FixedArray::kHeaderSize), | |
| 2454 TypeFeedbackInfo::MegamorphicSentinel(isolate)); | |
| 2455 __ jmp(&slow); | |
| 2456 | |
| 2457 __ bind(&miss_uninit); | |
| 2458 } | |
| 2459 | |
| 2460 GenerateMiss(masm); | |
| 2461 | |
| 2462 // The slow case. | |
| 2463 __ bind(&slow); | |
| 2464 GenerateSlowCall(masm); | |
| 2465 } | |
| 2466 | |
| 2467 | |
| 2468 void CallICStub::GenerateMiss(MacroAssembler* masm) { | |
| 2469 // Get the receiver of the function from the stack; 1 ~ return address. | |
| 2470 __ movp(rcx, Operand(rsp, (state_.arg_count() + 1) * kPointerSize)); | |
| 2471 | |
| 2472 { | |
| 2473 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 2474 | |
| 2475 // Push the receiver, the function, and the feedback info. | |
| 2476 __ Push(rcx); | |
| 2477 __ Push(rdi); | |
| 2478 __ Push(rbx); | |
| 2479 __ Integer32ToSmi(rdx, rdx); | |
| 2480 __ Push(rdx); | |
| 2481 | |
| 2482 // Call the entry. | |
| 2483 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), | |
| 2484 masm->isolate()); | |
| 2485 __ CallExternalReference(miss, 4); | |
| 2486 | |
| 2487 // Move the result to rdi and exit the internal frame. | |
| 2488 __ movp(rdi, rax); | |
| 2489 } | |
| 2490 } | |
| 2491 | |
| 2492 | |
| 2493 bool CEntryStub::NeedsImmovableCode() { | 2382 bool CEntryStub::NeedsImmovableCode() { |
| 2494 return false; | 2383 return false; |
| 2495 } | 2384 } |
| 2496 | 2385 |
| 2497 | 2386 |
| 2498 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2387 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 2499 CEntryStub::GenerateAheadOfTime(isolate); | 2388 CEntryStub::GenerateAheadOfTime(isolate); |
| 2500 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2389 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 2501 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2390 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 2502 // It is important that the store buffer overflow stubs are generated first. | 2391 // It is important that the store buffer overflow stubs are generated first. |
| (...skipping 2832 matching lines...) |
| 5335 return_value_operand, | 5224 return_value_operand, |
| 5336 NULL); | 5225 NULL); |
| 5337 } | 5226 } |
| 5338 | 5227 |
| 5339 | 5228 |
| 5340 #undef __ | 5229 #undef __ |
| 5341 | 5230 |
| 5342 } } // namespace v8::internal | 5231 } } // namespace v8::internal |
| 5343 | 5232 |
| 5344 #endif // V8_TARGET_ARCH_X64 | 5233 #endif // V8_TARGET_ARCH_X64 |