Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(273)

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 223823002: Revert r20474 (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/builtins-x64.cc ('k') | src/x64/debug-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 2179 matching lines...) Expand 10 before | Expand all | Expand 10 after
2190 __ Pop(rbx); 2190 __ Pop(rbx);
2191 __ Pop(rdi); 2191 __ Pop(rdi);
2192 2192
2193 __ bind(&done); 2193 __ bind(&done);
2194 __ Integer32ToSmi(rdx, rdx); 2194 __ Integer32ToSmi(rdx, rdx);
2195 2195
2196 __ bind(&done_no_smi_convert); 2196 __ bind(&done_no_smi_convert);
2197 } 2197 }
2198 2198
2199 2199
2200 static void GenericCallHelper(MacroAssembler* masm, 2200 void CallFunctionStub::Generate(MacroAssembler* masm) {
2201 const CallIC::State& state) { 2201 // rbx : feedback vector
2202 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
2203 // vector (Smi)
2202 // rdi : the function to call 2204 // rdi : the function to call
2203 Isolate* isolate = masm->isolate(); 2205 Isolate* isolate = masm->isolate();
2204 Label slow, non_function, wrap, cont; 2206 Label slow, non_function, wrap, cont;
2205 int argc = state.arg_count(); 2207 StackArgumentsAccessor args(rsp, argc_);
2206 StackArgumentsAccessor args(rsp, argc);
2207 2208
2208 if (state.IsGeneric()) { 2209 if (NeedsChecks()) {
2209 // Check that the function really is a JavaScript function. 2210 // Check that the function really is a JavaScript function.
2210 __ JumpIfSmi(rdi, &non_function); 2211 __ JumpIfSmi(rdi, &non_function);
2211 2212
2212 // Goto slow case if we do not have a function. 2213 // Goto slow case if we do not have a function.
2213 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2214 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2214 __ j(not_equal, &slow); 2215 __ j(not_equal, &slow);
2216
2217 if (RecordCallTarget()) {
2218 GenerateRecordCallTarget(masm);
2219 // Type information was updated. Because we may call Array, which
2220 // expects either undefined or an AllocationSite in rbx we need
2221 // to set rbx to undefined.
2222 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
2223 }
2215 } 2224 }
2216 2225
2217 // Fast-case: Just invoke the function. 2226 // Fast-case: Just invoke the function.
2218 ParameterCount actual(argc); 2227 ParameterCount actual(argc_);
2219 2228
2220 if (state.CallAsMethod()) { 2229 if (CallAsMethod()) {
2221 if (state.IsGeneric()) { 2230 if (NeedsChecks()) {
2222 // Do not transform the receiver for strict mode functions. 2231 // Do not transform the receiver for strict mode functions.
2223 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 2232 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2224 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset), 2233 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset),
2225 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); 2234 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
2226 __ j(not_equal, &cont); 2235 __ j(not_equal, &cont);
2227 2236
2228 // Do not transform the receiver for natives. 2237 // Do not transform the receiver for natives.
2229 // SharedFunctionInfo is already loaded into rcx. 2238 // SharedFunctionInfo is already loaded into rcx.
2230 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), 2239 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
2231 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); 2240 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
2232 __ j(not_equal, &cont); 2241 __ j(not_equal, &cont);
2233 } 2242 }
2234 2243
2235 if (state.IsSloppy()) { 2244
2236 // Load the receiver from the stack. 2245 // Load the receiver from the stack.
2237 __ movp(rax, args.GetReceiverOperand()); 2246 __ movp(rax, args.GetReceiverOperand());
2247
2248 if (NeedsChecks()) {
2238 __ JumpIfSmi(rax, &wrap); 2249 __ JumpIfSmi(rax, &wrap);
2239 2250
2240 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 2251 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2241 __ j(below, &wrap); 2252 __ j(below, &wrap);
2253 } else {
2254 __ jmp(&wrap);
2242 } 2255 }
2243 2256
2244 __ bind(&cont); 2257 __ bind(&cont);
2245 } 2258 }
2259 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
2246 2260
2247 if (state.ArgumentsMustMatch()) { 2261 if (NeedsChecks()) {
2248 __ InvokeFunction(rdi, actual, actual, JUMP_FUNCTION, NullCallWrapper());
2249 } else {
2250 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
2251 }
2252
2253 if (state.IsGeneric()) {
2254 // Slow-case: Non-function called. 2262 // Slow-case: Non-function called.
2255 __ bind(&slow); 2263 __ bind(&slow);
2264 if (RecordCallTarget()) {
2265 // If there is a call target cache, mark it megamorphic in the
2266 // non-function case. MegamorphicSentinel is an immortal immovable
2267 // object (megamorphic symbol) so no write barrier is needed.
2268 __ SmiToInteger32(rdx, rdx);
2269 __ Move(FieldOperand(rbx, rdx, times_pointer_size,
2270 FixedArray::kHeaderSize),
2271 TypeFeedbackInfo::MegamorphicSentinel(isolate));
2272 __ Integer32ToSmi(rdx, rdx);
2273 }
2256 // Check for function proxy. 2274 // Check for function proxy.
2257 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); 2275 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2258 __ j(not_equal, &non_function); 2276 __ j(not_equal, &non_function);
2259 __ PopReturnAddressTo(rcx); 2277 __ PopReturnAddressTo(rcx);
2260 __ Push(rdi); // put proxy as additional argument under return address 2278 __ Push(rdi); // put proxy as additional argument under return address
2261 __ PushReturnAddressFrom(rcx); 2279 __ PushReturnAddressFrom(rcx);
2262 __ Set(rax, argc + 1); 2280 __ Set(rax, argc_ + 1);
2263 __ Set(rbx, 0); 2281 __ Set(rbx, 0);
2264 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 2282 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
2265 { 2283 {
2266 Handle<Code> adaptor = 2284 Handle<Code> adaptor =
2267 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 2285 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2268 __ jmp(adaptor, RelocInfo::CODE_TARGET); 2286 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2269 } 2287 }
2270 2288
2271 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 2289 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2272 // of the original receiver from the call site). 2290 // of the original receiver from the call site).
2273 __ bind(&non_function); 2291 __ bind(&non_function);
2274 __ movp(args.GetReceiverOperand(), rdi); 2292 __ movp(args.GetReceiverOperand(), rdi);
2275 __ Set(rax, argc); 2293 __ Set(rax, argc_);
2276 __ Set(rbx, 0); 2294 __ Set(rbx, 0);
2277 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 2295 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
2278 Handle<Code> adaptor = 2296 Handle<Code> adaptor =
2279 isolate->builtins()->ArgumentsAdaptorTrampoline(); 2297 isolate->builtins()->ArgumentsAdaptorTrampoline();
2280 __ Jump(adaptor, RelocInfo::CODE_TARGET); 2298 __ Jump(adaptor, RelocInfo::CODE_TARGET);
2281 } 2299 }
2282 2300
2283 if (state.CallAsMethod() && state.IsSloppy()) { 2301 if (CallAsMethod()) {
2284 __ bind(&wrap); 2302 __ bind(&wrap);
2285
2286 if (!state.IsGeneric()) {
2287 // Do not transform the receiver for natives.
2288 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2289 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
2290 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
2291 }
2292
2293 // Wrap the receiver and patch it back onto the stack. 2303 // Wrap the receiver and patch it back onto the stack.
2294 { FrameScope frame_scope(masm, StackFrame::INTERNAL); 2304 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2295 __ Push(rdi); 2305 __ Push(rdi);
2296 __ Push(rax); 2306 __ Push(rax);
2297 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 2307 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2298 __ Pop(rdi); 2308 __ Pop(rdi);
2299 } 2309 }
2300 __ movp(args.GetReceiverOperand(), rax); 2310 __ movp(args.GetReceiverOperand(), rax);
2301 __ jmp(&cont); 2311 __ jmp(&cont);
2302 } 2312 }
2303 } 2313 }
2304 2314
2305 2315
2306 void CallFunctionStub::Generate(MacroAssembler* masm) {
2307 // rdi : the function to call
2308
2309   // GenericCallHelper expresses its options in terms of CallIC::State.
2310 CallIC::CallType call_type = CallAsMethod() ?
2311 CallIC::METHOD : CallIC::FUNCTION;
2312
2313 if (NeedsChecks()) {
2314 GenericCallHelper(masm,
2315 CallIC::State::SlowCallState(
2316 argc_,
2317 call_type));
2318 } else {
2319 GenericCallHelper(masm,
2320 CallIC::State::MonomorphicCallState(
2321 argc_,
2322 call_type,
2323 CallIC::ARGUMENTS_COUNT_UNKNOWN,
2324 SLOPPY));
2325 }
2326 }
2327
2328
2329 void CallConstructStub::Generate(MacroAssembler* masm) { 2316 void CallConstructStub::Generate(MacroAssembler* masm) {
2330 // rax : number of arguments 2317 // rax : number of arguments
2331 // rbx : feedback vector 2318 // rbx : feedback vector
2332 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback 2319 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
2333 // vector (Smi) 2320 // vector (Smi)
2334 // rdi : constructor function 2321 // rdi : constructor function
2335 Label slow, non_function_call; 2322 Label slow, non_function_call;
2336 2323
2337 // Check that function is not a smi. 2324 // Check that function is not a smi.
2338 __ JumpIfSmi(rdi, &non_function_call); 2325 __ JumpIfSmi(rdi, &non_function_call);
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
2385 __ bind(&non_function_call); 2372 __ bind(&non_function_call);
2386 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); 2373 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2387 __ bind(&do_call); 2374 __ bind(&do_call);
2388 // Set expected number of arguments to zero (not changing rax). 2375 // Set expected number of arguments to zero (not changing rax).
2389 __ Set(rbx, 0); 2376 __ Set(rbx, 0);
2390 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 2377 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2391 RelocInfo::CODE_TARGET); 2378 RelocInfo::CODE_TARGET);
2392 } 2379 }
2393 2380
2394 2381
2395 void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) {
2396 GenericCallHelper(masm,
2397 CallIC::State::MonomorphicCallState(
2398 state_.arg_count(),
2399 state_.call_type(),
2400 state_.argument_check(),
2401 state_.strict_mode()));
2402 }
2403
2404
2405 void CallICStub::GenerateSlowCall(MacroAssembler* masm) {
2406 GenericCallHelper(masm,
2407 CallIC::State::SlowCallState(
2408 state_.arg_count(),
2409 state_.call_type()));
2410 }
2411
2412
2413 void CallICStub::Generate(MacroAssembler* masm) {
2414 // rdi - function
2415 // rbx - vector
2416 // rdx - slot id
2417 Isolate* isolate = masm->isolate();
2418 Label extra_checks_or_miss, slow;
2419
2420   // The checks. First, does rdi match the recorded monomorphic target?
2421 __ SmiToInteger32(rdx, rdx);
2422 __ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size,
2423 FixedArray::kHeaderSize));
2424 __ j(not_equal, &extra_checks_or_miss);
2425
2426 GenerateMonomorphicCall(masm);
2427
2428 __ bind(&extra_checks_or_miss);
2429 if (IsGeneric()) {
2430 Label miss_uninit;
2431
2432 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
2433 FixedArray::kHeaderSize));
2434 __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
2435 __ j(equal, &slow);
2436 __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
2437 __ j(equal, &miss_uninit);
2438     // If we get here, go from monomorphic to megamorphic. Don't bother missing,
2439 // just update.
2440 __ Move(FieldOperand(rbx, rdx, times_pointer_size,
2441 FixedArray::kHeaderSize),
2442 TypeFeedbackInfo::MegamorphicSentinel(isolate));
2443 __ jmp(&slow);
2444
2445 __ bind(&miss_uninit);
2446 }
2447
2448 GenerateMiss(masm);
2449
2450 // the slow case
2451 __ bind(&slow);
2452 GenerateSlowCall(masm);
2453 }
2454
2455
2456 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2457 // Get the receiver of the function from the stack; 1 ~ return address.
2458 __ movp(rcx, Operand(rsp, (state_.arg_count() + 1) * kPointerSize));
2459
2460 {
2461 FrameScope scope(masm, StackFrame::INTERNAL);
2462
2463 // Push the receiver and the function and feedback info.
2464 __ Push(rcx);
2465 __ Push(rdi);
2466 __ Push(rbx);
2467 __ Integer32ToSmi(rdx, rdx);
2468 __ Push(rdx);
2469
2470 // Call the entry.
2471 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
2472 masm->isolate());
2473 __ CallExternalReference(miss, 4);
2474
2475     // Move result to rdi and exit the internal frame.
2476 __ movp(rdi, rax);
2477 }
2478 }
2479
2480
2481 bool CEntryStub::NeedsImmovableCode() { 2382 bool CEntryStub::NeedsImmovableCode() {
2482 return false; 2383 return false;
2483 } 2384 }
2484 2385
2485 2386
2486 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 2387 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2487 CEntryStub::GenerateAheadOfTime(isolate); 2388 CEntryStub::GenerateAheadOfTime(isolate);
2488 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 2389 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2489 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 2390 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2490 // It is important that the store buffer overflow stubs are generated first. 2391 // It is important that the store buffer overflow stubs are generated first.
(...skipping 2832 matching lines...) Expand 10 before | Expand all | Expand 10 after
5323 return_value_operand, 5224 return_value_operand,
5324 NULL); 5225 NULL);
5325 } 5226 }
5326 5227
5327 5228
5328 #undef __ 5229 #undef __
5329 5230
5330 } } // namespace v8::internal 5231 } } // namespace v8::internal
5331 5232
5332 #endif // V8_TARGET_ARCH_X64 5233 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/builtins-x64.cc ('k') | src/x64/debug-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698