OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 392 matching lines...)
403 | 403 |
404 // Reserves space for the extra arguments to API function in the | 404 // Reserves space for the extra arguments to API function in the |
405 // caller's frame. | 405 // caller's frame. |
406 // | 406 // |
407 // These arguments are set by CheckPrototypes and GenerateFastApiCall. | 407 // These arguments are set by CheckPrototypes and GenerateFastApiCall. |
408 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { | 408 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { |
409 // ----------- S t a t e ------------- | 409 // ----------- S t a t e ------------- |
410 // -- rsp[0] : return address | 410 // -- rsp[0] : return address |
411 // -- rsp[8] : last argument in the internal frame of the caller | 411 // -- rsp[8] : last argument in the internal frame of the caller |
412 // ----------------------------------- | 412 // ----------------------------------- |
413 __ movq(scratch, Operand(rsp, 0)); | 413 __ movq(scratch, StackOperandForReturnAddress(0)); |
414 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); | 414 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); |
415 __ movq(Operand(rsp, 0), scratch); | 415 __ movq(StackOperandForReturnAddress(0), scratch); |
416 __ Move(scratch, Smi::FromInt(0)); | 416 __ Move(scratch, Smi::FromInt(0)); |
417 for (int i = 1; i <= kFastApiCallArguments; i++) { | 417 for (int i = 1; i <= kFastApiCallArguments; i++) { |
418 __ movq(Operand(rsp, i * kPointerSize), scratch); | 418 __ movq(Operand(rsp, i * kPointerSize), scratch); |
419 } | 419 } |
420 } | 420 } |
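The rewrite above replaces raw Operand(rsp, ...) accesses to the return-address slot with a StackOperandForReturnAddress helper, presumably defined elsewhere in this patch (in one of the skipped hunks). A minimal sketch of such a helper, assuming it does nothing more than wrap an rsp-relative operand on stock x64 (the name comes from this diff; the body is an assumption):

static inline Operand StackOperandForReturnAddress(int32_t disp) {
  // On plain x64 the return address is an ordinary stack slot, so the
  // helper reduces to an rsp-relative operand; a port with a different
  // return-address convention would adjust the displacement here rather
  // than at every call site.
  return Operand(rsp, disp);
}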
421 | 421 |
422 | 422 |
423 // Undoes the effects of ReserveSpaceForFastApiCall. | 423 // Undoes the effects of ReserveSpaceForFastApiCall. |
424 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { | 424 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { |
425 // ----------- S t a t e ------------- | 425 // ----------- S t a t e ------------- |
426 // -- rsp[0] : return address. | 426 // -- rsp[0] : return address. |
427 // -- rsp[8] : last fast api call extra argument. | 427 // -- rsp[8] : last fast api call extra argument. |
428 // -- ... | 428 // -- ... |
429 // -- rsp[kFastApiCallArguments * 8] : first fast api call extra | 429 // -- rsp[kFastApiCallArguments * 8] : first fast api call extra |
430 // argument. | 430 // argument. |
431 // -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal | 431 // -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal |
432 // frame. | 432 // frame. |
433 // ----------------------------------- | 433 // ----------------------------------- |
434 __ movq(scratch, Operand(rsp, 0)); | 434 __ movq(scratch, StackOperandForReturnAddress(0)); |
435 __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch); | 435 __ movq(StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize), |
| 436 scratch); |
436 __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments)); | 437 __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments)); |
437 } | 438 } |
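To see the shuffle concretely, here is FreeSpaceForFastApiCall with the constants folded in, assuming kFastApiCallArguments == 4 and kPointerSize == 8 purely for illustration (the real values live in the shared stub-cache code):

// Entry layout: rsp[0] = return address, rsp[8]..rsp[32] = the four
// extra argument slots, rsp[40] = last argument in the caller's frame.
__ movq(scratch, StackOperandForReturnAddress(0));   // grab return address
__ movq(StackOperandForReturnAddress(32), scratch);  // copy it into the
                                                     // deepest extra slot
__ addq(rsp, Immediate(32));                         // pop the other slots
// Exit layout: rsp[0] = return address, rsp[8] = last caller argument,
// i.e. exactly the state before ReserveSpaceForFastApiCall ran.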
438 | 439 |
439 | 440 |
440 // Generates call to API function. | 441 // Generates call to API function. |
441 static void GenerateFastApiCall(MacroAssembler* masm, | 442 static void GenerateFastApiCall(MacroAssembler* masm, |
442 const CallOptimization& optimization, | 443 const CallOptimization& optimization, |
443 int argc) { | 444 int argc) { |
444 // ----------- S t a t e ------------- | 445 // ----------- S t a t e ------------- |
445 // -- rsp[0] : return address | 446 // -- rsp[0] : return address |
(...skipping 1897 matching lines...)
2343 | 2344 |
2344 // Allocate space for v8::Arguments implicit values. Must be initialized | 2345 // Allocate space for v8::Arguments implicit values. Must be initialized |
2345 // before calling any runtime function. | 2346 // before calling any runtime function. |
2346 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); | 2347 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); |
2347 | 2348 |
2348 // Check that the maps haven't changed and find a Holder as a side effect. | 2349 // Check that the maps haven't changed and find a Holder as a side effect. |
2349 CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi, | 2350 CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi, |
2350 name, depth, &miss); | 2351 name, depth, &miss); |
2351 | 2352 |
2352 // Move the return address on top of the stack. | 2353 // Move the return address on top of the stack. |
2353 __ movq(rax, Operand(rsp, kFastApiCallArguments * kPointerSize)); | 2354 __ movq(rax, |
2354 __ movq(Operand(rsp, 0 * kPointerSize), rax); | 2355 StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize)); |
| 2356 __ movq(StackOperandForReturnAddress(0), rax); |
2355 | 2357 |
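The new helper also covers the mirror-image shuffle here: the subq above buries the caller's return address under the freshly reserved v8::Arguments slots (which must be initialized before any runtime call, presumably because the GC treats live stack slots as tagged values, hence the Smi zero fill in ReserveSpaceForFastApiCall), and the two movq's bring it back to rsp[0] where GenerateFastApiCall expects it. An illustrative expansion, again assuming kFastApiCallArguments == 4:

// After the subq: rsp[0]..rsp[24] = reserved implicit-value slots,
// rsp[32] = return address, rsp[40] = last real argument.
__ movq(rax, StackOperandForReturnAddress(32));  // fetch buried return address
__ movq(StackOperandForReturnAddress(0), rax);   // park it back at rsp[0]
// The stale copy left at rsp[32] should be harmless: it occupies one of
// the reserved slots, which CheckPrototypes and GenerateFastApiCall fill
// in before the stub returns.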
2356 GenerateFastApiCall(masm(), optimization, argc); | 2358 GenerateFastApiCall(masm(), optimization, argc); |
2357 | 2359 |
2358 __ bind(&miss); | 2360 __ bind(&miss); |
2359 __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); | 2361 __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); |
2360 | 2362 |
2361 __ bind(&miss_before_stack_reserved); | 2363 __ bind(&miss_before_stack_reserved); |
2362 GenerateMissBranch(); | 2364 GenerateMissBranch(); |
2363 | 2365 |
2364 // Return the generated code. | 2366 // Return the generated code. |
(...skipping 1147 matching lines...)
3512 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); | 3514 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); |
3513 } | 3515 } |
3514 } | 3516 } |
3515 | 3517 |
3516 | 3518 |
3517 #undef __ | 3519 #undef __ |
3518 | 3520 |
3519 } } // namespace v8::internal | 3521 } } // namespace v8::internal |
3520 | 3522 |
3521 #endif // V8_TARGET_ARCH_X64 | 3523 #endif // V8_TARGET_ARCH_X64 |