| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 388 matching lines...) |
| 399 | 399 |
| 400 // Reserves space for the extra arguments to API function in the | 400 // Reserves space for the extra arguments to API function in the |
| 401 // caller's frame. | 401 // caller's frame. |
| 402 // | 402 // |
| 403 // These arguments are set by CheckPrototypes and GenerateFastApiCall. | 403 // These arguments are set by CheckPrototypes and GenerateFastApiCall. |
| 404 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { | 404 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { |
| 405 // ----------- S t a t e ------------- | 405 // ----------- S t a t e ------------- |
| 406 // -- rsp[0] : return address | 406 // -- rsp[0] : return address |
| 407 // -- rsp[8] : last argument in the internal frame of the caller | 407 // -- rsp[8] : last argument in the internal frame of the caller |
| 408 // ----------------------------------- | 408 // ----------------------------------- |
| 409 __ movq(scratch, StackOperandForReturnAddress(0)); | 409 __ MoveReturnAddress(scratch, Operand(rsp, 0)); |
| 410 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); | 410 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); |
| 411 __ movq(StackOperandForReturnAddress(0), scratch); | 411 __ MoveReturnAddress(Operand(rsp, 0), scratch); |
| 412 __ Move(scratch, Smi::FromInt(0)); | 412 __ Move(scratch, Smi::FromInt(0)); |
| 413 StackArgumentsAccessor args(rsp, kFastApiCallArguments, | 413 StackArgumentsAccessor args(rsp, kFastApiCallArguments, |
| 414 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 414 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
| 415 for (int i = 0; i < kFastApiCallArguments; i++) { | 415 for (int i = 0; i < kFastApiCallArguments; i++) { |
| 416 __ movq(args.GetArgumentOperand(i), scratch); | 416 __ movq(args.GetArgumentOperand(i), scratch); |
| 417 } | 417 } |
| 418 } | 418 } |
| 419 | 419 |
| 420 | 420 |
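The hunk above replaces the raw `movq` + `StackOperandForReturnAddress(...)` pair with a `MoveReturnAddress` helper whose definition is outside the lines shown here. As a rough sketch only, assuming the helper is a thin, intention-revealing wrapper around the same `movq` forms (its placement in `macro-assembler-x64` and the exact signatures are assumptions, not taken from this CL), it could look like this:

```cpp
// Hypothetical sketch only -- not the definition from this CL.
// Assumed declarations in macro-assembler-x64.h:
//   void MoveReturnAddress(Register dst, const Operand& src);
//   void MoveReturnAddress(const Operand& dst, Register src);

// Assumed definitions in macro-assembler-x64.cc:
void MacroAssembler::MoveReturnAddress(Register dst, const Operand& src) {
  // Load the return address from its current stack slot into |dst|.
  movq(dst, src);
}


void MacroAssembler::MoveReturnAddress(const Operand& dst, Register src) {
  // Store the return address held in |src| into its new stack slot.
  movq(dst, src);
}
```

Whatever the real implementation, the call sites in this file only depend on it behaving exactly like the `movq` it replaces; the new name just makes the return-address relocation visible at a glance.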
| 421 // Undoes the effects of ReserveSpaceForFastApiCall. | 421 // Undoes the effects of ReserveSpaceForFastApiCall. |
| 422 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { | 422 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { |
| 423 // ----------- S t a t e ------------- | 423 // ----------- S t a t e ------------- |
| 424 // -- rsp[0] : return address. | 424 // -- rsp[0] : return address. |
| 425 // -- rsp[8] : last fast api call extra argument. | 425 // -- rsp[8] : last fast api call extra argument. |
| 426 // -- ... | 426 // -- ... |
| 427 // -- rsp[kFastApiCallArguments * 8] : first fast api call extra | 427 // -- rsp[kFastApiCallArguments * 8] : first fast api call extra |
| 428 // argument. | 428 // argument. |
| 429 // -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal | 429 // -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal |
| 430 // frame. | 430 // frame. |
| 431 // ----------------------------------- | 431 // ----------------------------------- |
| 432 __ movq(scratch, StackOperandForReturnAddress(0)); | 432 __ MoveReturnAddress(scratch, Operand(rsp, 0)); |
| 433 __ movq(StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize), | 433 __ MoveReturnAddress(Operand(rsp, kFastApiCallArguments * kPointerSize), |
| 434 scratch); | 434 scratch); |
| 435 __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments)); | 435 __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments)); |
| 436 } | 436 } |
| 437 | 437 |
| 438 | 438 |
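Taken together, `ReserveSpaceForFastApiCall` and `FreeSpaceForFastApiCall` implement one small technique: slide the return address down by `kFastApiCallArguments` slots to open a gap above it, fill the gap with `Smi::FromInt(0)` so the GC only ever sees valid tagged values there, and later slide the return address back up and release the gap. The standalone C++ program below mimics that slot shuffling on a plain container; the constants (`kFastApiCallArguments = 7` and the fake addresses) are illustrative assumptions, not values read from this CL.

```cpp
#include <cstdint>
#include <cstdio>
#include <deque>

// Simulates the slot shuffling done by ReserveSpaceForFastApiCall /
// FreeSpaceForFastApiCall. Index 0 plays the role of rsp[0]; each element
// stands in for one 8-byte stack slot.
int main() {
  const int kFastApiCallArguments = 7;       // assumed, for illustration only
  const uint64_t kReturnAddress   = 0x1234;  // stand-in for the real address
  const uint64_t kSmiZero         = 0;       // Smi::FromInt(0) is all zero bits

  // Caller frame before reserving: return address on top, two arguments above.
  std::deque<uint64_t> stack = {kReturnAddress, 0xAAAA, 0xBBBB};

  // Reserve: grow the stack by kFastApiCallArguments slots (subq rsp, ...),
  // move the return address to the new top, and zero-fill the opened gap.
  for (int i = 0; i < kFastApiCallArguments; i++) stack.push_front(kSmiZero);
  stack[0] = kReturnAddress;                // MoveReturnAddress(Operand(rsp, 0), scratch)
  stack[kFastApiCallArguments] = kSmiZero;  // old return-address slot becomes a zeroed extra argument

  printf("after reserve: top=%llx, gap slots=%d\n",
         (unsigned long long)stack[0], kFastApiCallArguments);

  // Free: put the return address back where it will sit after addq rsp, ...,
  // then shrink the stack again.
  stack[kFastApiCallArguments] = kReturnAddress;
  for (int i = 0; i < kFastApiCallArguments; i++) stack.pop_front();

  printf("after free: top=%llx (return address restored)\n",
         (unsigned long long)stack[0]);
  return 0;
}
```

The zero fill matters because the GC may walk these slots as tagged values before the API call populates them; Smi zero is always safe to scan.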
| 439 // Generates call to API function. | 439 // Generates call to API function. |
| 440 static void GenerateFastApiCall(MacroAssembler* masm, | 440 static void GenerateFastApiCall(MacroAssembler* masm, |
| 441 const CallOptimization& optimization, | 441 const CallOptimization& optimization, |
| 442 int argc, | 442 int argc, |
| 443 bool restore_context) { | 443 bool restore_context) { |
| (...skipping 96 matching lines...) |
| 540 int argc, | 540 int argc, |
| 541 Register* values) { | 541 Register* values) { |
| 542 ASSERT(optimization.is_simple_api_call()); | 542 ASSERT(optimization.is_simple_api_call()); |
| 543 ASSERT(!receiver.is(scratch)); | 543 ASSERT(!receiver.is(scratch)); |
| 544 | 544 |
| 545 const int fast_api_call_argc = argc + kFastApiCallArguments; | 545 const int fast_api_call_argc = argc + kFastApiCallArguments; |
| 546 StackArgumentsAccessor args(rsp, fast_api_call_argc); | 546 StackArgumentsAccessor args(rsp, fast_api_call_argc); |
| 547 // argc + 1 is the number of arguments in front of the FastApiCall arguments; the + 1 is the receiver | 547 // argc + 1 is the number of arguments in front of the FastApiCall arguments; the + 1 is the receiver |
| 548 const int kHolderIndex = argc + 1 + | 548 const int kHolderIndex = argc + 1 + |
| 549 kFastApiCallArguments - 1 - FunctionCallbackArguments::kHolderIndex; | 549 kFastApiCallArguments - 1 - FunctionCallbackArguments::kHolderIndex; |
| 550 __ movq(scratch, StackOperandForReturnAddress(0)); | 550 __ MoveReturnAddress(scratch, Operand(rsp, 0)); |
| 551 // Assign stack space for the call arguments and receiver. | 551 // Assign stack space for the call arguments and receiver. |
| 552 __ subq(rsp, Immediate((fast_api_call_argc + 1) * kPointerSize)); | 552 __ subq(rsp, Immediate((fast_api_call_argc + 1) * kPointerSize)); |
| 553 __ movq(StackOperandForReturnAddress(0), scratch); | 553 __ MoveReturnAddress(Operand(rsp, 0), scratch); |
| 554 // Write holder to stack frame. | 554 // Write holder to stack frame. |
| 555 __ movq(args.GetArgumentOperand(kHolderIndex), receiver); | 555 __ movq(args.GetArgumentOperand(kHolderIndex), receiver); |
| 556 __ movq(args.GetReceiverOperand(), receiver); | 556 __ movq(args.GetReceiverOperand(), receiver); |
| 557 // Write the arguments to stack frame. | 557 // Write the arguments to stack frame. |
| 558 for (int i = 0; i < argc; i++) { | 558 for (int i = 0; i < argc; i++) { |
| 559 ASSERT(!receiver.is(values[i])); | 559 ASSERT(!receiver.is(values[i])); |
| 560 ASSERT(!scratch.is(values[i])); | 560 ASSERT(!scratch.is(values[i])); |
| 561 __ movq(args.GetArgumentOperand(i + 1), values[i]); | 561 __ movq(args.GetArgumentOperand(i + 1), values[i]); |
| 562 } | 562 } |
| 563 | 563 |
| (...skipping 1919 matching lines...) |
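The `kHolderIndex` expression in the hunk above (`argc + 1 + kFastApiCallArguments - 1 - FunctionCallbackArguments::kHolderIndex`) is easier to follow with numbers plugged in. A tiny standalone check, using purely illustrative values (`argc = 2`, `kFastApiCallArguments = 7`, holder at implicit slot 0 are assumptions, not constants read from this CL):

```cpp
#include <cstdio>

int main() {
  // Illustrative values only; the real constants live in the V8 sources.
  const int argc = 2;                          // explicit JS arguments
  const int kFastApiCallArguments = 7;         // implicit API-call slots
  const int kFunctionCallbackHolderIndex = 0;  // holder's slot in the implicit block (assumed)

  // Skip the argc explicit arguments plus the receiver (argc + 1), then index
  // into the implicit block from its far end, since the implicit slots are
  // presumably laid out in reverse order relative to the accessor's indices.
  const int kHolderIndex =
      argc + 1 + kFastApiCallArguments - 1 - kFunctionCallbackHolderIndex;

  printf("kHolderIndex = %d (with argc = %d)\n", kHolderIndex, argc);  // prints 9
  return 0;
}
```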
| 2483 | 2483 |
| 2484 // Allocate space for v8::Arguments implicit values. Must be initialized | 2484 // Allocate space for v8::Arguments implicit values. Must be initialized |
| 2485 // before calling any runtime function. | 2485 // before calling any runtime function. |
| 2486 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); | 2486 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); |
| 2487 | 2487 |
| 2488 // Check that the maps haven't changed and find a Holder as a side effect. | 2488 // Check that the maps haven't changed and find a Holder as a side effect. |
| 2489 CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi, | 2489 CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi, |
| 2490 name, depth, &miss); | 2490 name, depth, &miss); |
| 2491 | 2491 |
| 2492 // Move the return address on top of the stack. | 2492 // Move the return address on top of the stack. |
| 2493 __ movq(rax, | 2493 __ MoveReturnAddress(rax, Operand(rsp, kFastApiCallArguments * kPointerSize)); |
| 2494 StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize)); | 2494 __ MoveReturnAddress(Operand(rsp, 0), rax); |
| 2495 __ movq(StackOperandForReturnAddress(0), rax); | |
| 2496 | 2495 |
| 2497 GenerateFastApiCall(masm(), optimization, argc, false); | 2496 GenerateFastApiCall(masm(), optimization, argc, false); |
| 2498 | 2497 |
| 2499 __ bind(&miss); | 2498 __ bind(&miss); |
| 2500 __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); | 2499 __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); |
| 2501 | 2500 |
| 2502 __ bind(&miss_before_stack_reserved); | 2501 __ bind(&miss_before_stack_reserved); |
| 2503 GenerateMissBranch(); | 2502 GenerateMissBranch(); |
| 2504 | 2503 |
| 2505 // Return the generated code. | 2504 // Return the generated code. |
| (...skipping 640 matching lines...) |
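The tail of the hunk above also shows why this stub needs two miss labels: `&miss` is reached only after the `subq` has reserved the `kFastApiCallArguments` slots and therefore must release them, while `&miss_before_stack_reserved` serves checks that fail before any reservation. A hypothetical skeleton of that pattern (not the real stub; the surrounding checks and helpers are elided):

```cpp
// Hypothetical skeleton only: two miss labels so the cleanup matches how much
// stack the fast path has already reserved.
Label miss, miss_before_stack_reserved;

// Checks that can fail before anything is reserved branch here:
//   __ j(not_equal, &miss_before_stack_reserved);

__ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));  // reserve

// Checks that can fail after the reservation branch here instead:
//   __ j(not_equal, &miss);

// ... fast API call on the success path ...

__ bind(&miss);
__ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));  // release
__ bind(&miss_before_stack_reserved);
// GenerateMissBranch();  // tail-call the generic miss handler
```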
| 3146 // ----------------------------------- | 3145 // ----------------------------------- |
| 3147 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric); | 3146 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric); |
| 3148 } | 3147 } |
| 3149 | 3148 |
| 3150 | 3149 |
| 3151 #undef __ | 3150 #undef __ |
| 3152 | 3151 |
| 3153 } } // namespace v8::internal | 3152 } } // namespace v8::internal |
| 3154 | 3153 |
| 3155 #endif // V8_TARGET_ARCH_X64 | 3154 #endif // V8_TARGET_ARCH_X64 |