| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 55 matching lines...) |
| 66 ASSERT(extra_args == NO_EXTRA_ARGUMENTS); | 66 ASSERT(extra_args == NO_EXTRA_ARGUMENTS); |
| 67 } | 67 } |
| 68 | 68 |
| 69 // JumpToExternalReference expects rax to contain the number of arguments | 69 // JumpToExternalReference expects rax to contain the number of arguments |
| 70 // including the receiver and the extra arguments. | 70 // including the receiver and the extra arguments. |
| 71 __ addq(rax, Immediate(num_extra_args + 1)); | 71 __ addq(rax, Immediate(num_extra_args + 1)); |
| 72 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1); | 72 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1); |
| 73 } | 73 } |
| 74 | 74 |
| 75 | 75 |
| 76 static void CallRuntimePassFunction(MacroAssembler* masm, |
| 77 Runtime::FunctionId function_id) { |
| 78 FrameScope scope(masm, StackFrame::INTERNAL); |
| 79 // Push a copy of the function onto the stack. |
| 80 __ push(rdi); |
| 81 // Push call kind information. |
| 82 __ push(rcx); |
| 83 // Function is also the parameter to the runtime call. |
| 84 __ push(rdi); |
| 85 |
| 86 __ CallRuntime(function_id, 1); |
| 87 // Restore call kind information. |
| 88 __ pop(rcx); |
| 89 // Restore function. |
| 90 __ pop(rdi); |
| 91 } |
| 92 |
| 93 |
| 76 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 94 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
| 77 __ movq(kScratchRegister, | 95 __ movq(kScratchRegister, |
| 78 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 96 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 79 __ movq(kScratchRegister, | 97 __ movq(kScratchRegister, |
| 80 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset)); | 98 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset)); |
| 81 __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize)); | 99 __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize)); |
| 82 __ jmp(kScratchRegister); | 100 __ jmp(kScratchRegister); |
| 83 } | 101 } |
| 84 | 102 |
| 85 | 103 |
| 86 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) { | 104 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) { |
| 105 // Checking whether the queued function is ready for install is optional, |
| 106 // since we come across interrupts and stack checks elsewhere. However, |
| 107 // not checking may delay installing ready functions, and always checking |
| 108 // would be quite expensive. A good compromise is to first check against |
| 109 // stack limit as a cue for an interrupt signal. |
| 110 Label ok; |
| 111 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 112 __ j(above_equal, &ok); |
| 113 |
| 114 CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode); |
| 115 // Tail call to returned code. |
| 116 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); |
| 117 __ jmp(rax); |
| 118 |
| 119 __ bind(&ok); |
| 87 GenerateTailCallToSharedCode(masm); | 120 GenerateTailCallToSharedCode(masm); |
| 88 } | 121 } |
| 89 | 122 |
| 90 | 123 |
| 91 void Builtins::Generate_InstallRecompiledCode(MacroAssembler* masm) { | |
| 92 // Enter an internal frame. | |
| 93 { | |
| 94 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 95 | |
| 96 // Push a copy of the function onto the stack. | |
| 97 __ push(rdi); | |
| 98 // Push call kind information. | |
| 99 __ push(rcx); | |
| 100 | |
| 101 __ push(rdi); // Function is also the parameter to the runtime call. | |
| 102 __ CallRuntime(Runtime::kInstallRecompiledCode, 1); | |
| 103 | |
| 104 // Restore call kind information. | |
| 105 __ pop(rcx); | |
| 106 // Restore function. | |
| 107 __ pop(rdi); | |
| 108 | |
| 109 // Tear down internal frame. | |
| 110 } | |
| 111 | |
| 112 // Do a tail-call of the compiled function. | |
| 113 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); | |
| 114 __ jmp(rax); | |
| 115 } | |
| 116 | |
| 117 | |
| 118 void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) { | 124 void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) { |
| 119 { | 125 CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile); |
| 120 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 121 | |
| 122 // Push a copy of the function onto the stack. | |
| 123 __ push(rdi); | |
| 124 // Push call kind information. | |
| 125 __ push(rcx); | |
| 126 | |
| 127 __ push(rdi); // Function is also the parameter to the runtime call. | |
| 128 __ CallRuntime(Runtime::kConcurrentRecompile, 1); | |
| 129 | |
| 130 // Restore call kind information. | |
| 131 __ pop(rcx); | |
| 132 // Restore receiver. | |
| 133 __ pop(rdi); | |
| 134 | |
| 135 // Tear down internal frame. | |
| 136 } | |
| 137 | |
| 138 GenerateTailCallToSharedCode(masm); | 126 GenerateTailCallToSharedCode(masm); |
| 139 } | 127 } |
| 140 | 128 |
| 141 | 129 |
| 142 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 130 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
| 143 bool is_api_function, | 131 bool is_api_function, |
| 144 bool count_constructions) { | 132 bool count_constructions) { |
| 145 // ----------- S t a t e ------------- | 133 // ----------- S t a t e ------------- |
| 146 // -- rax: number of arguments | 134 // -- rax: number of arguments |
| 147 // -- rdi: constructor function | 135 // -- rdi: constructor function |
| (...skipping 431 matching lines...) |
| 579 Generate_JSEntryTrampolineHelper(masm, false); | 567 Generate_JSEntryTrampolineHelper(masm, false); |
| 580 } | 568 } |
| 581 | 569 |
| 582 | 570 |
| 583 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 571 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
| 584 Generate_JSEntryTrampolineHelper(masm, true); | 572 Generate_JSEntryTrampolineHelper(masm, true); |
| 585 } | 573 } |
| 586 | 574 |
| 587 | 575 |
| 588 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { | 576 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { |
| 589 // Enter an internal frame. | 577 CallRuntimePassFunction(masm, Runtime::kLazyCompile); |
| 590 { | |
| 591 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 592 | |
| 593 // Push a copy of the function onto the stack. | |
| 594 __ push(rdi); | |
| 595 // Push call kind information. | |
| 596 __ push(rcx); | |
| 597 | |
| 598 __ push(rdi); // Function is also the parameter to the runtime call. | |
| 599 __ CallRuntime(Runtime::kLazyCompile, 1); | |
| 600 | |
| 601 // Restore call kind information. | |
| 602 __ pop(rcx); | |
| 603 // Restore receiver. | |
| 604 __ pop(rdi); | |
| 605 | |
| 606 // Tear down internal frame. | |
| 607 } | |
| 608 | |
| 609 // Do a tail-call of the compiled function. | 578 // Do a tail-call of the compiled function. |
| 610 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); | 579 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); |
| 611 __ jmp(rax); | 580 __ jmp(rax); |
| 612 } | 581 } |
| 613 | 582 |
| 614 | 583 |
| 615 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { | 584 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { |
| 616 // Enter an internal frame. | 585 CallRuntimePassFunction(masm, Runtime::kLazyRecompile); |
| 617 { | |
| 618 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 619 | |
| 620 // Push a copy of the function onto the stack. | |
| 621 __ push(rdi); | |
| 622 // Push call kind information. | |
| 623 __ push(rcx); | |
| 624 | |
| 625 __ push(rdi); // Function is also the parameter to the runtime call. | |
| 626 __ CallRuntime(Runtime::kLazyRecompile, 1); | |
| 627 | |
| 628 // Restore call kind information. | |
| 629 __ pop(rcx); | |
| 630 // Restore function. | |
| 631 __ pop(rdi); | |
| 632 | |
| 633 // Tear down internal frame. | |
| 634 } | |
| 635 | |
| 636 // Do a tail-call of the compiled function. | 586 // Do a tail-call of the compiled function. |
| 637 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); | 587 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); |
| 638 __ jmp(rax); | 588 __ jmp(rax); |
| 639 } | 589 } |
| 640 | 590 |
| 641 | 591 |
| 642 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 592 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { |
| 643 // For now, we are relying on the fact that make_code_young doesn't do any | 593 // For now, we are relying on the fact that make_code_young doesn't do any |
| 644 // garbage collection which allows us to save/restore the registers without | 594 // garbage collection which allows us to save/restore the registers without |
| 645 // worrying about which of them contain pointers. We also don't build an | 595 // worrying about which of them contain pointers. We also don't build an |
| 646 // internal frame to make the code faster, since we shouldn't have to do stack | 596 // internal frame to make the code faster, since we shouldn't have to do stack |
| 647 // crawls in MakeCodeYoung. This seems a bit fragile. | 597 // crawls in MakeCodeYoung. This seems a bit fragile. |
| 648 | 598 |
| 649 // Re-execute the code that was patched back to the young age when | 599 // Re-execute the code that was patched back to the young age when |
| 650 // the stub returns. | 600 // the stub returns. |
| 651 __ subq(Operand(rsp, 0), Immediate(5)); | 601 __ subq(Operand(rsp, 0), Immediate(5)); |
| 652 __ Pushad(); | 602 __ Pushad(); |
| 603 __ movq(arg_reg_2, |
| 604 ExternalReference::isolate_address(masm->isolate())); |
| 653 __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize)); | 605 __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize)); |
| 654 { // NOLINT | 606 { // NOLINT |
| 655 FrameScope scope(masm, StackFrame::MANUAL); | 607 FrameScope scope(masm, StackFrame::MANUAL); |
| 656 __ PrepareCallCFunction(1); | 608 __ PrepareCallCFunction(2); |
| 657 __ CallCFunction( | 609 __ CallCFunction( |
| 658 ExternalReference::get_make_code_young_function(masm->isolate()), 1); | 610 ExternalReference::get_make_code_young_function(masm->isolate()), 2); |
| 659 } | 611 } |
| 660 __ Popad(); | 612 __ Popad(); |
| 661 __ ret(0); | 613 __ ret(0); |
| 662 } | 614 } |
| (...skipping 786 matching lines...) |
| 1449 // And "return" to the OSR entry point of the function. | 1401 // And "return" to the OSR entry point of the function. |
| 1450 __ ret(0); | 1402 __ ret(0); |
| 1451 } | 1403 } |
| 1452 | 1404 |
| 1453 | 1405 |
| 1454 #undef __ | 1406 #undef __ |
| 1455 | 1407 |
| 1456 } } // namespace v8::internal | 1408 } } // namespace v8::internal |
| 1457 | 1409 |
| 1458 #endif // V8_TARGET_ARCH_X64 | 1410 #endif // V8_TARGET_ARCH_X64 |