| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/deoptimizer.h" | 10 #include "src/deoptimizer.h" |
| (...skipping 73 matching lines...) |
| 84 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 84 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
| 85 // Checking whether the queued function is ready for install is optional, | 85 // Checking whether the queued function is ready for install is optional, |
| 86 // since we come across interrupts and stack checks elsewhere. However, | 86 // since we come across interrupts and stack checks elsewhere. However, |
| 87 // not checking may delay installing ready functions, and always checking | 87 // not checking may delay installing ready functions, and always checking |
| 88 // would be quite expensive. A good compromise is to first check against | 88 // would be quite expensive. A good compromise is to first check against |
| 89 // stack limit as a cue for an interrupt signal. | 89 // stack limit as a cue for an interrupt signal. |
| 90 Label ok; | 90 Label ok; |
| 91 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 91 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 92 __ j(above_equal, &ok); | 92 __ j(above_equal, &ok); |
| 93 | 93 |
| 94 CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode); | 94 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); |
| 95 GenerateTailCallToReturnedCode(masm); | 95 GenerateTailCallToReturnedCode(masm); |
| 96 | 96 |
| 97 __ bind(&ok); | 97 __ bind(&ok); |
| 98 GenerateTailCallToSharedCode(masm); | 98 GenerateTailCallToSharedCode(masm); |
| 99 } | 99 } |
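Note on the hunk above: the only substantive change here, as throughout this CL, is dropping the kHidden prefix (Runtime::kHiddenTryInstallOptimizedCode becomes Runtime::kTryInstallOptimizedCode); the emitted code is unchanged. As a minimal sketch, the control flow the builtin generates looks roughly like the plain C++ below (stand-in names, not the V8 API):

    // Sketch only: hypothetical stand-ins model the generated x64 code.
    #include <cstdint>

    using Code = void (*)();
    static void noop() {}

    static uintptr_t rsp_value()   { return 0x9000; }  // stand-in for rsp
    static uintptr_t stack_limit() { return 0x8000; }  // stack-limit root

    static Code try_install_optimized_code() { return noop; }  // runtime call
    static Code shared_code()                { return noop; }  // unoptimized code

    static void in_optimization_queue() {
      // At or below the stack limit usually means an interrupt is pending,
      // so the check doubles as a cheap cue to attempt the install.
      if (rsp_value() < stack_limit()) {
        try_install_optimized_code()();  // GenerateTailCallToReturnedCode
      } else {
        shared_code()();                 // GenerateTailCallToSharedCode
      }
    }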
| 100 | 100 |
| 101 | 101 |
| 102 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 102 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
| 103 bool is_api_function, | 103 bool is_api_function, |
| 104 bool create_memento) { | 104 bool create_memento) { |
| (...skipping 68 matching lines...) |
| 173 __ subl(FieldOperand(rax, Map::kBitField3Offset), | 173 __ subl(FieldOperand(rax, Map::kBitField3Offset), |
| 174 Immediate(1 << Map::ConstructionCount::kShift)); | 174 Immediate(1 << Map::ConstructionCount::kShift)); |
| 175 | 175 |
| 176 __ cmpl(rsi, Immediate(JSFunction::kFinishSlackTracking)); | 176 __ cmpl(rsi, Immediate(JSFunction::kFinishSlackTracking)); |
| 177 __ j(not_equal, &allocate); | 177 __ j(not_equal, &allocate); |
| 178 | 178 |
| 179 __ Push(rax); | 179 __ Push(rax); |
| 180 __ Push(rdi); | 180 __ Push(rdi); |
| 181 | 181 |
| 182 __ Push(rdi); // constructor | 182 __ Push(rdi); // constructor |
| 183 __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1); | 183 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); |
| 184 | 184 |
| 185 __ Pop(rdi); | 185 __ Pop(rdi); |
| 186 __ Pop(rax); | 186 __ Pop(rax); |
| 187 __ xorl(rsi, rsi); // JSFunction::kNoSlackTracking | 187 __ xorl(rsi, rsi); // JSFunction::kNoSlackTracking |
| 188 | 188 |
| 189 __ bind(&allocate); | 189 __ bind(&allocate); |
| 190 } | 190 } |
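Note on the slack-tracking block above: the subl decrements the construction counter packed into Map::kBitField3Offset, and the Push/Pop pairs only preserve the map (rax) and constructor (rdi) across the runtime call. A minimal sketch of the bookkeeping, with the packed counter and the rsi state modeled as plain ints (state encodings are assumed for illustration):

    // Sketch only: field layout and state values are assumptions.
    struct Map { int construction_count; };

    constexpr int kFinishSlackTracking = 1;  // assumed encoding
    constexpr int kNoSlackTracking     = 0;

    static void finalize_instance_size(Map*) {}  // Runtime::kFinalizeInstanceSize

    static void track_construction(Map* map, int* state /* rsi */) {
      map->construction_count -= 1;          // subl(..., 1 << kShift)
      if (*state == kFinishSlackTracking) {
        // Shrink the instance once enough objects were built to
        // measure the unused in-object slack.
        finalize_instance_size(map);
        *state = kNoSlackTracking;           // xorl(rsi, rsi)
      }
    }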
| 191 | 191 |
| 192 // Now allocate the JSObject on the heap. | 192 // Now allocate the JSObject on the heap. |
| 193 __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); | 193 __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); |
| (...skipping 159 matching lines...) |
| 353 __ movp(rdi, Operand(rsp, kPointerSize*2)); | 353 __ movp(rdi, Operand(rsp, kPointerSize*2)); |
| 354 __ Push(rdi); | 354 __ Push(rdi); |
| 355 offset = kPointerSize; | 355 offset = kPointerSize; |
| 356 } | 356 } |
| 357 | 357 |
| 358 // Must restore rsi (context) and rdi (constructor) before calling runtime. | 358 // Must restore rsi (context) and rdi (constructor) before calling runtime. |
| 359 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 359 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 360 __ movp(rdi, Operand(rsp, offset)); | 360 __ movp(rdi, Operand(rsp, offset)); |
| 361 __ Push(rdi); | 361 __ Push(rdi); |
| 362 if (create_memento) { | 362 if (create_memento) { |
| 363 __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2); | 363 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); |
| 364 } else { | 364 } else { |
| 365 __ CallRuntime(Runtime::kHiddenNewObject, 1); | 365 __ CallRuntime(Runtime::kNewObject, 1); |
| 366 } | 366 } |
| 367 __ movp(rbx, rax); // store result in rbx | 367 __ movp(rbx, rax); // store result in rbx |
| 368 | 368 |
| 369 // If we ended up using the runtime, and we want a memento, then the | 369 // If we ended up using the runtime, and we want a memento, then the |
| 370 // runtime call made it for us, and we shouldn't do create count | 370 // runtime call made it for us, and we shouldn't do create count |
| 371 // increment. | 371 // increment. |
| 372 Label count_incremented; | 372 Label count_incremented; |
| 373 if (create_memento) { | 373 if (create_memento) { |
| 374 __ jmp(&count_incremented); | 374 __ jmp(&count_incremented); |
| 375 } | 375 } |
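Note on the fallback above: when inline allocation fails, the stub re-pushes the constructor (plus the allocation site if a memento is wanted) and lets the runtime allocate. With a memento, the runtime also bumps the site's create count, which is why the stub then jumps over its own increment. A minimal sketch with hypothetical types and signatures:

    // Sketch only: not the real runtime signatures.
    struct Function {};
    struct AllocationSite { int create_count = 0; };
    struct Object {};

    static Object* runtime_new_object(Function*) { return new Object; }

    static Object* runtime_new_object_with_site(Function* f,
                                                AllocationSite* site) {
      site->create_count += 1;  // the runtime increments, not the stub
      return runtime_new_object(f);
    }

    static Object* construct_fallback(Function* ctor, AllocationSite* site,
                                      bool create_memento) {
      return create_memento ? runtime_new_object_with_site(ctor, site)  // 2 args
                            : runtime_new_object(ctor);                 // 1 arg
    }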
| (...skipping 227 matching lines...) |
| 603 Generate_JSEntryTrampolineHelper(masm, false); | 603 Generate_JSEntryTrampolineHelper(masm, false); |
| 604 } | 604 } |
| 605 | 605 |
| 606 | 606 |
| 607 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 607 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
| 608 Generate_JSEntryTrampolineHelper(masm, true); | 608 Generate_JSEntryTrampolineHelper(masm, true); |
| 609 } | 609 } |
| 610 | 610 |
| 611 | 611 |
| 612 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | 612 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { |
| 613 CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized); | 613 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); |
| 614 GenerateTailCallToReturnedCode(masm); | 614 GenerateTailCallToReturnedCode(masm); |
| 615 } | 615 } |
| 616 | 616 |
| 617 | 617 |
| 618 static void CallCompileOptimized(MacroAssembler* masm, | 618 static void CallCompileOptimized(MacroAssembler* masm, |
| 619 bool concurrent) { | 619 bool concurrent) { |
| 620 FrameScope scope(masm, StackFrame::INTERNAL); | 620 FrameScope scope(masm, StackFrame::INTERNAL); |
| 621 // Push a copy of the function onto the stack. | 621 // Push a copy of the function onto the stack. |
| 622 __ Push(rdi); | 622 __ Push(rdi); |
| 623 // Function is also the parameter to the runtime call. | 623 // Function is also the parameter to the runtime call. |
| 624 __ Push(rdi); | 624 __ Push(rdi); |
| 625 // Whether to compile in a background thread. | 625 // Whether to compile in a background thread. |
| 626 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); | 626 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
| 627 | 627 |
| 628 __ CallRuntime(Runtime::kHiddenCompileOptimized, 2); | 628 __ CallRuntime(Runtime::kCompileOptimized, 2); |
| 629 // Restore receiver. | 629 // Restore receiver. |
| 630 __ Pop(rdi); | 630 __ Pop(rdi); |
| 631 } | 631 } |
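Note on CallCompileOptimized above: the function is pushed twice, once as a copy that survives the call and once as the first runtime argument; the boolean selecting concurrent compilation is the second, hence the argument count of 2. A minimal sketch with a hypothetical runtime signature:

    // Sketch only: the push/pop protocol modeled as save/restore.
    struct Function {};
    struct Code {};

    static Code* runtime_compile_optimized(Function*, bool /*concurrent*/) {
      return nullptr;  // stand-in for Runtime::kCompileOptimized
    }

    static Code* call_compile_optimized(Function* function, bool concurrent) {
      Function* saved = function;  // first Push(rdi): survives the call
      Code* code = runtime_compile_optimized(saved, concurrent);  // two args
      function = saved;            // Pop(rdi): restore the register
      return code;                 // the builtin then tail-calls this code
    }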
| 632 | 632 |
| 633 | 633 |
| 634 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 634 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 635 CallCompileOptimized(masm, false); | 635 CallCompileOptimized(masm, false); |
| 636 GenerateTailCallToReturnedCode(masm); | 636 GenerateTailCallToReturnedCode(masm); |
| 637 } | 637 } |
| 638 | 638 |
| (...skipping 80 matching lines...) |
| 719 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 719 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
| 720 SaveFPRegsMode save_doubles) { | 720 SaveFPRegsMode save_doubles) { |
| 721 // Enter an internal frame. | 721 // Enter an internal frame. |
| 722 { | 722 { |
| 723 FrameScope scope(masm, StackFrame::INTERNAL); | 723 FrameScope scope(masm, StackFrame::INTERNAL); |
| 724 | 724 |
| 725 // Preserve registers across notification, this is important for compiled | 725 // Preserve registers across notification, this is important for compiled |
| 726 // stubs that tail call the runtime on deopts passing their parameters in | 726 // stubs that tail call the runtime on deopts passing their parameters in |
| 727 // registers. | 727 // registers. |
| 728 __ Pushad(); | 728 __ Pushad(); |
| 729 __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles); | 729 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); |
| 730 __ Popad(); | 730 __ Popad(); |
| 731 // Tear down internal frame. | 731 // Tear down internal frame. |
| 732 } | 732 } |
| 733 | 733 |
| 734 __ DropUnderReturnAddress(1); // Ignore state offset | 734 __ DropUnderReturnAddress(1); // Ignore state offset |
| 735 __ ret(0); // Return to IC Miss stub, continuation still on stack. | 735 __ ret(0); // Return to IC Miss stub, continuation still on stack. |
| 736 } | 736 } |
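Note on the helper above: Pushad()/Popad() bracket the runtime call because deopting stubs may pass their parameters in any register, so none may be clobbered. Afterwards the state word under the return address is dropped and the builtin returns straight into the IC miss continuation. A minimal sketch of the bracket as an RAII guard (hypothetical helpers):

    // Sketch only: Pushad()/Popad() modeled as a save-all guard.
    struct RegisterSaver {
      RegisterSaver()  { /* Pushad(): spill all GP registers */ }
      ~RegisterSaver() { /* Popad(): restore all GP registers */ }
    };

    static void runtime_notify_stub_failure(bool /*save_doubles*/) {}

    static void notify_stub_failure(bool save_doubles) {
      {
        RegisterSaver guard;  // nothing may be clobbered across the call
        runtime_notify_stub_failure(save_doubles);
      }
      // DropUnderReturnAddress(1); ret(0): discard the state word and
      // return to the IC miss stub, continuation still on the stack.
    }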
| 737 | 737 |
| 738 | 738 |
| 739 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 739 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
| 740 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 740 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
| 741 } | 741 } |
| 742 | 742 |
| 743 | 743 |
| 744 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 744 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
| 745 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 745 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
| 746 } | 746 } |
| 747 | 747 |
| 748 | 748 |
| 749 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 749 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
| 750 Deoptimizer::BailoutType type) { | 750 Deoptimizer::BailoutType type) { |
| 751 // Enter an internal frame. | 751 // Enter an internal frame. |
| 752 { | 752 { |
| 753 FrameScope scope(masm, StackFrame::INTERNAL); | 753 FrameScope scope(masm, StackFrame::INTERNAL); |
| 754 | 754 |
| 755 // Pass the deoptimization type to the runtime system. | 755 // Pass the deoptimization type to the runtime system. |
| 756 __ Push(Smi::FromInt(static_cast<int>(type))); | 756 __ Push(Smi::FromInt(static_cast<int>(type))); |
| 757 | 757 |
| 758 __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1); | 758 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
| 759 // Tear down internal frame. | 759 // Tear down internal frame. |
| 760 } | 760 } |
| 761 | 761 |
| 762 // Get the full codegen state from the stack and untag it. | 762 // Get the full codegen state from the stack and untag it. |
| 763 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize)); | 763 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize)); |
| 764 | 764 |
| 765 // Switch on the state. | 765 // Switch on the state. |
| 766 Label not_no_registers, not_tos_rax; | 766 Label not_no_registers, not_tos_rax; |
| 767 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS)); | 767 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS)); |
| 768 __ j(not_equal, &not_no_registers, Label::kNear); | 768 __ j(not_equal, &not_no_registers, Label::kNear); |
| (...skipping 729 matching lines...) |
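Note on the Smi handling above: the deoptimization type is pushed as a Smi and the full-codegen state is read back with SmiToInteger32. On the x64 port a Smi keeps its 32-bit payload in the upper half of the tagged word, so tag/untag are 32-bit shifts. A runnable sketch (not the V8 API):

    // Sketch only: x64 Smi layout, payload in the upper 32 bits.
    #include <cstdint>
    #include <cassert>

    static int64_t smi_from_int(int32_t value) {
      return static_cast<int64_t>(value) << 32;  // low 32 bits stay zero
    }

    static int32_t smi_to_integer32(int64_t smi) {
      return static_cast<int32_t>(smi >> 32);    // arithmetic shift untags
    }

    int main() {
      int64_t tagged = smi_from_int(2);  // e.g. a Deoptimizer::BailoutType
      assert(smi_to_integer32(tagged) == 2);
      return 0;
    }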
| 1498 } | 1498 } |
| 1499 | 1499 |
| 1500 | 1500 |
| 1501 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1501 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
| 1502 // We check the stack limit as indicator that recompilation might be done. | 1502 // We check the stack limit as indicator that recompilation might be done. |
| 1503 Label ok; | 1503 Label ok; |
| 1504 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 1504 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 1505 __ j(above_equal, &ok); | 1505 __ j(above_equal, &ok); |
| 1506 { | 1506 { |
| 1507 FrameScope scope(masm, StackFrame::INTERNAL); | 1507 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1508 __ CallRuntime(Runtime::kHiddenStackGuard, 0); | 1508 __ CallRuntime(Runtime::kStackGuard, 0); |
| 1509 } | 1509 } |
| 1510 __ jmp(masm->isolate()->builtins()->OnStackReplacement(), | 1510 __ jmp(masm->isolate()->builtins()->OnStackReplacement(), |
| 1511 RelocInfo::CODE_TARGET); | 1511 RelocInfo::CODE_TARGET); |
| 1512 | 1512 |
| 1513 __ bind(&ok); | 1513 __ bind(&ok); |
| 1514 __ ret(0); | 1514 __ ret(0); |
| 1515 } | 1515 } |
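Note on Generate_OsrAfterStackCheck above: it reuses the stack-limit cue from Generate_InOptimizationQueue; below the limit it runs the stack guard (now Runtime::kStackGuard) and jumps to the OnStackReplacement builtin, otherwise it simply returns. A minimal sketch with stand-in names:

    // Sketch only: hypothetical stand-ins for the guard and OSR builtin.
    #include <cstdint>

    static uintptr_t rsp_value()    { return 0x9000; }
    static uintptr_t stack_limit()  { return 0x8000; }
    static void runtime_stack_guard()  {}  // Runtime::kStackGuard
    static void on_stack_replacement() {}  // builtins()->OnStackReplacement()

    static void osr_after_stack_check() {
      if (rsp_value() < stack_limit()) {  // recompilation may be done
        runtime_stack_guard();            // service the pending interrupt
        on_stack_replacement();           // jmp, RelocInfo::CODE_TARGET
        return;
      }
      // ret(0): nothing pending
    }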
| 1516 | 1516 |
| 1517 | 1517 |
| 1518 #undef __ | 1518 #undef __ |
| 1519 | 1519 |
| 1520 } } // namespace v8::internal | 1520 } } // namespace v8::internal |
| 1521 | 1521 |
| 1522 #endif // V8_TARGET_ARCH_X64 | 1522 #endif // V8_TARGET_ARCH_X64 |