OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 96 matching lines...)
107 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 107 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
108 // Checking whether the queued function is ready for install is optional, | 108 // Checking whether the queued function is ready for install is optional, |
109 // since we come across interrupts and stack checks elsewhere. However, | 109 // since we come across interrupts and stack checks elsewhere. However, |
110 // not checking may delay installing ready functions, and always checking | 110 // not checking may delay installing ready functions, and always checking |
111 // would be quite expensive. A good compromise is to first check against | 111 // would be quite expensive. A good compromise is to first check against |
112 // the stack limit as a cue for an interrupt signal. | 112 // the stack limit as a cue for an interrupt signal. |
113 Label ok; | 113 Label ok; |
114 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 114 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
115 __ j(above_equal, &ok); | 115 __ j(above_equal, &ok); |
116 | 116 |
117 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | 117 CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode); |
118 GenerateTailCallToReturnedCode(masm); | 118 GenerateTailCallToReturnedCode(masm); |
119 | 119 |
120 __ bind(&ok); | 120 __ bind(&ok); |
121 GenerateTailCallToSharedCode(masm); | 121 GenerateTailCallToSharedCode(masm); |
122 } | 122 } |
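
A minimal C++ sketch of the decision this stub emits (illustrative names, not
the V8 API): a stack pointer below the limit is taken as a cue that an
interrupt is pending, which is also a cheap moment to try installing queued
optimized code; otherwise control falls through to the unoptimized code.

    #include <cstdint>

    struct Isolate { uintptr_t stack_limit; };  // assumed stand-in type

    enum class Next { kTryInstallOptimizedCode, kRunSharedCode };

    // Mirrors CompareRoot(rsp, kStackLimitRootIndex) + j(above_equal, &ok).
    Next InOptimizationQueue(uintptr_t rsp, const Isolate& isolate) {
      if (rsp < isolate.stack_limit) return Next::kTryInstallOptimizedCode;
      return Next::kRunSharedCode;
    }
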
123 | 123 |
124 | 124 |
125 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 125 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
126 bool is_api_function, | 126 bool is_api_function, |
127 bool count_constructions, | 127 bool count_constructions, |
(...skipping 69 matching lines...)
197 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 197 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
198 __ decb(FieldOperand(rcx, | 198 __ decb(FieldOperand(rcx, |
199 SharedFunctionInfo::kConstructionCountOffset)); | 199 SharedFunctionInfo::kConstructionCountOffset)); |
200 __ j(not_zero, &allocate); | 200 __ j(not_zero, &allocate); |
201 | 201 |
202 __ Push(rax); | 202 __ Push(rax); |
203 __ Push(rdi); | 203 __ Push(rdi); |
204 | 204 |
205 __ Push(rdi); // constructor | 205 __ Push(rdi); // constructor |
206 // The call will replace the stub, so the countdown is only done once. | 206 // The call will replace the stub, so the countdown is only done once. |
207 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); | 207 __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1); |
208 | 208 |
209 __ Pop(rdi); | 209 __ Pop(rdi); |
210 __ Pop(rax); | 210 __ Pop(rax); |
211 | 211 |
212 __ bind(&allocate); | 212 __ bind(&allocate); |
213 } | 213 } |
214 | 214 |
215 // Now allocate the JSObject on the heap. | 215 // Now allocate the JSObject on the heap. |
216 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); | 216 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); |
217 __ shl(rdi, Immediate(kPointerSizeLog2)); | 217 __ shl(rdi, Immediate(kPointerSizeLog2)); |
(...skipping 148 matching lines...)
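
The countdown block above is deliberately one-shot: each construction
decrements a per-function counter, and when it reaches zero the runtime
finalizes the instance size and replaces the stub, so the counter is never
touched again. A hedged C++ sketch of that effect (stand-in types, not V8's):

    #include <cstdint>

    struct SharedFunctionInfo { uint8_t construction_count; };  // assumed

    // Mirrors decb(...kConstructionCountOffset) + j(not_zero, &allocate).
    bool ShouldFinalizeInstanceSize(SharedFunctionInfo* shared) {
      return --shared->construction_count == 0;
    }
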
366 // Get the cell or allocation site. | 366 // Get the cell or allocation site. |
367 __ movp(rdi, Operand(rsp, kPointerSize*2)); | 367 __ movp(rdi, Operand(rsp, kPointerSize*2)); |
368 __ Push(rdi); | 368 __ Push(rdi); |
369 offset = kPointerSize; | 369 offset = kPointerSize; |
370 } | 370 } |
371 | 371 |
372 // Must restore rdi (constructor) before calling runtime. | 372 // Must restore rdi (constructor) before calling runtime. |
373 __ movp(rdi, Operand(rsp, offset)); | 373 __ movp(rdi, Operand(rsp, offset)); |
374 __ Push(rdi); | 374 __ Push(rdi); |
375 if (create_memento) { | 375 if (create_memento) { |
376 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); | 376 __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2); |
377 } else { | 377 } else { |
378 __ CallRuntime(Runtime::kNewObject, 1); | 378 __ CallRuntime(Runtime::kHiddenNewObject, 1); |
379 } | 379 } |
380 __ movp(rbx, rax); // store result in rbx | 380 __ movp(rbx, rax); // store result in rbx |
381 | 381 |
382 // If we ended up using the runtime, and we want a memento, then the | 382 // If we ended up using the runtime, and we want a memento, then the |
383 // runtime call made it for us, and we shouldn't do the create-count | 383 // runtime call made it for us, and we shouldn't do the create-count |
384 // increment. | 384 // increment. |
385 Label count_incremented; | 385 Label count_incremented; |
386 if (create_memento) { | 386 if (create_memento) { |
387 __ jmp(&count_incremented); | 387 __ jmp(&count_incremented); |
388 } | 388 } |
(...skipping 232 matching lines...)
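
On this slow path the choice of runtime entry also decides who bumps the
allocation-site count: the two-argument call both allocates the object and
increments the count, so the stub jumps straight to count_incremented. A
sketch under assumed signatures (these are not the real runtime entries):

    struct Object;
    Object* RuntimeNewObjectWithAllocationSite(Object* ctor, Object* site);
    Object* RuntimeNewObject(Object* ctor);

    Object* SlowPathNew(Object* ctor, Object* site, bool create_memento,
                        bool* count_already_incremented) {
      if (create_memento) {
        *count_already_incremented = true;  // mirrors jmp(&count_incremented)
        return RuntimeNewObjectWithAllocationSite(ctor, site);
      }
      *count_already_incremented = false;
      return RuntimeNewObject(ctor);
    }
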
621 Generate_JSEntryTrampolineHelper(masm, false); | 621 Generate_JSEntryTrampolineHelper(masm, false); |
622 } | 622 } |
623 | 623 |
624 | 624 |
625 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 625 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
626 Generate_JSEntryTrampolineHelper(masm, true); | 626 Generate_JSEntryTrampolineHelper(masm, true); |
627 } | 627 } |
628 | 628 |
629 | 629 |
630 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | 630 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { |
631 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); | 631 CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized); |
632 GenerateTailCallToReturnedCode(masm); | 632 GenerateTailCallToReturnedCode(masm); |
633 } | 633 } |
634 | 634 |
635 | 635 |
636 static void CallCompileOptimized(MacroAssembler* masm, | 636 static void CallCompileOptimized(MacroAssembler* masm, |
637 bool concurrent) { | 637 bool concurrent) { |
638 FrameScope scope(masm, StackFrame::INTERNAL); | 638 FrameScope scope(masm, StackFrame::INTERNAL); |
639 // Push a copy of the function onto the stack. | 639 // Push a copy of the function onto the stack. |
640 __ Push(rdi); | 640 __ Push(rdi); |
641 // Function is also the parameter to the runtime call. | 641 // Function is also the parameter to the runtime call. |
642 __ Push(rdi); | 642 __ Push(rdi); |
643 // Whether to compile in a background thread. | 643 // Whether to compile in a background thread. |
644 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); | 644 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
645 | 645 |
646 __ CallRuntime(Runtime::kCompileOptimized, 2); | 646 __ CallRuntime(Runtime::kHiddenCompileOptimized, 2); |
647 // Restore receiver. | 647 // Restore receiver. |
648 __ Pop(rdi); | 648 __ Pop(rdi); |
649 } | 649 } |
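
The double Push(rdi) above is the whole calling convention here: one copy
survives the runtime call and is restored afterwards, the other is consumed
as the first runtime argument, followed by the concurrency flag. A rough C++
rendition (assumed names, not the V8 API):

    struct Function;
    struct Code;
    Code* CompileOptimized(Function* fn, bool concurrent);  // runtime entry

    Code* CallCompileOptimizedSketch(Function* fn, bool concurrent) {
      Function* saved = fn;                           // first Push(rdi)
      Code* code = CompileOptimized(fn, concurrent);  // second Push + flag
      fn = saved;                                     // Pop(rdi)
      return code;                                    // tail-called by caller
    }
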
650 | 650 |
651 | 651 |
652 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 652 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
653 CallCompileOptimized(masm, false); | 653 CallCompileOptimized(masm, false); |
654 GenerateTailCallToReturnedCode(masm); | 654 GenerateTailCallToReturnedCode(masm); |
655 } | 655 } |
656 | 656 |
(...skipping 80 matching lines...)
737 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 737 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
738 SaveFPRegsMode save_doubles) { | 738 SaveFPRegsMode save_doubles) { |
739 // Enter an internal frame. | 739 // Enter an internal frame. |
740 { | 740 { |
741 FrameScope scope(masm, StackFrame::INTERNAL); | 741 FrameScope scope(masm, StackFrame::INTERNAL); |
742 | 742 |
743 // Preserve registers across notification; this is important for compiled | 743 // Preserve registers across notification; this is important for compiled |
744 // stubs that tail-call the runtime on deopt, passing their parameters in | 744 // stubs that tail-call the runtime on deopt, passing their parameters in |
745 // registers. | 745 // registers. |
746 __ Pushad(); | 746 __ Pushad(); |
747 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | 747 __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles); |
748 __ Popad(); | 748 __ Popad(); |
749 // Tear down internal frame. | 749 // Tear down internal frame. |
750 } | 750 } |
751 | 751 |
752 __ Pop(MemOperand(rsp, 0)); // Ignore state offset | 752 __ Pop(MemOperand(rsp, 0)); // Ignore state offset |
753 __ ret(0); // Return to IC Miss stub, continuation still on stack. | 753 __ ret(0); // Return to IC Miss stub, continuation still on stack. |
754 } | 754 } |
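
The Pushad/Popad pair is the point of this helper: a deoptimizing stub may
still hold live parameters in registers, so the full register file is spilled
around the notification, making the call invisible to the caller. Sketched in
C++ with an assumed stand-in for the register file:

    struct RegisterState { /* rax..r15, plus XMM regs if doubles saved */ };
    void NotifyStubFailure(bool save_doubles);  // assumed runtime entry

    void NotifyStubFailurePreserving(RegisterState* regs, bool save_doubles) {
      RegisterState saved = *regs;  // Pushad (FP regs too if save_doubles)
      NotifyStubFailure(save_doubles);
      *regs = saved;                // Popad
    }
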
755 | 755 |
756 | 756 |
757 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 757 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
758 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 758 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
759 } | 759 } |
760 | 760 |
761 | 761 |
762 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 762 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
763 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 763 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
764 } | 764 } |
765 | 765 |
766 | 766 |
767 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 767 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
768 Deoptimizer::BailoutType type) { | 768 Deoptimizer::BailoutType type) { |
769 // Enter an internal frame. | 769 // Enter an internal frame. |
770 { | 770 { |
771 FrameScope scope(masm, StackFrame::INTERNAL); | 771 FrameScope scope(masm, StackFrame::INTERNAL); |
772 | 772 |
773 // Pass the deoptimization type to the runtime system. | 773 // Pass the deoptimization type to the runtime system. |
774 __ Push(Smi::FromInt(static_cast<int>(type))); | 774 __ Push(Smi::FromInt(static_cast<int>(type))); |
775 | 775 |
776 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 776 __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1); |
777 // Tear down internal frame. | 777 // Tear down internal frame. |
778 } | 778 } |
779 | 779 |
780 // Get the full codegen state from the stack and untag it. | 780 // Get the full codegen state from the stack and untag it. |
781 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize)); | 781 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize)); |
782 | 782 |
783 // Switch on the state. | 783 // Switch on the state. |
784 Label not_no_registers, not_tos_rax; | 784 Label not_no_registers, not_tos_rax; |
785 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS)); | 785 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS)); |
786 __ j(not_equal, ¬_no_registers, Label::kNear); | 786 __ j(not_equal, ¬_no_registers, Label::kNear); |
(...skipping 692 matching lines...)
1479 } | 1479 } |
1480 | 1480 |
1481 | 1481 |
1482 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1482 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
1483 // We check the stack limit as an indicator that recompilation might be done. | 1483 // We check the stack limit as an indicator that recompilation might be done. |
1484 Label ok; | 1484 Label ok; |
1485 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 1485 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
1486 __ j(above_equal, &ok); | 1486 __ j(above_equal, &ok); |
1487 { | 1487 { |
1488 FrameScope scope(masm, StackFrame::INTERNAL); | 1488 FrameScope scope(masm, StackFrame::INTERNAL); |
1489 __ CallRuntime(Runtime::kStackGuard, 0); | 1489 __ CallRuntime(Runtime::kHiddenStackGuard, 0); |
1490 } | 1490 } |
1491 __ jmp(masm->isolate()->builtins()->OnStackReplacement(), | 1491 __ jmp(masm->isolate()->builtins()->OnStackReplacement(), |
1492 RelocInfo::CODE_TARGET); | 1492 RelocInfo::CODE_TARGET); |
1493 | 1493 |
1494 __ bind(&ok); | 1494 __ bind(&ok); |
1495 __ ret(0); | 1495 __ ret(0); |
1496 } | 1496 } |
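
The same stack-limit trick as in Generate_InOptimizationQueue is used here in
reverse: hitting the limit means an interrupt is pending, so the stack guard
runs (where OSR recompilation can be picked up) before jumping into the
OnStackReplacement builtin. As a hedged C++ sketch (illustrative names):

    #include <cstdint>

    struct Isolate { uintptr_t stack_limit; };  // assumed stand-in type
    void StackGuard();            // runtime entry
    void OnStackReplacement();    // builtin; control does not return here

    void OsrAfterStackCheck(uintptr_t rsp, const Isolate& isolate) {
      if (rsp >= isolate.stack_limit) return;  // j(above_equal, &ok); ret(0)
      StackGuard();
      OnStackReplacement();
    }
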
1497 | 1497 |
1498 | 1498 |
1499 #undef __ | 1499 #undef __ |
1500 | 1500 |
1501 } } // namespace v8::internal | 1501 } } // namespace v8::internal |
1502 | 1502 |
1503 #endif // V8_TARGET_ARCH_X64 | 1503 #endif // V8_TARGET_ARCH_X64 |