| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
| 6 | 6 |
| 7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
| 8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 748 matching lines...) |
| 759 __ bind(&switch_to_different_code_kind); | 759 __ bind(&switch_to_different_code_kind); |
| 760 __ leave(); // Leave the frame so we can tail call. | 760 __ leave(); // Leave the frame so we can tail call. |
| 761 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 761 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 762 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); | 762 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); |
| 763 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); | 763 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); |
| 764 __ movp(FieldOperand(rdi, JSFunction::kCodeEntryOffset), rcx); | 764 __ movp(FieldOperand(rdi, JSFunction::kCodeEntryOffset), rcx); |
| 765 __ RecordWriteCodeEntryField(rdi, rcx, r15); | 765 __ RecordWriteCodeEntryField(rdi, rcx, r15); |
| 766 __ jmp(rcx); | 766 __ jmp(rcx); |
| 767 } | 767 } |
| 768 | 768 |
| 769 void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) { | |
| 770 // Save the function and context for call to CompileBaseline. | |
| 771 __ movp(rdi, Operand(rbp, StandardFrameConstants::kFunctionOffset)); | |
| 772 __ movp(kContextRegister, | |
| 773 Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 774 | |
| 775 // Leave the frame before recompiling for baseline so that we don't count as | |
| 776 // an activation on the stack. | |
| 777 LeaveInterpreterFrame(masm, rbx, rcx); | |
| 778 | |
| 779 { | |
| 780 FrameScope frame_scope(masm, StackFrame::INTERNAL); | |
| 781 // Push return value. | |
| 782 __ Push(rax); | |
| 783 | |
| 784 // Push function as argument and compile for baseline. | |
| 785 __ Push(rdi); | |
| 786 __ CallRuntime(Runtime::kCompileBaseline); | |
| 787 | |
| 788 // Restore return value. | |
| 789 __ Pop(rax); | |
| 790 } | |
| 791 __ ret(0); | |
| 792 } | |
| 793 | |
| 794 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args, | 769 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args, |
| 795 Register scratch1, Register scratch2, | 770 Register scratch1, Register scratch2, |
| 796 Label* stack_overflow) { | 771 Label* stack_overflow) { |
| 797 // Check the stack for overflow. We are not trying to catch | 772 // Check the stack for overflow. We are not trying to catch |
| 798 // interruptions (e.g. debug break and preemption) here, so the "real stack | 773 // interruptions (e.g. debug break and preemption) here, so the "real stack |
| 799 // limit" is checked. | 774 // limit" is checked. |
| 800 __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex); | 775 __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex); |
| 801 __ movp(scratch2, rsp); | 776 __ movp(scratch2, rsp); |
| 802 // Make scratch2 the space we have left. The stack might already be overflowed | 777 // Make scratch2 the space we have left. The stack might already be overflowed |
| 803 // here which will cause scratch2 to become negative. | 778 // here which will cause scratch2 to become negative. |
| (...skipping 2273 matching lines...) |
| 3077 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { | 3052 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { |
| 3078 Generate_OnStackReplacementHelper(masm, true); | 3053 Generate_OnStackReplacementHelper(masm, true); |
| 3079 } | 3054 } |
| 3080 | 3055 |
| 3081 #undef __ | 3056 #undef __ |
| 3082 | 3057 |
| 3083 } // namespace internal | 3058 } // namespace internal |
| 3084 } // namespace v8 | 3059 } // namespace v8 |
| 3085 | 3060 |
| 3086 #endif // V8_TARGET_ARCH_X64 | 3061 #endif // V8_TARGET_ARCH_X64 |
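Note on the Generate_StackOverflowCheck helper kept above (new lines 769-778): it loads the "real" stack limit root, takes the distance between rsp and that limit, and (in the lines elided by the skip marker) compares the remaining slot count against the number of arguments about to be pushed. A minimal sketch of the same idea in plain C++ follows; the names WouldOverflowStack and kPointerSize are illustrative assumptions, not V8 API, and the exact comparison in the elided assembly may differ.

#include <cstdint>

// Sketch only: mirrors the intent of the generated x64 code, which computes
// rsp minus the real stack limit, converts the difference to a slot count,
// and branches to the stack_overflow label if num_args does not fit.
constexpr uint64_t kPointerSize = 8;  // one machine word on x64 (assumption)

bool WouldOverflowStack(uintptr_t stack_pointer, uintptr_t real_stack_limit,
                        uint64_t num_args) {
  // The stack might already be past the limit (the source comment notes the
  // difference "becomes negative"); treat that as an overflow rather than
  // letting the unsigned subtraction wrap around.
  if (stack_pointer <= real_stack_limit) return true;
  uint64_t slots_left = (stack_pointer - real_stack_limit) / kPointerSize;
  return slots_left <= num_args;  // analogous to j(below_equal, stack_overflow)
}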