OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 42 matching lines...) |
53 } | 53 } |
54 | 54 |
55 // JumpToExternalReference expects rax to contain the number of arguments | 55 // JumpToExternalReference expects rax to contain the number of arguments |
56 // including the receiver and the extra arguments. | 56 // including the receiver and the extra arguments. |
57 __ addp(rax, Immediate(num_extra_args + 1)); | 57 __ addp(rax, Immediate(num_extra_args + 1)); |
58 | 58 |
59 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); | 59 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); |
60 } | 60 } |
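A quick check of that argument-count adjustment: the runtime entry expects the full count, i.e. the JS arguments plus the receiver plus whatever extra arguments the adaptor pushed. A minimal sketch of the same arithmetic (num_extra_args stands for whatever value the concrete adaptor passes in):

    // Mirrors: __ addp(rax, Immediate(num_extra_args + 1));
    int ArgCountForRuntime(int js_args, int num_extra_args) {
      return js_args + num_extra_args + 1;  // +1 accounts for the receiver
    }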
61 | 61 |
62 | 62 |
63 static void CallRuntimePassFunction( | |
64 MacroAssembler* masm, Runtime::FunctionId function_id) { | |
65 // ----------- S t a t e ------------- | |
66 // -- rdx : new target (preserved for callee) | |
67 // -- rdi : target function (preserved for callee) | |
68 // ----------------------------------- | |
69 | |
70 FrameScope scope(masm, StackFrame::INTERNAL); | |
71 // Push a copy of the target function and the new target. | |
72 __ Push(rdi); | |
73 __ Push(rdx); | |
74 // Function is also the parameter to the runtime call. | |
75 __ Push(rdi); | |
76 | |
77 __ CallRuntime(function_id, 1); | |
78 // Restore target function and new target. | |
79 __ Pop(rdx); | |
80 __ Pop(rdi); | |
81 } | |
82 | |
83 | |
84 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 63 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
85 __ movp(kScratchRegister, | 64 __ movp(kScratchRegister, |
86 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 65 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
87 __ movp(kScratchRegister, | 66 __ movp(kScratchRegister, |
88 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset)); | 67 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset)); |
89 __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize)); | 68 __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize)); |
90 __ jmp(kScratchRegister); | 69 __ jmp(kScratchRegister); |
91 } | 70 } |
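GenerateTailCallToSharedCode fetches the SharedFunctionInfo's code object and jumps straight to its first instruction; the leap over Code::kHeaderSize is what turns the tagged Code pointer into a raw entry address. A rough standalone sketch of that address arithmetic, assuming V8's usual heap-object tag of 1 and a placeholder header size (both constants are assumptions, not taken from this file):

    #include <cstdint>

    constexpr intptr_t kHeapObjectTag = 1;      // assumed V8 pointer tag
    constexpr intptr_t kCodeHeaderSize = 0x40;  // placeholder, not the real value

    // Mirrors: __ leap(reg, FieldOperand(reg, Code::kHeaderSize)); __ jmp(reg);
    intptr_t CodeEntryAddress(intptr_t tagged_code_pointer) {
      return tagged_code_pointer - kHeapObjectTag + kCodeHeaderSize;
    }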
92 | 71 |
| 72 static void GenerateTailCallToReturnedCode(MacroAssembler* masm, |
| 73 Runtime::FunctionId function_id) { |
| 74 // ----------- S t a t e ------------- |
| 75 // -- rax : argument count (preserved for callee) |
| 76 // -- rdx : new target (preserved for callee) |
| 77 // -- rdi : target function (preserved for callee) |
| 78 // ----------------------------------- |
| 79 { |
| 80 FrameScope scope(masm, StackFrame::INTERNAL); |
| 81 // Push the number of arguments to the callee. |
| 82 __ Integer32ToSmi(rax, rax); |
| 83 __ Push(rax); |
| 84 // Push a copy of the target function and the new target. |
| 85 __ Push(rdi); |
| 86 __ Push(rdx); |
| 87 // Function is also the parameter to the runtime call. |
| 88 __ Push(rdi); |
93 | 89 |
94 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { | 90 __ CallRuntime(function_id, 1); |
95 __ leap(rax, FieldOperand(rax, Code::kHeaderSize)); | 91 __ movp(rbx, rax); |
96 __ jmp(rax); | 92 |
| 93 // Restore target function and new target. |
| 94 __ Pop(rdx); |
| 95 __ Pop(rdi); |
| 96 __ Pop(rax); |
| 97 __ SmiToInteger32(rax, rax); |
| 98 } |
| 99 __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize)); |
| 100 __ jmp(rbx); |
97 } | 101 } |
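The reworked helper replaces the old CallRuntimePassFunction/GenerateTailCallToReturnedCode pair: it now also preserves the argument count by tagging rax as a Smi, pushing it across the runtime call, and untagging it afterwards, while the returned Code object travels in rbx to the tail jump. A minimal standalone sketch of that Smi round-trip, assuming the x64 layout where the 32-bit payload sits in the upper half of the word (kSmiShift == 32 is an assumption, not taken from this diff):

    #include <cassert>
    #include <cstdint>

    constexpr int kSmiShift = 32;  // assumed x64 Smi shift

    int64_t Integer32ToSmi(int32_t value) {  // cf. __ Integer32ToSmi(rax, rax)
      return static_cast<int64_t>(value) << kSmiShift;
    }

    int32_t SmiToInteger32(int64_t smi) {    // cf. __ SmiToInteger32(rax, rax)
      return static_cast<int32_t>(smi >> kSmiShift);
    }

    int main() {
      int32_t argc = 3;  // survives the runtime call as a properly tagged value
      assert(SmiToInteger32(Integer32ToSmi(argc)) == argc);
      return 0;
    }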
98 | 102 |
99 | 103 |
100 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 104 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
101 // Checking whether the queued function is ready for install is optional, | 105 // Checking whether the queued function is ready for install is optional, |
102 // since we come across interrupts and stack checks elsewhere. However, | 106 // since we come across interrupts and stack checks elsewhere. However, |
103 // not checking may delay installing ready functions, and always checking | 107 // not checking may delay installing ready functions, and always checking |
104 // would be quite expensive. A good compromise is to first check against | 108 // would be quite expensive. A good compromise is to first check against |
105 // stack limit as a cue for an interrupt signal. | 109 // stack limit as a cue for an interrupt signal. |
106 Label ok; | 110 Label ok; |
107 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 111 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
108 __ j(above_equal, &ok); | 112 __ j(above_equal, &ok); |
109 | 113 |
110 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | 114 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode); |
111 GenerateTailCallToReturnedCode(masm); | |
112 | 115 |
113 __ bind(&ok); | 116 __ bind(&ok); |
114 GenerateTailCallToSharedCode(masm); | 117 GenerateTailCallToSharedCode(masm); |
115 } | 118 } |
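The comment above spells out the compromise: comparing rsp against the stack limit serves as a cheap cue for a pending interrupt. If rsp is still above the limit, the builtin skips straight to the shared code; only when the limit has been hit does it pay for the kTryInstallOptimizedCode runtime call. The decision, sketched as plain C++ (the enum and function names are hypothetical stand-ins for the generated jumps):

    #include <cstdint>

    enum class NextStep { kTailCallSharedCode, kTryInstallOptimizedCode };

    // cf. __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    //     __ j(above_equal, &ok);
    NextStep ChooseNextStep(uintptr_t rsp, uintptr_t stack_limit) {
      return rsp >= stack_limit ? NextStep::kTailCallSharedCode
                                : NextStep::kTryInstallOptimizedCode;
    }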
116 | 119 |
117 | 120 |
118 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 121 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
119 bool is_api_function, | 122 bool is_api_function, |
120 bool create_implicit_receiver, | 123 bool create_implicit_receiver, |
121 bool check_derived_construct) { | 124 bool check_derived_construct) { |
(...skipping 771 matching lines...) |
893 // This simulates the initial call to bytecode handlers in interpreter entry | 896 // This simulates the initial call to bytecode handlers in interpreter entry |
894 // trampoline. The return will never actually be taken, but our stack walker | 897 // trampoline. The return will never actually be taken, but our stack walker |
895 // uses this address to determine whether a frame is interpreted. | 898 // uses this address to determine whether a frame is interpreted. |
896 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); | 899 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); |
897 | 900 |
898 Generate_EnterBytecodeDispatch(masm); | 901 Generate_EnterBytecodeDispatch(masm); |
899 } | 902 } |
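The pushed InterpreterEntryTrampoline address is never actually returned to; it only marks the frame. A stack walker can then classify a frame as interpreted by asking whether its return address lies inside the trampoline's code range. An illustrative sketch of that test (the start/size parameters are hypothetical; in V8 they would come from the trampoline's Code object):

    #include <cstddef>
    #include <cstdint>

    bool LooksLikeInterpretedFrame(uintptr_t return_address,
                                   uintptr_t trampoline_start,
                                   size_t trampoline_size) {
      return return_address >= trampoline_start &&
             return_address < trampoline_start + trampoline_size;
    }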
900 | 903 |
901 | 904 |
902 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | 905 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |
903 CallRuntimePassFunction(masm, Runtime::kCompileLazy); | 906 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); |
904 GenerateTailCallToReturnedCode(masm); | |
905 } | 907 } |
906 | 908 |
907 | 909 |
908 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 910 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
909 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent); | 911 GenerateTailCallToReturnedCode(masm, |
910 GenerateTailCallToReturnedCode(masm); | 912 Runtime::kCompileOptimized_NotConcurrent); |
911 } | 913 } |
912 | 914 |
913 | 915 |
914 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | 916 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { |
915 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent); | 917 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); |
916 GenerateTailCallToReturnedCode(masm); | |
917 } | 918 } |
918 | 919 |
919 | 920 |
920 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 921 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { |
921 // For now, we are relying on the fact that make_code_young doesn't do any | 922 // For now, we are relying on the fact that make_code_young doesn't do any |
922 // garbage collection which allows us to save/restore the registers without | 923 // garbage collection which allows us to save/restore the registers without |
923 // worrying about which of them contain pointers. We also don't build an | 924 // worrying about which of them contain pointers. We also don't build an |
924 // internal frame to make the code faster, since we shouldn't have to do stack | 925 // internal frame to make the code faster, since we shouldn't have to do stack |
925 // crawls in MakeCodeYoung. This seems a bit fragile. | 926 // crawls in MakeCodeYoung. This seems a bit fragile. |
926 | 927 |
(...skipping 1921 matching lines...) |
2848 __ ret(0); | 2849 __ ret(0); |
2849 } | 2850 } |
2850 | 2851 |
2851 | 2852 |
2852 #undef __ | 2853 #undef __ |
2853 | 2854 |
2854 } // namespace internal | 2855 } // namespace internal |
2855 } // namespace v8 | 2856 } // namespace v8 |
2856 | 2857 |
2857 #endif // V8_TARGET_ARCH_X64 | 2858 #endif // V8_TARGET_ARCH_X64 |