OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
6 | 6 |
7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 42 matching lines...)
53 __ PushReturnAddressFrom(ecx); | 53 __ PushReturnAddressFrom(ecx); |
54 } | 54 } |
55 | 55 |
56 // JumpToExternalReference expects eax to contain the number of arguments | 56 // JumpToExternalReference expects eax to contain the number of arguments |
57 // including the receiver and the extra arguments. | 57 // including the receiver and the extra arguments. |
58 __ add(eax, Immediate(num_extra_args + 1)); | 58 __ add(eax, Immediate(num_extra_args + 1)); |
59 | 59 |
60 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); | 60 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); |
61 } | 61 } |
62 | 62 |
63 | 63 static void GenerateTailCallToReturnedCode(MacroAssembler* masm, |
64 static void CallRuntimePassFunction( | 64 Runtime::FunctionId function_id) { |
65 MacroAssembler* masm, Runtime::FunctionId function_id) { | |
66 // ----------- S t a t e ------------- | 65 // ----------- S t a t e ------------- |
| 66 // -- eax : argument count (preserved for callee) |
67 // -- edx : new target (preserved for callee) | 67 // -- edx : new target (preserved for callee) |
68 // -- edi : target function (preserved for callee) | 68 // -- edi : target function (preserved for callee) |
69 // ----------------------------------- | 69 // ----------------------------------- |
| 70 { |
| 71 FrameScope scope(masm, StackFrame::INTERNAL); |
| 72 // Push the number of arguments to the callee. |
| 73 __ SmiTag(eax); |
| 74 __ push(eax); |
| 75 // Push a copy of the target function and the new target. |
| 76 __ push(edi); |
| 77 __ push(edx); |
| 78 // Function is also the parameter to the runtime call. |
| 79 __ push(edi); |
70 | 80 |
71 FrameScope scope(masm, StackFrame::INTERNAL); | 81 __ CallRuntime(function_id, 1); |
72 // Push a copy of the target function and the new target. | 82 __ mov(ebx, eax); |
73 __ push(edi); | |
74 __ push(edx); | |
75 // Function is also the parameter to the runtime call. | |
76 __ push(edi); | |
77 | 83 |
78 __ CallRuntime(function_id, 1); | 84 // Restore target function and new target. |
79 // Restore target function and new target. | 85 __ pop(edx); |
80 __ pop(edx); | 86 __ pop(edi); |
81 __ pop(edi); | 87 __ pop(eax); |
| 88 __ SmiUntag(eax); |
| 89 } |
| 90 |
| 91 __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize)); |
| 92 __ jmp(ebx); |
| 93 } |
| 94 |
| 95 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
| 96 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); |
| 97 __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset)); |
| 98 __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize)); |
| 99 __ jmp(ebx); |
82 } | 100 } |
83 | 101 |
84 | 102 |
85 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | |
86 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | |
87 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kCodeOffset)); | |
88 __ lea(eax, FieldOperand(eax, Code::kHeaderSize)); | |
89 __ jmp(eax); | |
90 } | |
91 | |
92 | |
93 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { | |
94 __ lea(eax, FieldOperand(eax, Code::kHeaderSize)); | |
95 __ jmp(eax); | |
96 } | |
97 | |
98 | |
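(Note: the hunk above folds the old CallRuntimePassFunction / GenerateTailCallToReturnedCode pair into a single helper that takes the Runtime::FunctionId, saves the argument count alongside the target function and new target, and tail-jumps to the Code object returned by the runtime call. A minimal standalone C++ analogue of that control flow, using hypothetical stand-ins rather than V8's MacroAssembler API:

// Hypothetical, simplified analogue (not V8 API): the runtime call hands back
// a code entry point, and control is transferred to it with the caller's
// argument count restored.
#include <cstdio>

using CodeEntry = int (*)(int arg_count);

static int CompiledEntry(int arg_count) {
  std::printf("entering returned code with %d args\n", arg_count);
  return arg_count;
}

// Stand-in for a runtime call such as Runtime::kCompileLazy.
static CodeEntry RuntimeCall() { return &CompiledEntry; }

static int TailCallToReturnedCode(int arg_count) {
  // The builtin saves eax/edi/edx, calls the runtime, restores them, then
  // jumps to the returned Code object's instruction start; here that jump
  // is modeled as a plain tail call.
  CodeEntry entry = RuntimeCall();
  return entry(arg_count);
}

int main() { return TailCallToReturnedCode(3) == 3 ? 0 : 1; }
)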
99 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 103 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
100 // Checking whether the queued function is ready for install is optional, | 104 // Checking whether the queued function is ready for install is optional, |
101 // since we come across interrupts and stack checks elsewhere. However, | 105 // since we come across interrupts and stack checks elsewhere. However, |
102 // not checking may delay installing ready functions, and always checking | 106 // not checking may delay installing ready functions, and always checking |
103 // would be quite expensive. A good compromise is to first check against | 107 // would be quite expensive. A good compromise is to first check against |
104 // stack limit as a cue for an interrupt signal. | 108 // stack limit as a cue for an interrupt signal. |
105 Label ok; | 109 Label ok; |
106 ExternalReference stack_limit = | 110 ExternalReference stack_limit = |
107 ExternalReference::address_of_stack_limit(masm->isolate()); | 111 ExternalReference::address_of_stack_limit(masm->isolate()); |
108 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 112 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
109 __ j(above_equal, &ok, Label::kNear); | 113 __ j(above_equal, &ok, Label::kNear); |
110 | 114 |
111 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | 115 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode); |
112 GenerateTailCallToReturnedCode(masm); | |
113 | 116 |
114 __ bind(&ok); | 117 __ bind(&ok); |
115 GenerateTailCallToSharedCode(masm); | 118 GenerateTailCallToSharedCode(masm); |
116 } | 119 } |
117 | 120 |
118 | |
119 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 121 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
120 bool is_api_function, | 122 bool is_api_function, |
121 bool create_implicit_receiver, | 123 bool create_implicit_receiver, |
122 bool check_derived_construct) { | 124 bool check_derived_construct) { |
123 // ----------- S t a t e ------------- | 125 // ----------- S t a t e ------------- |
124 // -- eax: number of arguments | 126 // -- eax: number of arguments |
125 // -- edi: constructor function | 127 // -- edi: constructor function |
126 // -- ebx: allocation site or undefined | 128 // -- ebx: allocation site or undefined |
127 // -- edx: new target | 129 // -- edx: new target |
128 // ----------------------------------- | 130 // ----------------------------------- |
(...skipping 716 matching lines...)
845 // This simulates the initial call to bytecode handlers in interpreter entry | 847 // This simulates the initial call to bytecode handlers in interpreter entry |
846 // trampoline. The return will never actually be taken, but our stack walker | 848 // trampoline. The return will never actually be taken, but our stack walker |
847 // uses this address to determine whether a frame is interpreted. | 849 // uses this address to determine whether a frame is interpreted. |
848 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); | 850 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); |
849 | 851 |
850 Generate_EnterBytecodeDispatch(masm); | 852 Generate_EnterBytecodeDispatch(masm); |
851 } | 853 } |
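(Note: as the comment in the hunk above says, the pushed InterpreterEntryTrampoline address is never actually returned to; it only lets the stack walker classify the frame as interpreted. A hedged sketch of that classification idea, with hypothetical types rather than V8's frame iterator:

// Hypothetical illustration (not V8's StackFrame API): a walker can tag a
// frame as interpreted when its return address falls inside the interpreter
// entry trampoline's code range.
#include <cstdint>

struct CodeRange {
  std::uintptr_t start;
  std::uintptr_t end;
};

static bool IsInterpretedFrame(std::uintptr_t return_address,
                               const CodeRange& trampoline) {
  return return_address >= trampoline.start && return_address < trampoline.end;
}

int main() {
  CodeRange trampoline{0x1000, 0x2000};
  return IsInterpretedFrame(0x1800, trampoline) ? 0 : 1;
}
)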
852 | 854 |
853 | 855 |
854 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | 856 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |
855 CallRuntimePassFunction(masm, Runtime::kCompileLazy); | 857 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); |
856 GenerateTailCallToReturnedCode(masm); | |
857 } | 858 } |
858 | 859 |
859 | 860 |
860 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 861 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
861 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent); | 862 GenerateTailCallToReturnedCode(masm, |
862 GenerateTailCallToReturnedCode(masm); | 863 Runtime::kCompileOptimized_NotConcurrent); |
863 } | 864 } |
864 | 865 |
865 | 866 |
866 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | 867 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { |
867 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent); | 868 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); |
868 GenerateTailCallToReturnedCode(masm); | |
869 } | 869 } |
870 | 870 |
871 | 871 |
872 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 872 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { |
873 // For now, we are relying on the fact that make_code_young doesn't do any | 873 // For now, we are relying on the fact that make_code_young doesn't do any |
874 // garbage collection which allows us to save/restore the registers without | 874 // garbage collection which allows us to save/restore the registers without |
875 // worrying about which of them contain pointers. We also don't build an | 875 // worrying about which of them contain pointers. We also don't build an |
876 // internal frame to make the code faster, since we shouldn't have to do stack | 876 // internal frame to make the code faster, since we shouldn't have to do stack |
877 // crawls in MakeCodeYoung. This seems a bit fragile. | 877 // crawls in MakeCodeYoung. This seems a bit fragile. |
878 | 878 |
(...skipping 1922 matching lines...)
2801 | 2801 |
2802 __ bind(&ok); | 2802 __ bind(&ok); |
2803 __ ret(0); | 2803 __ ret(0); |
2804 } | 2804 } |
2805 | 2805 |
2806 #undef __ | 2806 #undef __ |
2807 } // namespace internal | 2807 } // namespace internal |
2808 } // namespace v8 | 2808 } // namespace v8 |
2809 | 2809 |
2810 #endif // V8_TARGET_ARCH_X87 | 2810 #endif // V8_TARGET_ARCH_X87 |