OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 85 matching lines...)
96 | 96 |
97 | 97 |
98 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode, | 98 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode, |
99 Condition cond) { | 99 Condition cond) { |
100 ASSERT(RelocInfo::IsCodeTarget(rmode)); | 100 ASSERT(RelocInfo::IsCodeTarget(rmode)); |
101 // 'code' is always generated ARM code, never THUMB code | 101 // 'code' is always generated ARM code, never THUMB code |
102 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | 102 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond); |
103 } | 103 } |
104 | 104 |
105 | 105 |
106 int MacroAssembler::CallSize(Register target, Condition cond) { | |
107 #if USE_BLX | |
108 return kInstrSize; | |
109 #else | |
110 return 2 * kInstrSize; | |
111 #endif | |
112 } | |
113 | |
114 | |
106 void MacroAssembler::Call(Register target, Condition cond) { | 115 void MacroAssembler::Call(Register target, Condition cond) { |
116 #ifdef DEBUG | |
117 int pre_position = pc_offset(); | |
118 #endif | |
119 | |
107 #if USE_BLX | 120 #if USE_BLX |
108 blx(target, cond); | 121 blx(target, cond); |
109 #else | 122 #else |
110 // set lr for return at current pc + 8 | 123 // set lr for return at current pc + 8 |
111 mov(lr, Operand(pc), LeaveCC, cond); | 124 { BlockConstPoolScope block_const_pool(this); |
112 mov(pc, Operand(target), LeaveCC, cond); | 125 mov(lr, Operand(pc), LeaveCC, cond); |
126 mov(pc, Operand(target), LeaveCC, cond); | |
127 } | |
128 #endif | |
129 | |
130 #ifdef DEBUG | |
131 int post_position = pc_offset(); | |
132 CHECK_EQ(pre_position + CallSize(target, cond), post_position); | |
113 #endif | 133 #endif |
114 } | 134 } |
115 | 135 |
116 | 136 |
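
The new Call(Register) brackets its body with a DEBUG-only check: pc_offset() is sampled before and after emission and compared against CallSize(). Below is a minimal standalone sketch of that predict/emit/verify idiom, using a toy assembler rather than V8's MacroAssembler; all names in it are illustrative only.

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Toy stand-in for an assembler: a byte buffer plus pc_offset(), which
// reports how many bytes have been emitted so far.
class ToyAssembler {
 public:
  static const int kInstrSize = 4;  // ARM instructions are 4 bytes wide.

  int pc_offset() const { return static_cast<int>(buffer_.size()); }

  void emit(uint32_t instr) {
    for (int i = 0; i < kInstrSize; ++i) {
      buffer_.push_back(static_cast<uint8_t>(instr >> (8 * i)));
    }
  }

  // Predicted size, in bytes, of the call sequence emitted by Call().
  int CallSize() const { return 2 * kInstrSize; }

  // Emit a two-instruction call sequence and verify the prediction,
  // mirroring the pre_position/post_position CHECK_EQ pattern in the patch.
  void Call(uint32_t branch_instr) {
    int pre_position = pc_offset();
    emit(0xE1A0E00Fu);   // mov lr, pc
    emit(branch_instr);  // e.g. mov pc, <target register>
    int post_position = pc_offset();
    assert(post_position == pre_position + CallSize());
  }

 private:
  std::vector<uint8_t> buffer_;
};

int main() {
  ToyAssembler masm;
  masm.Call(0xE1A0F000u);  // mov pc, r0 -- the size check above passes
  return 0;
}
```
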
117 void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode, | 137 int MacroAssembler::CallSize( |
118 Condition cond) { | 138 intptr_t target, RelocInfo::Mode rmode, Condition cond) { |
139 int size = 2 * kInstrSize; | |
140 Instr mov_instr = cond | MOV | LeaveCC; | |
141 if (!Operand(target, rmode).is_single_instruction(mov_instr)) { | |
142 size += kInstrSize; | |
Lasse Reichstein, 2011/03/10 11:58:48: Is this always one instruction to add? Or could it
Søren Thygesen Gjesse, 2011/03/10 12:47:02: The initial size is two and the +1 here accounts f
143 } | |
144 return size; | |
145 } | |
146 | |
147 | |
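
The new CallSize(intptr_t, ...) starts from two instructions (load the target, then branch) and adds one more when the target operand cannot be encoded in a single mov; the inline review comment above asks whether one extra instruction is always enough. The sketch below only mirrors that arithmetic under the same assumption; FitsInArmImmediate is a rough stand-in for Operand::is_single_instruction(), which in V8 also takes other encodings and relocation into account.

```cpp
#include <cstdint>
#include <iostream>

const int kInstrSize = 4;  // bytes per ARM instruction

// Rough check for the classic ARM data-processing immediate: an 8-bit
// value rotated right by an even amount.
bool FitsInArmImmediate(uint32_t imm) {
  for (int rot = 0; rot < 32; rot += 2) {
    uint32_t v = (rot == 0) ? imm : ((imm << rot) | (imm >> (32 - rot)));
    if (v <= 0xFF) return true;
  }
  return false;
}

// Mirrors the size computation in the new CallSize(intptr_t, ...):
// two instructions for "load target + branch", plus one more when the
// target does not fit a single mov.
int CallSizeModel(uint32_t target) {
  int size = 2 * kInstrSize;
  if (!FitsInArmImmediate(target)) size += kInstrSize;
  return size;
}

int main() {
  std::cout << CallSizeModel(0x000000FFu) << "\n";  // 8: fits a single mov
  std::cout << CallSizeModel(0x12345678u) << "\n";  // 12: needs an extra instruction
  return 0;
}
```
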
148 void MacroAssembler::Call( | |
149 intptr_t target, RelocInfo::Mode rmode, Condition cond) { | |
150 #ifdef DEBUG | |
151 int pre_position = pc_offset(); | |
152 #endif | |
153 | |
119 #if USE_BLX | 154 #if USE_BLX |
120 // On ARMv5 and after, the recommended call sequence is: | 155 // On ARMv5 and after, the recommended call sequence is: |
121 // ldr ip, [pc, #...] | 156 // ldr ip, [pc, #...] |
122 // blx ip | 157 // blx ip |
123 | 158 |
124 // The two instructions (ldr and blx) could be separated by a constant | 159 // The two instructions (ldr and blx) could be separated by a constant |
125 // pool and the code would still work. The issue comes from the | 160 // pool and the code would still work. The issue comes from the |
126 // patching code which expects the ldr to be just above the blx. | 161 // patching code which expects the ldr to be just above the blx. |
127 { BlockConstPoolScope block_const_pool(this); | 162 { BlockConstPoolScope block_const_pool(this); |
128 // Statement positions are expected to be recorded when the target | 163 // Statement positions are expected to be recorded when the target |
129 // address is loaded. The mov method will automatically record | 164 // address is loaded. The mov method will automatically record |
130 // positions when pc is the target. Since this is not the case here, | 165 // positions when pc is the target. Since this is not the case here, |
131 // we have to do it explicitly. | 166 // we have to do it explicitly. |
132 positions_recorder()->WriteRecordedPositions(); | 167 positions_recorder()->WriteRecordedPositions(); |
133 | 168 |
134 mov(ip, Operand(target, rmode), LeaveCC, cond); | 169 mov(ip, Operand(target, rmode), LeaveCC, cond); |
135 blx(ip, cond); | 170 blx(ip, cond); |
136 } | 171 } |
137 | 172 |
138 ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); | 173 ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); |
139 #else | 174 #else |
140 // Set lr for return at current pc + 8. | 175 { BlockConstPoolScope block_const_pool(this); |
141 mov(lr, Operand(pc), LeaveCC, cond); | 176 // Set lr for return at current pc + 8. |
142 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. | 177 mov(lr, Operand(pc), LeaveCC, cond); |
143 mov(pc, Operand(target, rmode), LeaveCC, cond); | 178 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. |
179 mov(pc, Operand(target, rmode), LeaveCC, cond); | |
180 } | |
181 ASSERT(kCallTargetAddressOffset == kInstrSize); | |
182 #endif | |
144 | 183 |
145 ASSERT(kCallTargetAddressOffset == kInstrSize); | 184 #ifdef DEBUG |
185 int post_position = pc_offset(); | |
186 CHECK_EQ(pre_position + CallSize(target, rmode, cond), post_position); | |
146 #endif | 187 #endif |
147 } | 188 } |
148 | 189 |
149 | 190 |
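
Both branches of Call() now wrap the emitted instruction pair in a BlockConstPoolScope, so the constant pool cannot be dumped between the instruction that sets up the return address (or loads ip) and the branch that follows it; both the patching code and the return-address offset assume the two are adjacent. Here is a minimal RAII sketch of that blocking idea, assuming a simple nesting-counter design rather than V8's actual implementation.

```cpp
#include <cassert>
#include <iostream>

// Hypothetical assembler with a constant pool that may be flushed between
// instructions unless emission is temporarily blocked.
class MiniAssembler {
 public:
  void EmitInstruction(const char* text) {
    // Only consider flushing the pool when no scope blocks it.
    if (const_pool_blocked_nesting_ == 0 && pending_constants_ > 0) {
      std::cout << "  [constant pool emitted here]\n";
      pending_constants_ = 0;
    }
    std::cout << "  " << text << "\n";
  }
  void AddConstant() { ++pending_constants_; }

  void StartBlockConstPool() { ++const_pool_blocked_nesting_; }
  void EndBlockConstPool() {
    assert(const_pool_blocked_nesting_ > 0);
    --const_pool_blocked_nesting_;
  }

 private:
  int const_pool_blocked_nesting_ = 0;
  int pending_constants_ = 0;
};

// RAII guard mirroring how BlockConstPoolScope is used in the patch:
// constant-pool emission is blocked for the lifetime of the scope.
class BlockConstPoolScope {
 public:
  explicit BlockConstPoolScope(MiniAssembler* assem) : assem_(assem) {
    assem_->StartBlockConstPool();
  }
  ~BlockConstPoolScope() { assem_->EndBlockConstPool(); }

 private:
  MiniAssembler* assem_;
};

int main() {
  MiniAssembler masm;
  masm.AddConstant();  // something is waiting to go into the pool
  {
    BlockConstPoolScope scope(&masm);  // keeps the two instructions adjacent
    masm.EmitInstruction("mov lr, pc");
    masm.EmitInstruction("mov pc, <target>");
  }
  masm.EmitInstruction("next instruction");  // pool may be emitted before this
  return 0;
}
```
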
150 void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode, | 191 int MacroAssembler::CallSize( |
151 Condition cond) { | 192 byte* target, RelocInfo::Mode rmode, Condition cond) { |
152 ASSERT(!RelocInfo::IsCodeTarget(rmode)); | 193 return CallSize(reinterpret_cast<intptr_t>(target), rmode); |
153 Call(reinterpret_cast<intptr_t>(target), rmode, cond); | |
154 } | 194 } |
155 | 195 |
156 | 196 |
157 void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode, | 197 void MacroAssembler::Call( |
158 Condition cond) { | 198 byte* target, RelocInfo::Mode rmode, Condition cond) { |
199 #ifdef DEBUG | |
200 int pre_position = pc_offset(); | |
201 #endif | |
202 | |
203 ASSERT(!RelocInfo::IsCodeTarget(rmode)); | |
204 Call(reinterpret_cast<intptr_t>(target), rmode, cond); | |
205 | |
206 #ifdef DEBUG | |
207 int post_position = pc_offset(); | |
208 CHECK_EQ(pre_position + CallSize(target, rmode, cond), post_position); | |
209 #endif | |
210 } | |
211 | |
212 | |
213 int MacroAssembler::CallSize( | |
214 Handle<Code> code, RelocInfo::Mode rmode, Condition cond) { | |
215 return CallSize(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | |
216 } | |
217 | |
218 | |
219 void MacroAssembler::Call( | |
220 Handle<Code> code, RelocInfo::Mode rmode, Condition cond) { | |
221 #ifdef DEBUG | |
222 int pre_position = pc_offset(); | |
223 #endif | |
224 | |
159 ASSERT(RelocInfo::IsCodeTarget(rmode)); | 225 ASSERT(RelocInfo::IsCodeTarget(rmode)); |
160 // 'code' is always generated ARM code, never THUMB code | 226 // 'code' is always generated ARM code, never THUMB code |
161 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | 227 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); |
228 | |
229 #ifdef DEBUG | |
230 int post_position = pc_offset(); | |
231 CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position); | |
232 #endif | |
162 } | 233 } |
163 | 234 |
164 | 235 |
165 void MacroAssembler::Ret(Condition cond) { | 236 void MacroAssembler::Ret(Condition cond) { |
166 #if USE_BX | 237 #if USE_BX |
167 bx(lr, cond); | 238 bx(lr, cond); |
168 #else | 239 #else |
169 mov(pc, Operand(lr), LeaveCC, cond); | 240 mov(pc, Operand(lr), LeaveCC, cond); |
170 #endif | 241 #endif |
171 } | 242 } |
(...skipping 605 matching lines...)
777 #endif | 848 #endif |
778 } | 849 } |
779 | 850 |
780 | 851 |
781 void MacroAssembler::InvokePrologue(const ParameterCount& expected, | 852 void MacroAssembler::InvokePrologue(const ParameterCount& expected, |
782 const ParameterCount& actual, | 853 const ParameterCount& actual, |
783 Handle<Code> code_constant, | 854 Handle<Code> code_constant, |
784 Register code_reg, | 855 Register code_reg, |
785 Label* done, | 856 Label* done, |
786 InvokeFlag flag, | 857 InvokeFlag flag, |
787 PostCallGenerator* post_call_generator) { | 858 CallWrapper* call_wrapper) { |
788 bool definitely_matches = false; | 859 bool definitely_matches = false; |
789 Label regular_invoke; | 860 Label regular_invoke; |
790 | 861 |
791 // Check whether the expected and actual arguments count match. If not, | 862 // Check whether the expected and actual arguments count match. If not, |
792 // setup registers according to contract with ArgumentsAdaptorTrampoline: | 863 // setup registers according to contract with ArgumentsAdaptorTrampoline: |
793 // r0: actual arguments count | 864 // r0: actual arguments count |
794 // r1: function (passed through to callee) | 865 // r1: function (passed through to callee) |
795 // r2: expected arguments count | 866 // r2: expected arguments count |
796 // r3: callee code entry | 867 // r3: callee code entry |
797 | 868 |
(...skipping 34 matching lines...)
832 | 903 |
833 if (!definitely_matches) { | 904 if (!definitely_matches) { |
834 if (!code_constant.is_null()) { | 905 if (!code_constant.is_null()) { |
835 mov(r3, Operand(code_constant)); | 906 mov(r3, Operand(code_constant)); |
836 add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); | 907 add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
837 } | 908 } |
838 | 909 |
839 Handle<Code> adaptor = | 910 Handle<Code> adaptor = |
840 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); | 911 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); |
841 if (flag == CALL_FUNCTION) { | 912 if (flag == CALL_FUNCTION) { |
913 if (call_wrapper != NULL) { | |
914 call_wrapper->BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET)); | |
915 } | |
842 Call(adaptor, RelocInfo::CODE_TARGET); | 916 Call(adaptor, RelocInfo::CODE_TARGET); |
843 if (post_call_generator != NULL) post_call_generator->Generate(); | 917 if (call_wrapper != NULL) call_wrapper->AfterCall(); |
844 b(done); | 918 b(done); |
845 } else { | 919 } else { |
846 Jump(adaptor, RelocInfo::CODE_TARGET); | 920 Jump(adaptor, RelocInfo::CODE_TARGET); |
847 } | 921 } |
848 bind(®ular_invoke); | 922 bind(®ular_invoke); |
849 } | 923 } |
850 } | 924 } |
851 | 925 |
852 | 926 |
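
InvokePrologue and the Invoke* helpers below now take a CallWrapper* in place of a PostCallGenerator*; the wrapper learns the exact size of the upcoming call through BeforeCall(CallSize(...)) and is notified again through AfterCall(). The sketch below shows what a caller-supplied wrapper could look like, assuming the base class declares exactly those two virtuals (its real declaration lives outside this hunk).

```cpp
#include <iostream>

// Assumed shape of the interface this patch relies on: BeforeCall() receives
// the predicted byte size of the upcoming call sequence, AfterCall() runs
// once the call has been emitted.
class CallWrapper {
 public:
  virtual ~CallWrapper() {}
  virtual void BeforeCall(int call_size) = 0;
  virtual void AfterCall() = 0;
};

// Hypothetical wrapper that just records where the call sequence starts and
// ends; a real user might record safepoints or patch positions instead.
class RecordingCallWrapper : public CallWrapper {
 public:
  explicit RecordingCallWrapper(const int* pc_offset)
      : pc_offset_(pc_offset), start_(0), expected_end_(0) {}

  virtual void BeforeCall(int call_size) {
    start_ = *pc_offset_;
    expected_end_ = start_ + call_size;  // known before any code is emitted
  }

  virtual void AfterCall() {
    std::cout << "call sequence spans [" << start_ << ", "
              << expected_end_ << ")\n";
  }

 private:
  const int* pc_offset_;
  int start_;
  int expected_end_;
};

int main() {
  int pc_offset = 16;                 // pretend 16 bytes are already emitted
  RecordingCallWrapper wrapper(&pc_offset);
  wrapper.BeforeCall(2 * 4);          // e.g. CallSize() == 2 * kInstrSize
  pc_offset += 2 * 4;                 // ...the assembler emits the call...
  wrapper.AfterCall();                // prints: call sequence spans [16, 24)
  return 0;
}
```
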
853 void MacroAssembler::InvokeCode(Register code, | 927 void MacroAssembler::InvokeCode(Register code, |
854 const ParameterCount& expected, | 928 const ParameterCount& expected, |
855 const ParameterCount& actual, | 929 const ParameterCount& actual, |
856 InvokeFlag flag, | 930 InvokeFlag flag, |
857 PostCallGenerator* post_call_generator) { | 931 CallWrapper* call_wrapper) { |
858 Label done; | 932 Label done; |
859 | 933 |
860 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag, | 934 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag, |
861 post_call_generator); | 935 call_wrapper); |
862 if (flag == CALL_FUNCTION) { | 936 if (flag == CALL_FUNCTION) { |
937 if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code)); | |
863 Call(code); | 938 Call(code); |
864 if (post_call_generator != NULL) post_call_generator->Generate(); | 939 if (call_wrapper != NULL) call_wrapper->AfterCall(); |
865 } else { | 940 } else { |
866 ASSERT(flag == JUMP_FUNCTION); | 941 ASSERT(flag == JUMP_FUNCTION); |
867 Jump(code); | 942 Jump(code); |
868 } | 943 } |
869 | 944 |
870 // Continue here if InvokePrologue does handle the invocation due to | 945 // Continue here if InvokePrologue does handle the invocation due to |
871 // mismatched parameter counts. | 946 // mismatched parameter counts. |
872 bind(&done); | 947 bind(&done); |
873 } | 948 } |
874 | 949 |
(...skipping 14 matching lines...)
889 | 964 |
890 // Continue here if InvokePrologue does handle the invocation due to | 965 // Continue here if InvokePrologue does handle the invocation due to |
891 // mismatched parameter counts. | 966 // mismatched parameter counts. |
892 bind(&done); | 967 bind(&done); |
893 } | 968 } |
894 | 969 |
895 | 970 |
896 void MacroAssembler::InvokeFunction(Register fun, | 971 void MacroAssembler::InvokeFunction(Register fun, |
897 const ParameterCount& actual, | 972 const ParameterCount& actual, |
898 InvokeFlag flag, | 973 InvokeFlag flag, |
899 PostCallGenerator* post_call_generator) { | 974 CallWrapper* call_wrapper) { |
900 // Contract with called JS functions requires that function is passed in r1. | 975 // Contract with called JS functions requires that function is passed in r1. |
901 ASSERT(fun.is(r1)); | 976 ASSERT(fun.is(r1)); |
902 | 977 |
903 Register expected_reg = r2; | 978 Register expected_reg = r2; |
904 Register code_reg = r3; | 979 Register code_reg = r3; |
905 | 980 |
906 ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 981 ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
907 ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 982 ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
908 ldr(expected_reg, | 983 ldr(expected_reg, |
909 FieldMemOperand(code_reg, | 984 FieldMemOperand(code_reg, |
910 SharedFunctionInfo::kFormalParameterCountOffset)); | 985 SharedFunctionInfo::kFormalParameterCountOffset)); |
911 mov(expected_reg, Operand(expected_reg, ASR, kSmiTagSize)); | 986 mov(expected_reg, Operand(expected_reg, ASR, kSmiTagSize)); |
912 ldr(code_reg, | 987 ldr(code_reg, |
913 FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 988 FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); |
914 | 989 |
915 ParameterCount expected(expected_reg); | 990 ParameterCount expected(expected_reg); |
916 InvokeCode(code_reg, expected, actual, flag, post_call_generator); | 991 InvokeCode(code_reg, expected, actual, flag, call_wrapper); |
917 } | 992 } |
918 | 993 |
919 | 994 |
920 void MacroAssembler::InvokeFunction(JSFunction* function, | 995 void MacroAssembler::InvokeFunction(JSFunction* function, |
921 const ParameterCount& actual, | 996 const ParameterCount& actual, |
922 InvokeFlag flag) { | 997 InvokeFlag flag) { |
923 ASSERT(function->is_compiled()); | 998 ASSERT(function->is_compiled()); |
924 | 999 |
925 // Get the function and setup the context. | 1000 // Get the function and setup the context. |
926 mov(r1, Operand(Handle<JSFunction>(function))); | 1001 mov(r1, Operand(Handle<JSFunction>(function))); |
(...skipping 1149 matching lines...)
2076 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); | 2151 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); |
2077 #endif | 2152 #endif |
2078 mov(r1, Operand(builtin)); | 2153 mov(r1, Operand(builtin)); |
2079 CEntryStub stub(1); | 2154 CEntryStub stub(1); |
2080 return TryTailCallStub(&stub); | 2155 return TryTailCallStub(&stub); |
2081 } | 2156 } |
2082 | 2157 |
2083 | 2158 |
2084 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | 2159 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, |
2085 InvokeJSFlags flags, | 2160 InvokeJSFlags flags, |
2086 PostCallGenerator* post_call_generator) { | 2161 CallWrapper* call_wrapper) { |
2087 GetBuiltinEntry(r2, id); | 2162 GetBuiltinEntry(r2, id); |
2088 if (flags == CALL_JS) { | 2163 if (flags == CALL_JS) { |
2164 if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(r2)); | |
2089 Call(r2); | 2165 Call(r2); |
2090 if (post_call_generator != NULL) post_call_generator->Generate(); | 2166 if (call_wrapper != NULL) call_wrapper->AfterCall(); |
2091 } else { | 2167 } else { |
2092 ASSERT(flags == JUMP_JS); | 2168 ASSERT(flags == JUMP_JS); |
2093 Jump(r2); | 2169 Jump(r2); |
2094 } | 2170 } |
2095 } | 2171 } |
2096 | 2172 |
2097 | 2173 |
2098 void MacroAssembler::GetBuiltinFunction(Register target, | 2174 void MacroAssembler::GetBuiltinFunction(Register target, |
2099 Builtins::JavaScript id) { | 2175 Builtins::JavaScript id) { |
2100 // Load the builtins object into target register. | 2176 // Load the builtins object into target register. |
(...skipping 599 matching lines...)
2700 void CodePatcher::EmitCondition(Condition cond) { | 2776 void CodePatcher::EmitCondition(Condition cond) { |
2701 Instr instr = Assembler::instr_at(masm_.pc_); | 2777 Instr instr = Assembler::instr_at(masm_.pc_); |
2702 instr = (instr & ~kCondMask) | cond; | 2778 instr = (instr & ~kCondMask) | cond; |
2703 masm_.emit(instr); | 2779 masm_.emit(instr); |
2704 } | 2780 } |
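
CodePatcher::EmitCondition rewrites only the condition field of the instruction at the current patch position. On ARM the condition sits in the top four bits of the instruction word, so masking with ~kCondMask and OR-ing in the new condition turns, say, an unconditional mov into moveq. A standalone sketch follows; the mask and condition values are standard ARM encodings, assumed to match V8's constants.

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>

typedef uint32_t Instr;

// Standard ARM encoding: bits 31..28 hold the condition code.
const Instr kCondMask = 0xF0000000u;
const Instr al = 0xE0000000u;  // "always"
const Instr eq = 0x00000000u;  // "equal"

// Same masking trick as CodePatcher::EmitCondition: keep everything except
// the condition field, then OR in the new condition.
Instr PatchCondition(Instr instr, Instr cond) {
  return (instr & ~kCondMask) | cond;
}

int main() {
  Instr mov_r0_r1 = 0xE1A00001u;             // mov r0, r1 (condition: al)
  Instr patched = PatchCondition(mov_r0_r1, eq);
  assert(patched == 0x01A00001u);            // moveq r0, r1
  std::printf("patched: 0x%08X\n", static_cast<unsigned>(patched));
  return 0;
}
```
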
2705 | 2781 |
2706 | 2782 |
2707 } } // namespace v8::internal | 2783 } } // namespace v8::internal |
2708 | 2784 |
2709 #endif // V8_TARGET_ARCH_ARM | 2785 #endif // V8_TARGET_ARCH_ARM |