OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // |
| 3 // Copyright IBM Corp. 2012, 2013. All rights reserved. |
| 4 // |
2 // Use of this source code is governed by a BSD-style license that can be | 5 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 6 // found in the LICENSE file. |
4 | 7 |
5 #include "src/v8.h" | 8 #include "src/v8.h" |
6 | 9 |
7 #if V8_TARGET_ARCH_ARM | 10 #if V8_TARGET_ARCH_PPC |
8 | 11 |
9 #include "src/codegen.h" | 12 #include "src/codegen.h" |
10 #include "src/debug.h" | 13 #include "src/debug.h" |
11 #include "src/deoptimizer.h" | 14 #include "src/deoptimizer.h" |
12 #include "src/full-codegen.h" | 15 #include "src/full-codegen.h" |
13 #include "src/runtime.h" | 16 #include "src/runtime.h" |
14 #include "src/stub-cache.h" | 17 #include "src/stub-cache.h" |
15 | 18 |
16 namespace v8 { | 19 namespace v8 { |
17 namespace internal { | 20 namespace internal { |
18 | 21 |
19 | 22 |
20 #define __ ACCESS_MASM(masm) | 23 #define __ ACCESS_MASM(masm) |
21 | 24 |
22 | |
23 void Builtins::Generate_Adaptor(MacroAssembler* masm, | 25 void Builtins::Generate_Adaptor(MacroAssembler* masm, |
24 CFunctionId id, | 26 CFunctionId id, |
25 BuiltinExtraArguments extra_args) { | 27 BuiltinExtraArguments extra_args) { |
26 // ----------- S t a t e ------------- | 28 // ----------- S t a t e ------------- |
27 // -- r0 : number of arguments excluding receiver | 29 // -- r3 : number of arguments excluding receiver |
28 // -- r1 : called function (only guaranteed when | 30 // -- r4 : called function (only guaranteed when |
29 // extra_args requires it) | 31 // extra_args requires it) |
30 // -- cp : context | 32 // -- cp : context |
31 // -- sp[0] : last argument | 33 // -- sp[0] : last argument |
32 // -- ... | 34 // -- ... |
33 // -- sp[4 * (argc - 1)] : first argument (argc == r0) | 35 // -- sp[4 * (argc - 1)] : first argument (argc == r3) |
34 // -- sp[4 * argc] : receiver | 36 // -- sp[4 * argc] : receiver |
35 // ----------------------------------- | 37 // ----------------------------------- |
36 | 38 |
37 // Insert extra arguments. | 39 // Insert extra arguments. |
38 int num_extra_args = 0; | 40 int num_extra_args = 0; |
39 if (extra_args == NEEDS_CALLED_FUNCTION) { | 41 if (extra_args == NEEDS_CALLED_FUNCTION) { |
40 num_extra_args = 1; | 42 num_extra_args = 1; |
41 __ push(r1); | 43 __ push(r4); |
42 } else { | 44 } else { |
43 ASSERT(extra_args == NO_EXTRA_ARGUMENTS); | 45 ASSERT(extra_args == NO_EXTRA_ARGUMENTS); |
44 } | 46 } |
45 | 47 |
46 // JumpToExternalReference expects r0 to contain the number of arguments | 48 // JumpToExternalReference expects r3 to contain the number of arguments |
47 // including the receiver and the extra arguments. | 49 // including the receiver and the extra arguments. |
48 __ add(r0, r0, Operand(num_extra_args + 1)); | 50 __ addi(r3, r3, Operand(num_extra_args + 1)); |
49 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); | 51 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); |
50 } | 52 } |
51 | 53 |
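The renaming visible in Generate_Adaptor above is applied uniformly across the whole port: PPC treats r0 specially (it reads as zero in many addressing modes, so it is kept as a short-lived scratch), r1 is the ABI stack pointer (the code still refers to it as sp) and r2 is ABI-reserved (the TOC pointer on 64-bit ELF), so every JavaScript value the ARM code keeps in r0-r6 simply shifts up by three registers. A rough map, inferred from the diff rather than from the register definitions themselves:

  // ARM r0 -> PPC r3   argument count / return value
  // ARM r1 -> PPC r4   target function
  // ARM r2 -> PPC r5   allocation site / first scratch
  // ARM r3 -> PPC r6,  r4 -> r7,  r5 -> r8,  r6 -> r9, ...
  // r10/r11 pick up further temporaries; PPC r0 and ip stay as scratches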
52 | 54 |
53 // Load the built-in InternalArray function from the current context. | 55 // Load the built-in InternalArray function from the current context. |
54 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, | 56 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, |
55 Register result) { | 57 Register result) { |
56 // Load the native context. | 58 // Load the native context. |
57 | 59 |
58 __ ldr(result, | 60 __ LoadP(result, |
59 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 61 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
60 __ ldr(result, | 62 __ LoadP(result, |
61 FieldMemOperand(result, GlobalObject::kNativeContextOffset)); | 63 FieldMemOperand(result, GlobalObject::kNativeContextOffset)); |
62 // Load the InternalArray function from the native context. | 64 // Load the InternalArray function from the native context. |
63 __ ldr(result, | 65 __ LoadP(result, |
64 MemOperand(result, | 66 MemOperand(result, |
65 Context::SlotOffset( | 67 Context::SlotOffset( |
66 Context::INTERNAL_ARRAY_FUNCTION_INDEX))); | 68 Context::INTERNAL_ARRAY_FUNCTION_INDEX))); |
67 } | 69 } |
68 | 70 |
69 | 71 |
70 // Load the built-in Array function from the current context. | 72 // Load the built-in Array function from the current context. |
71 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { | 73 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { |
72 // Load the native context. | 74 // Load the native context. |
73 | 75 |
74 __ ldr(result, | 76 __ LoadP(result, |
75 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 77 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
76 __ ldr(result, | 78 __ LoadP(result, |
77 FieldMemOperand(result, GlobalObject::kNativeContextOffset)); | 79 FieldMemOperand(result, GlobalObject::kNativeContextOffset)); |
78 // Load the Array function from the native context. | 80 // Load the Array function from the native context. |
79 __ ldr(result, | 81 __ LoadP(result, |
80 MemOperand(result, | 82 MemOperand(result, |
81 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); | 83 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); |
82 } | 84 } |
83 | 85 |
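The other change that repeats on almost every line is ldr/str becoming LoadP/StoreP: the ARM code can assume 4-byte pointers, while the PPC port has to serve both 32-bit and 64-bit targets, so it goes through pointer-width helpers instead of raw loads and stores. A minimal sketch of the idea, with an illustrative name rather than the real macro-assembler definition (on 64-bit PPC the ld/std forms also require 4-byte-aligned displacements, which is presumably why StoreP threads an extra scratch register such as r0 through the calls later in the file):

  // Illustrative only, not the actual v8::internal::MacroAssembler code.
  static void LoadPointer(MacroAssembler* masm, Register dst,
                          const MemOperand& mem) {
  #if V8_TARGET_ARCH_PPC64
    masm->ld(dst, mem);   // 8-byte load (DS form)
  #else
    masm->lwz(dst, mem);  // 4-byte load
  #endif
  }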
84 | 86 |
85 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { | 87 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { |
86 // ----------- S t a t e ------------- | 88 // ----------- S t a t e ------------- |
87 // -- r0 : number of arguments | 89 // -- r3 : number of arguments |
88 // -- lr : return address | 90 // -- lr : return address |
89 // -- sp[...]: constructor arguments | 91 // -- sp[...]: constructor arguments |
90 // ----------------------------------- | 92 // ----------------------------------- |
91 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | 93 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
92 | 94 |
93 // Get the InternalArray function. | 95 // Get the InternalArray function. |
94 GenerateLoadInternalArrayFunction(masm, r1); | 96 GenerateLoadInternalArrayFunction(masm, r4); |
95 | 97 |
96 if (FLAG_debug_code) { | 98 if (FLAG_debug_code) { |
97 // Initial map for the builtin InternalArray functions should be maps. | 99 // Initial map for the builtin InternalArray functions should be maps. |
98 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 100 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); |
99 __ SmiTst(r2); | 101 __ TestIfSmi(r5, r0); |
100 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction); | 102 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0); |
101 __ CompareObjectType(r2, r3, r4, MAP_TYPE); | 103 __ CompareObjectType(r5, r6, r7, MAP_TYPE); |
102 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); | 104 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); |
103 } | 105 } |
104 | 106 |
105 // Run the native code for the InternalArray function called as a normal | 107 // Run the native code for the InternalArray function called as a normal |
106 // function. | 108 // function. |
107 // tail call a stub | 109 // tail call a stub |
108 InternalArrayConstructorStub stub(masm->isolate()); | 110 InternalArrayConstructorStub stub(masm->isolate()); |
109 __ TailCallStub(&stub); | 111 __ TailCallStub(&stub); |
110 } | 112 } |
111 | 113 |
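A PPC-specific wrinkle shows up in the debug checks above: TestIfSmi is followed by Assert(ne, ..., cr0) and, elsewhere, by bne(..., cr0). The extra operand is there because the helper is presumably built on andi., PowerPC's record-form AND-immediate, which deposits its comparison result in condition-register field cr0 as a side effect; the consumer therefore has to name cr0 instead of relying on the field written by cmp/cmpi. The same pattern appears spelled out further down in the string-constructor code; a sketch of the equivalent open-coded smi test, using the kSmiTagMask constant:

  __ andi(r0, r5, Operand(kSmiTagMask));  // record form: flags land in cr0
  __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);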
112 | 114 |
113 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { | 115 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { |
114 // ----------- S t a t e ------------- | 116 // ----------- S t a t e ------------- |
115 // -- r0 : number of arguments | 117 // -- r3 : number of arguments |
116 // -- lr : return address | 118 // -- lr : return address |
117 // -- sp[...]: constructor arguments | 119 // -- sp[...]: constructor arguments |
118 // ----------------------------------- | 120 // ----------------------------------- |
119 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | 121 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
120 | 122 |
121 // Get the Array function. | 123 // Get the Array function. |
122 GenerateLoadArrayFunction(masm, r1); | 124 GenerateLoadArrayFunction(masm, r4); |
123 | 125 |
124 if (FLAG_debug_code) { | 126 if (FLAG_debug_code) { |
125 // Initial map for the builtin Array functions should be maps. | 127 // Initial map for the builtin Array functions should be maps. |
126 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 128 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); |
127 __ SmiTst(r2); | 129 __ TestIfSmi(r5, r0); |
128 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); | 130 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); |
129 __ CompareObjectType(r2, r3, r4, MAP_TYPE); | 131 __ CompareObjectType(r5, r6, r7, MAP_TYPE); |
130 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); | 132 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); |
131 } | 133 } |
132 | 134 |
133 // Run the native code for the Array function called as a normal function. | 135 // Run the native code for the Array function called as a normal function. |
134 // tail call a stub | 136 // tail call a stub |
135 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 137 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
136 ArrayConstructorStub stub(masm->isolate()); | 138 ArrayConstructorStub stub(masm->isolate()); |
137 __ TailCallStub(&stub); | 139 __ TailCallStub(&stub); |
138 } | 140 } |
139 | 141 |
140 | 142 |
141 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { | 143 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { |
142 // ----------- S t a t e ------------- | 144 // ----------- S t a t e ------------- |
143 // -- r0 : number of arguments | 145 // -- r3 : number of arguments |
144 // -- r1 : constructor function | 146 // -- r4 : constructor function |
145 // -- lr : return address | 147 // -- lr : return address |
146 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) | 148 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) |
147 // -- sp[argc * 4] : receiver | 149 // -- sp[argc * 4] : receiver |
148 // ----------------------------------- | 150 // ----------------------------------- |
149 Counters* counters = masm->isolate()->counters(); | 151 Counters* counters = masm->isolate()->counters(); |
150 __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3); | 152 __ IncrementCounter(counters->string_ctor_calls(), 1, r5, r6); |
151 | 153 |
152 Register function = r1; | 154 Register function = r4; |
153 if (FLAG_debug_code) { | 155 if (FLAG_debug_code) { |
154 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2); | 156 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r5); |
155 __ cmp(function, Operand(r2)); | 157 __ cmp(function, r5); |
156 __ Assert(eq, kUnexpectedStringFunction); | 158 __ Assert(eq, kUnexpectedStringFunction); |
157 } | 159 } |
158 | 160 |
159 // Load the first arguments in r0 and get rid of the rest. | 161 // Load the first argument in r3 and get rid of the rest. |
160 Label no_arguments; | 162 Label no_arguments; |
161 __ cmp(r0, Operand::Zero()); | 163 __ cmpi(r3, Operand::Zero()); |
162 __ b(eq, &no_arguments); | 164 __ beq(&no_arguments); |
163 // First args = sp[(argc - 1) * 4]. | 165 // First args = sp[(argc - 1) * 4]. |
164 __ sub(r0, r0, Operand(1)); | 166 __ subi(r3, r3, Operand(1)); |
165 __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex)); | 167 __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2)); |
| 168 __ add(sp, sp, r3); |
| 169 __ LoadP(r3, MemOperand(sp)); |
166 // sp now point to args[0], drop args[0] + receiver. | 170 // sp now points to args[0], drop args[0] + receiver. |
167 __ Drop(2); | 171 __ Drop(2); |
168 | 172 |
169 Register argument = r2; | 173 Register argument = r5; |
170 Label not_cached, argument_is_string; | 174 Label not_cached, argument_is_string; |
171 __ LookupNumberStringCache(r0, // Input. | 175 __ LookupNumberStringCache(r3, // Input. |
172 argument, // Result. | 176 argument, // Result. |
173 r3, // Scratch. | 177 r6, // Scratch. |
174 r4, // Scratch. | 178 r7, // Scratch. |
175 r5, // Scratch. | 179 r8, // Scratch. |
176 ¬_cached); | 180 ¬_cached); |
177 __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4); | 181 __ IncrementCounter(counters->string_ctor_cached_number(), 1, r6, r7); |
178 __ bind(&argument_is_string); | 182 __ bind(&argument_is_string); |
179 | 183 |
180 // ----------- S t a t e ------------- | 184 // ----------- S t a t e ------------- |
181 // -- r2 : argument converted to string | 185 // -- r5 : argument converted to string |
182 // -- r1 : constructor function | 186 // -- r4 : constructor function |
183 // -- lr : return address | 187 // -- lr : return address |
184 // ----------------------------------- | 188 // ----------------------------------- |
185 | 189 |
186 Label gc_required; | 190 Label gc_required; |
187 __ Allocate(JSValue::kSize, | 191 __ Allocate(JSValue::kSize, |
188 r0, // Result. | 192 r3, // Result. |
189 r3, // Scratch. | 193 r6, // Scratch. |
190 r4, // Scratch. | 194 r7, // Scratch. |
191 &gc_required, | 195 &gc_required, |
192 TAG_OBJECT); | 196 TAG_OBJECT); |
193 | 197 |
194 // Initialising the String Object. | 198 // Initialising the String Object. |
195 Register map = r3; | 199 Register map = r6; |
196 __ LoadGlobalFunctionInitialMap(function, map, r4); | 200 __ LoadGlobalFunctionInitialMap(function, map, r7); |
197 if (FLAG_debug_code) { | 201 if (FLAG_debug_code) { |
198 __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset)); | 202 __ lbz(r7, FieldMemOperand(map, Map::kInstanceSizeOffset)); |
199 __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2)); | 203 __ cmpi(r7, Operand(JSValue::kSize >> kPointerSizeLog2)); |
200 __ Assert(eq, kUnexpectedStringWrapperInstanceSize); | 204 __ Assert(eq, kUnexpectedStringWrapperInstanceSize); |
201 __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset)); | 205 __ lbz(r7, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset)); |
202 __ cmp(r4, Operand::Zero()); | 206 __ cmpi(r7, Operand::Zero()); |
203 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper); | 207 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper); |
204 } | 208 } |
205 __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset)); | 209 __ StoreP(map, FieldMemOperand(r3, HeapObject::kMapOffset), r0); |
206 | 210 |
207 __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex); | 211 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); |
208 __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | 212 __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); |
209 __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset)); | 213 __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0); |
210 | 214 |
211 __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset)); | 215 __ StoreP(argument, FieldMemOperand(r3, JSValue::kValueOffset), r0); |
212 | 216 |
213 // Ensure the object is fully initialized. | 217 // Ensure the object is fully initialized. |
214 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize); | 218 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize); |
215 | 219 |
216 __ Ret(); | 220 __ Ret(); |
217 | 221 |
218 // The argument was not found in the number to string cache. Check | 222 // The argument was not found in the number to string cache. Check |
219 // if it's a string already before calling the conversion builtin. | 223 // if it's a string already before calling the conversion builtin. |
220 Label convert_argument; | 224 Label convert_argument; |
221 __ bind(¬_cached); | 225 __ bind(¬_cached); |
222 __ JumpIfSmi(r0, &convert_argument); | 226 __ JumpIfSmi(r3, &convert_argument); |
223 | 227 |
224 // Is it a String? | 228 // Is it a String? |
225 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); | 229 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); |
226 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset)); | 230 __ lbz(r6, FieldMemOperand(r5, Map::kInstanceTypeOffset)); |
227 STATIC_ASSERT(kNotStringTag != 0); | 231 STATIC_ASSERT(kNotStringTag != 0); |
228 __ tst(r3, Operand(kIsNotStringMask)); | 232 __ andi(r0, r6, Operand(kIsNotStringMask)); |
229 __ b(ne, &convert_argument); | 233 __ bne(&convert_argument, cr0); |
230 __ mov(argument, r0); | 234 __ mr(argument, r3); |
231 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); | 235 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7); |
232 __ b(&argument_is_string); | 236 __ b(&argument_is_string); |
233 | 237 |
234 // Invoke the conversion builtin and put the result into r2. | 238 // Invoke the conversion builtin and put the result into r5. |
235 __ bind(&convert_argument); | 239 __ bind(&convert_argument); |
236 __ push(function); // Preserve the function. | 240 __ push(function); // Preserve the function. |
237 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); | 241 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7); |
238 { | 242 { |
239 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 243 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
240 __ push(r0); | 244 __ push(r3); |
241 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); | 245 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); |
242 } | 246 } |
243 __ pop(function); | 247 __ pop(function); |
244 __ mov(argument, r0); | 248 __ mr(argument, r3); |
245 __ b(&argument_is_string); | 249 __ b(&argument_is_string); |
246 | 250 |
247 // Load the empty string into r2, remove the receiver from the | 251 // Load the empty string into r5, remove the receiver from the |
248 // stack, and jump back to the case where the argument is a string. | 252 // stack, and jump back to the case where the argument is a string. |
249 __ bind(&no_arguments); | 253 __ bind(&no_arguments); |
250 __ LoadRoot(argument, Heap::kempty_stringRootIndex); | 254 __ LoadRoot(argument, Heap::kempty_stringRootIndex); |
251 __ Drop(1); | 255 __ Drop(1); |
252 __ b(&argument_is_string); | 256 __ b(&argument_is_string); |
253 | 257 |
254 // At this point the argument is already a string. Call runtime to | 258 // At this point the argument is already a string. Call runtime to |
255 // create a string wrapper. | 259 // create a string wrapper. |
256 __ bind(&gc_required); | 260 __ bind(&gc_required); |
257 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); | 261 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7); |
258 { | 262 { |
259 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 263 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
260 __ push(argument); | 264 __ push(argument); |
261 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 265 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
262 } | 266 } |
263 __ Ret(); | 267 __ Ret(); |
264 } | 268 } |
265 | 269 |
266 | 270 |
267 static void CallRuntimePassFunction( | 271 static void CallRuntimePassFunction( |
268 MacroAssembler* masm, Runtime::FunctionId function_id) { | 272 MacroAssembler* masm, Runtime::FunctionId function_id) { |
269 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 273 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
270 // Push a copy of the function onto the stack. | 274 // Push a copy of the function onto the stack. |
271 __ push(r1); | |
272 // Push function as parameter to the runtime call. | 275 // Push function as parameter to the runtime call. |
273 __ Push(r1); | 276 __ Push(r4, r4); |
274 | 277 |
275 __ CallRuntime(function_id, 1); | 278 __ CallRuntime(function_id, 1); |
276 // Restore receiver. | 279 // Restore receiver. |
277 __ pop(r1); | 280 __ Pop(r4); |
278 } | 281 } |
279 | 282 |
280 | 283 |
281 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 284 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
282 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 285 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
283 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset)); | 286 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kCodeOffset)); |
284 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); | 287 __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag)); |
285 __ Jump(r2); | 288 __ Jump(r5); |
286 } | 289 } |
287 | 290 |
288 | 291 |
289 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { | 292 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { |
290 __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 293 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
291 __ Jump(r0); | 294 __ Jump(r3); |
292 } | 295 } |
293 | 296 |
294 | 297 |
295 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 298 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
296 // Checking whether the queued function is ready for install is optional, | 299 // Checking whether the queued function is ready for install is optional, |
297 // since we come across interrupts and stack checks elsewhere. However, | 300 // since we come across interrupts and stack checks elsewhere. However, |
298 // not checking may delay installing ready functions, and always checking | 301 // not checking may delay installing ready functions, and always checking |
299 // would be quite expensive. A good compromise is to first check against | 302 // would be quite expensive. A good compromise is to first check against |
300 // stack limit as a cue for an interrupt signal. | 303 // stack limit as a cue for an interrupt signal. |
301 Label ok; | 304 Label ok; |
302 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 305 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
303 __ cmp(sp, Operand(ip)); | 306 __ cmpl(sp, ip); |
304 __ b(hs, &ok); | 307 __ bge(&ok); |
305 | 308 |
306 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | 309 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); |
307 GenerateTailCallToReturnedCode(masm); | 310 GenerateTailCallToReturnedCode(masm); |
308 | 311 |
309 __ bind(&ok); | 312 __ bind(&ok); |
310 GenerateTailCallToSharedCode(masm); | 313 GenerateTailCallToSharedCode(masm); |
311 } | 314 } |
312 | 315 |
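The stack-limit probe above also shows how condition codes translate: ARM's cmp with a hs (unsigned higher-or-same) branch becomes cmpl, the logical i.e. unsigned compare, paired with a plain bge; a signed cmp would misorder stack addresses whose high bit is set. Restated with comments, the pattern to reuse for similar limit checks is:

  __ LoadRoot(ip, Heap::kStackLimitRootIndex);  // current stack/interrupt limit
  __ cmpl(sp, ip);                              // compare logical = unsigned
  __ bge(&ok);                                  // sp >= limit: nothing pending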
313 | 316 |
314 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 317 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
315 bool is_api_function, | 318 bool is_api_function, |
316 bool create_memento) { | 319 bool create_memento) { |
317 // ----------- S t a t e ------------- | 320 // ----------- S t a t e ------------- |
318 // -- r0 : number of arguments | 321 // -- r3 : number of arguments |
319 // -- r1 : constructor function | 322 // -- r4 : constructor function |
320 // -- r2 : allocation site or undefined | 323 // -- r5 : allocation site or undefined |
321 // -- lr : return address | 324 // -- lr : return address |
322 // -- sp[...]: constructor arguments | 325 // -- sp[...]: constructor arguments |
323 // ----------------------------------- | 326 // ----------------------------------- |
324 | 327 |
325 // Should never create mementos for api functions. | 328 // Should never create mementos for api functions. |
326 ASSERT(!is_api_function || !create_memento); | 329 ASSERT(!is_api_function || !create_memento); |
327 | 330 |
328 Isolate* isolate = masm->isolate(); | 331 Isolate* isolate = masm->isolate(); |
329 | 332 |
330 // Enter a construct frame. | 333 // Enter a construct frame. |
331 { | 334 { |
332 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); | 335 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); |
333 | 336 |
334 if (create_memento) { | 337 if (create_memento) { |
335 __ AssertUndefinedOrAllocationSite(r2, r3); | 338 __ AssertUndefinedOrAllocationSite(r5, r6); |
336 __ push(r2); | 339 __ push(r5); |
337 } | 340 } |
338 | 341 |
339 // Preserve the two incoming parameters on the stack. | 342 // Preserve the two incoming parameters on the stack. |
340 __ SmiTag(r0); | 343 __ SmiTag(r3); |
341 __ push(r0); // Smi-tagged arguments count. | 344 __ push(r3); // Smi-tagged arguments count. |
342 __ push(r1); // Constructor function. | 345 __ push(r4); // Constructor function. |
343 | 346 |
344 // Try to allocate the object without transitioning into C code. If any of | 347 // Try to allocate the object without transitioning into C code. If any of |
345 // the preconditions is not met, the code bails out to the runtime call. | 348 // the preconditions is not met, the code bails out to the runtime call. |
346 Label rt_call, allocated; | 349 Label rt_call, allocated; |
347 if (FLAG_inline_new) { | 350 if (FLAG_inline_new) { |
348 Label undo_allocation; | 351 Label undo_allocation; |
349 ExternalReference debug_step_in_fp = | 352 ExternalReference debug_step_in_fp = |
350 ExternalReference::debug_step_in_fp_address(isolate); | 353 ExternalReference::debug_step_in_fp_address(isolate); |
351 __ mov(r2, Operand(debug_step_in_fp)); | 354 __ mov(r5, Operand(debug_step_in_fp)); |
352 __ ldr(r2, MemOperand(r2)); | 355 __ LoadP(r5, MemOperand(r5)); |
353 __ tst(r2, r2); | 356 __ cmpi(r5, Operand::Zero()); |
354 __ b(ne, &rt_call); | 357 __ bne(&rt_call); |
355 | 358 |
356 // Load the initial map and verify that it is in fact a map. | 359 // Load the initial map and verify that it is in fact a map. |
357 // r1: constructor function | 360 // r4: constructor function |
358 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 361 __ LoadP(r5, FieldMemOperand(r4, |
359 __ JumpIfSmi(r2, &rt_call); | 362 JSFunction::kPrototypeOrInitialMapOffset)); |
360 __ CompareObjectType(r2, r3, r4, MAP_TYPE); | 363 __ JumpIfSmi(r5, &rt_call); |
361 __ b(ne, &rt_call); | 364 __ CompareObjectType(r5, r6, r7, MAP_TYPE); |
| 365 __ bne(&rt_call); |
362 | 366 |
363 // Check that the constructor is not constructing a JSFunction (see | 367 // Check that the constructor is not constructing a JSFunction (see |
364 // comments in Runtime_NewObject in runtime.cc). In which case the | 368 // comments in Runtime_NewObject in runtime.cc). In which case the |
365 // initial map's instance type would be JS_FUNCTION_TYPE. | 369 // initial map's instance type would be JS_FUNCTION_TYPE. |
366 // r1: constructor function | 370 // r4: constructor function |
367 // r2: initial map | 371 // r5: initial map |
368 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE); | 372 __ CompareInstanceType(r5, r6, JS_FUNCTION_TYPE); |
369 __ b(eq, &rt_call); | 373 __ beq(&rt_call); |
370 | 374 |
371 if (!is_api_function) { | 375 if (!is_api_function) { |
372 Label allocate; | 376 Label allocate; |
373 MemOperand bit_field3 = FieldMemOperand(r2, Map::kBitField3Offset); | 377 MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset); |
374 // Check if slack tracking is enabled. | 378 // Check if slack tracking is enabled. |
375 __ ldr(r4, bit_field3); | 379 __ lwz(r7, bit_field3); |
376 __ DecodeField<Map::ConstructionCount>(r3, r4); | 380 __ DecodeField<Map::ConstructionCount>(r11, r7); |
377 __ cmp(r3, Operand(JSFunction::kNoSlackTracking)); | 381 STATIC_ASSERT(JSFunction::kNoSlackTracking == 0); |
378 __ b(eq, &allocate); | 382 __ cmpi(r11, Operand::Zero()); // JSFunction::kNoSlackTracking |
| 383 __ beq(&allocate); |
379 // Decrease generous allocation count. | 384 // Decrease generous allocation count. |
380 __ sub(r4, r4, Operand(1 << Map::ConstructionCount::kShift)); | 385 __ Add(r7, r7, -(1 << Map::ConstructionCount::kShift), r0); |
381 __ str(r4, bit_field3); | 386 __ stw(r7, bit_field3); |
382 __ cmp(r3, Operand(JSFunction::kFinishSlackTracking)); | 387 __ cmpi(r11, Operand(JSFunction::kFinishSlackTracking)); |
383 __ b(ne, &allocate); | 388 __ bne(&allocate); |
384 | 389 |
385 __ push(r1); | 390 __ push(r4); |
386 | 391 |
387 __ Push(r2, r1); // r1 = constructor | 392 __ Push(r5, r4); // r4 = constructor |
388 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); | 393 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); |
389 | 394 |
390 __ pop(r2); | 395 __ Pop(r4, r5); |
391 __ pop(r1); | |
392 | 396 |
393 __ bind(&allocate); | 397 __ bind(&allocate); |
394 } | 398 } |
395 | 399 |
396 // Now allocate the JSObject on the heap. | 400 // Now allocate the JSObject on the heap. |
397 // r1: constructor function | 401 // r4: constructor function |
398 // r2: initial map | 402 // r5: initial map |
399 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset)); | 403 __ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset)); |
400 if (create_memento) { | 404 if (create_memento) { |
401 __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize)); | 405 __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize)); |
402 } | 406 } |
403 | 407 |
404 __ Allocate(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS); | 408 __ Allocate(r6, r7, r8, r9, &rt_call, SIZE_IN_WORDS); |
405 | 409 |
406 // Allocated the JSObject, now initialize the fields. Map is set to | 410 // Allocated the JSObject, now initialize the fields. Map is set to |
407 // initial map and properties and elements are set to empty fixed array. | 411 // initial map and properties and elements are set to empty fixed array. |
408 // r1: constructor function | 412 // r4: constructor function |
409 // r2: initial map | 413 // r5: initial map |
410 // r3: object size (not including memento if create_memento) | 414 // r6: object size (not including memento if create_memento) |
411 // r4: JSObject (not tagged) | 415 // r7: JSObject (not tagged) |
412 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); | 416 __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex); |
413 __ mov(r5, r4); | 417 __ mr(r8, r7); |
414 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); | 418 __ StoreP(r5, MemOperand(r8, JSObject::kMapOffset)); |
415 __ str(r2, MemOperand(r5, kPointerSize, PostIndex)); | 419 __ StoreP(r9, MemOperand(r8, JSObject::kPropertiesOffset)); |
416 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset); | 420 __ StoreP(r9, MemOperand(r8, JSObject::kElementsOffset)); |
417 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); | 421 __ addi(r8, r8, Operand(JSObject::kElementsOffset + kPointerSize)); |
418 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset); | 422 |
419 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); | 423 __ ShiftLeftImm(r9, r6, Operand(kPointerSizeLog2)); |
| 424 __ add(r9, r7, r9); // End of object. |
420 | 425 |
421 // Fill all the in-object properties with the appropriate filler. | 426 // Fill all the in-object properties with the appropriate filler. |
422 // r1: constructor function | 427 // r4: constructor function |
423 // r2: initial map | 428 // r5: initial map |
424 // r3: object size (in words, including memento if create_memento) | 429 // r6: object size (in words, including memento if create_memento) |
425 // r4: JSObject (not tagged) | 430 // r7: JSObject (not tagged) |
426 // r5: First in-object property of JSObject (not tagged) | 431 // r8: First in-object property of JSObject (not tagged) |
| 432 // r9: End of object |
427 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); | 433 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); |
428 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); | 434 __ LoadRoot(r10, Heap::kUndefinedValueRootIndex); |
429 | 435 |
430 if (!is_api_function) { | 436 if (!is_api_function) { |
431 Label no_inobject_slack_tracking; | 437 Label no_inobject_slack_tracking; |
432 | 438 |
433 // Check if slack tracking is enabled. | 439 // Check if slack tracking is enabled. |
434 __ ldr(ip, FieldMemOperand(r2, Map::kBitField3Offset)); | 440 STATIC_ASSERT(JSFunction::kNoSlackTracking == 0); |
435 __ DecodeField<Map::ConstructionCount>(ip); | 441 __ cmpi(r11, Operand::Zero()); // JSFunction::kNoSlackTracking |
436 __ cmp(ip, Operand(JSFunction::kNoSlackTracking)); | 442 __ beq(&no_inobject_slack_tracking); |
437 __ b(eq, &no_inobject_slack_tracking); | |
438 | 443 |
439 // Allocate object with a slack. | 444 // Allocate object with a slack. |
440 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset)); | 445 __ lbz(r3, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset)); |
441 __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, | |
442 kBitsPerByte); | |
443 __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2)); | |
444 // r0: offset of first field after pre-allocated fields | |
445 if (FLAG_debug_code) { | 446 if (FLAG_debug_code) { |
446 __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. | 447 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2)); |
447 __ cmp(r0, ip); | 448 __ add(r0, r8, r0); |
| 449 // r0: offset of first field after pre-allocated fields |
| 450 __ cmp(r0, r9); |
448 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields); | 451 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields); |
449 } | 452 } |
450 __ InitializeFieldsWithFiller(r5, r0, r6); | 453 { Label done; |
| 454 __ cmpi(r3, Operand::Zero()); |
| 455 __ beq(&done); |
| 456 __ InitializeNFieldsWithFiller(r8, r3, r10); |
| 457 __ bind(&done); |
| 458 } |
451 // To allow for truncation. | 459 // To allow for truncation. |
452 __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex); | 460 __ LoadRoot(r10, Heap::kOnePointerFillerMapRootIndex); |
453 // Fill the remaining fields with one pointer filler map. | 461 // Fill the remaining fields with one pointer filler map. |
454 | 462 |
455 __ bind(&no_inobject_slack_tracking); | 463 __ bind(&no_inobject_slack_tracking); |
456 } | 464 } |
457 | 465 |
458 if (create_memento) { | 466 if (create_memento) { |
459 __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize)); | 467 __ subi(r3, r9, Operand(AllocationMemento::kSize)); |
460 __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2)); // End of object. | 468 __ InitializeFieldsWithFiller(r8, r3, r10); |
461 __ InitializeFieldsWithFiller(r5, r0, r6); | |
462 | 469 |
463 // Fill in memento fields. | 470 // Fill in memento fields. |
464 // r5: points to the allocated but uninitialized memento. | 471 // r8: points to the allocated but uninitialized memento. |
465 __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex); | 472 __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex); |
466 ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset); | 473 __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset)); |
467 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); | |
468 // Load the AllocationSite | 474 // Load the AllocationSite |
469 __ ldr(r6, MemOperand(sp, 2 * kPointerSize)); | 475 __ LoadP(r10, MemOperand(sp, 2 * kPointerSize)); |
470 ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset); | 476 __ StoreP(r10, |
471 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); | 477 MemOperand(r8, AllocationMemento::kAllocationSiteOffset)); |
| 478 __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset + |
| 479 kPointerSize)); |
472 } else { | 480 } else { |
473 __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. | 481 __ InitializeFieldsWithFiller(r8, r9, r10); |
474 __ InitializeFieldsWithFiller(r5, r0, r6); | |
475 } | 482 } |
476 | 483 |
477 // Add the object tag to make the JSObject real, so that we can continue | 484 // Add the object tag to make the JSObject real, so that we can continue |
478 // and jump into the continuation code at any time from now on. Any | 485 // and jump into the continuation code at any time from now on. Any |
479 // failures need to undo the allocation, so that the heap is in a | 486 // failures need to undo the allocation, so that the heap is in a |
480 // consistent state and verifiable. | 487 // consistent state and verifiable. |
481 __ add(r4, r4, Operand(kHeapObjectTag)); | 488 __ addi(r7, r7, Operand(kHeapObjectTag)); |
482 | 489 |
483 // Check if a non-empty properties array is needed. Continue with | 490 // Check if a non-empty properties array is needed. Continue with |
484 // allocated object if not fall through to runtime call if it is. | 491 // allocated object if not; fall through to runtime call if it is. |
485 // r1: constructor function | 492 // r4: constructor function |
486 // r4: JSObject | 493 // r7: JSObject |
487 // r5: start of next object (not tagged) | 494 // r8: start of next object (not tagged) |
488 __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset)); | 495 __ lbz(r6, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset)); |
489 // The field instance sizes contains both pre-allocated property fields | 496 // The field instance sizes contains both pre-allocated property fields |
490 // and in-object properties. | 497 // and in-object properties. |
491 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset)); | 498 __ lbz(r0, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset)); |
492 __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, | 499 __ add(r6, r6, r0); |
493 kBitsPerByte); | 500 __ lbz(r0, FieldMemOperand(r5, Map::kInObjectPropertiesOffset)); |
494 __ add(r3, r3, Operand(r6)); | 501 __ sub(r6, r6, r0, LeaveOE, SetRC); |
495 __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte, | |
496 kBitsPerByte); | |
497 __ sub(r3, r3, Operand(r6), SetCC); | |
498 | 502 |
499 // Done if no extra properties are to be allocated. | 503 // Done if no extra properties are to be allocated. |
500 __ b(eq, &allocated); | 504 __ beq(&allocated, cr0); |
501 __ Assert(pl, kPropertyAllocationCountFailed); | 505 __ Assert(ge, kPropertyAllocationCountFailed, cr0); |
502 | 506 |
503 // Scale the number of elements by pointer size and add the header for | 507 // Scale the number of elements by pointer size and add the header for |
504 // FixedArrays to the start of the next object calculation from above. | 508 // FixedArrays to the start of the next object calculation from above. |
505 // r1: constructor | 509 // r4: constructor |
506 // r3: number of elements in properties array | 510 // r6: number of elements in properties array |
507 // r4: JSObject | 511 // r7: JSObject |
508 // r5: start of next object | 512 // r8: start of next object |
509 __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize)); | 513 __ addi(r3, r6, Operand(FixedArray::kHeaderSize / kPointerSize)); |
510 __ Allocate( | 514 __ Allocate( |
511 r0, | 515 r3, |
| 516 r8, |
| 517 r9, |
512 r5, | 518 r5, |
513 r6, | |
514 r2, | |
515 &undo_allocation, | 519 &undo_allocation, |
516 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS)); | 520 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS)); |
517 | 521 |
518 // Initialize the FixedArray. | 522 // Initialize the FixedArray. |
519 // r1: constructor | 523 // r4: constructor |
520 // r3: number of elements in properties array | 524 // r6: number of elements in properties array |
521 // r4: JSObject | 525 // r7: JSObject |
522 // r5: FixedArray (not tagged) | 526 // r8: FixedArray (not tagged) |
523 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex); | 527 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex); |
524 __ mov(r2, r5); | 528 __ mr(r5, r8); |
525 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); | 529 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); |
526 __ str(r6, MemOperand(r2, kPointerSize, PostIndex)); | 530 __ StoreP(r9, MemOperand(r5)); |
527 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); | 531 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); |
528 __ SmiTag(r0, r3); | 532 __ SmiTag(r3, r6); |
529 __ str(r0, MemOperand(r2, kPointerSize, PostIndex)); | 533 __ StorePU(r3, MemOperand(r5, kPointerSize)); |
| 534 __ addi(r5, r5, Operand(kPointerSize)); |
530 | 535 |
531 // Initialize the fields to undefined. | 536 // Initialize the fields to undefined. |
532 // r1: constructor function | 537 // r4: constructor function |
533 // r2: First element of FixedArray (not tagged) | 538 // r5: First element of FixedArray (not tagged) |
534 // r3: number of elements in properties array | 539 // r6: number of elements in properties array |
535 // r4: JSObject | 540 // r7: JSObject |
536 // r5: FixedArray (not tagged) | 541 // r8: FixedArray (not tagged) |
537 __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object. | |
538 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize); | 542 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize); |
539 { Label loop, entry; | 543 { Label done; |
540 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 544 __ cmpi(r6, Operand::Zero()); |
541 __ b(&entry); | 545 __ beq(&done); |
542 __ bind(&loop); | 546 if (!is_api_function || create_memento) { |
543 __ str(r0, MemOperand(r2, kPointerSize, PostIndex)); | 547 __ LoadRoot(r10, Heap::kUndefinedValueRootIndex); |
544 __ bind(&entry); | 548 } else if (FLAG_debug_code) { |
545 __ cmp(r2, r6); | 549 __ LoadRoot(r11, Heap::kUndefinedValueRootIndex); |
546 __ b(lt, &loop); | 550 __ cmp(r10, r11); |
| 551 __ Assert(eq, kUndefinedValueNotLoaded); |
| 552 } |
| 553 __ InitializeNFieldsWithFiller(r5, r6, r10); |
| 554 __ bind(&done); |
547 } | 555 } |
548 | 556 |
549 // Store the initialized FixedArray into the properties field of | 557 // Store the initialized FixedArray into the properties field of |
550 // the JSObject | 558 // the JSObject |
551 // r1: constructor function | 559 // r4: constructor function |
552 // r4: JSObject | 560 // r7: JSObject |
553 // r5: FixedArray (not tagged) | 561 // r8: FixedArray (not tagged) |
554 __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag. | 562 __ addi(r8, r8, Operand(kHeapObjectTag)); // Add the heap tag. |
555 __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset)); | 563 __ StoreP(r8, FieldMemOperand(r7, JSObject::kPropertiesOffset), r0); |
556 | 564 |
557 // Continue with JSObject being successfully allocated | 565 // Continue with JSObject being successfully allocated |
558 // r1: constructor function | 566 // r4: constructor function |
559 // r4: JSObject | 567 // r7: JSObject |
560 __ jmp(&allocated); | 568 __ b(&allocated); |
561 | 569 |
562 // Undo the setting of the new top so that the heap is verifiable. For | 570 // Undo the setting of the new top so that the heap is verifiable. For |
563 // example, the map's unused properties potentially do not match the | 571 // example, the map's unused properties potentially do not match the |
564 // allocated objects unused properties. | 572 // allocated object's unused properties. |
565 // r4: JSObject (previous new top) | 573 // r7: JSObject (previous new top) |
566 __ bind(&undo_allocation); | 574 __ bind(&undo_allocation); |
567 __ UndoAllocationInNewSpace(r4, r5); | 575 __ UndoAllocationInNewSpace(r7, r8); |
568 } | 576 } |
569 | 577 |
570 // Allocate the new receiver object using the runtime call. | 578 // Allocate the new receiver object using the runtime call. |
571 // r1: constructor function | 579 // r4: constructor function |
572 __ bind(&rt_call); | 580 __ bind(&rt_call); |
573 if (create_memento) { | 581 if (create_memento) { |
574 // Get the cell or allocation site. | 582 // Get the cell or allocation site. |
575 __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); | 583 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize)); |
576 __ push(r2); | 584 __ push(r5); |
577 } | 585 } |
578 | 586 |
579 __ push(r1); // argument for Runtime_NewObject | 587 __ push(r4); // argument for Runtime_NewObject |
580 if (create_memento) { | 588 if (create_memento) { |
581 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); | 589 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); |
582 } else { | 590 } else { |
583 __ CallRuntime(Runtime::kNewObject, 1); | 591 __ CallRuntime(Runtime::kNewObject, 1); |
584 } | 592 } |
585 __ mov(r4, r0); | 593 __ mr(r7, r3); |
586 | 594 |
587 // If we ended up using the runtime, and we want a memento, then the | 595 // If we ended up using the runtime, and we want a memento, then the |
588 // runtime call made it for us, and we shouldn't do create count | 596 // runtime call made it for us, and we shouldn't do create count |
589 // increment. | 597 // increment. |
590 Label count_incremented; | 598 Label count_incremented; |
591 if (create_memento) { | 599 if (create_memento) { |
592 __ jmp(&count_incremented); | 600 __ b(&count_incremented); |
593 } | 601 } |
594 | 602 |
595 // Receiver for constructor call allocated. | 603 // Receiver for constructor call allocated. |
596 // r4: JSObject | 604 // r7: JSObject |
597 __ bind(&allocated); | 605 __ bind(&allocated); |
598 | 606 |
599 if (create_memento) { | 607 if (create_memento) { |
600 __ ldr(r2, MemOperand(sp, kPointerSize * 2)); | 608 __ LoadP(r5, MemOperand(sp, kPointerSize * 2)); |
601 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 609 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex); |
602 __ cmp(r2, r5); | 610 __ cmp(r5, r8); |
603 __ b(eq, &count_incremented); | 611 __ beq(&count_incremented); |
604 // r2 is an AllocationSite. We are creating a memento from it, so we | 612 // r5 is an AllocationSite. We are creating a memento from it, so we |
605 // need to increment the memento create count. | 613 // need to increment the memento create count. |
606 __ ldr(r3, FieldMemOperand(r2, | 614 __ LoadP(r6, FieldMemOperand(r5, |
607 AllocationSite::kPretenureCreateCountOffset)); | 615 AllocationSite::kPretenureCreateCountOffset)); |
608 __ add(r3, r3, Operand(Smi::FromInt(1))); | 616 __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0); |
609 __ str(r3, FieldMemOperand(r2, | 617 __ StoreP(r6, FieldMemOperand(r5, |
610 AllocationSite::kPretenureCreateCountOffset)); | 618 AllocationSite::kPretenureCreateCountOffset), |
| 619 r0); |
611 __ bind(&count_incremented); | 620 __ bind(&count_incremented); |
612 } | 621 } |
613 | 622 |
614 __ push(r4); | 623 __ Push(r7, r7); |
615 __ push(r4); | |
616 | 624 |
617 // Reload the number of arguments and the constructor from the stack. | 625 // Reload the number of arguments and the constructor from the stack. |
618 // sp[0]: receiver | 626 // sp[0]: receiver |
619 // sp[1]: receiver | 627 // sp[1]: receiver |
620 // sp[2]: constructor function | 628 // sp[2]: constructor function |
621 // sp[3]: number of arguments (smi-tagged) | 629 // sp[3]: number of arguments (smi-tagged) |
622 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); | 630 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize)); |
623 __ ldr(r3, MemOperand(sp, 3 * kPointerSize)); | 631 __ LoadP(r6, MemOperand(sp, 3 * kPointerSize)); |
624 | 632 |
625 // Set up pointer to last argument. | 633 // Set up pointer to last argument. |
626 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); | 634 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset)); |
627 | 635 |
628 // Set up number of arguments for function call below | 636 // Set up number of arguments for function call below |
629 __ SmiUntag(r0, r3); | 637 __ SmiUntag(r3, r6); |
630 | 638 |
631 // Copy arguments and receiver to the expression stack. | 639 // Copy arguments and receiver to the expression stack. |
632 // r0: number of arguments | 640 // r3: number of arguments |
633 // r1: constructor function | 641 // r4: constructor function |
634 // r2: address of last argument (caller sp) | 642 // r5: address of last argument (caller sp) |
635 // r3: number of arguments (smi-tagged) | 643 // r6: number of arguments (smi-tagged) |
636 // sp[0]: receiver | 644 // sp[0]: receiver |
637 // sp[1]: receiver | 645 // sp[1]: receiver |
638 // sp[2]: constructor function | 646 // sp[2]: constructor function |
639 // sp[3]: number of arguments (smi-tagged) | 647 // sp[3]: number of arguments (smi-tagged) |
640 Label loop, entry; | 648 Label loop, no_args; |
641 __ b(&entry); | 649 __ cmpi(r3, Operand::Zero()); |
| 650 __ beq(&no_args); |
| 651 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
| 652 __ mtctr(r3); |
642 __ bind(&loop); | 653 __ bind(&loop); |
643 __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1)); | 654 __ subi(ip, ip, Operand(kPointerSize)); |
644 __ push(ip); | 655 __ LoadPX(r0, MemOperand(r5, ip)); |
645 __ bind(&entry); | 656 __ push(r0); |
646 __ sub(r3, r3, Operand(2), SetCC); | 657 __ bdnz(&loop); |
647 __ b(ge, &loop); | 658 __ bind(&no_args); |
648 | 659 |
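The argument-copy loop changes shape as well: ARM walks the smi-tagged count down with sub ... SetCC and an indexed ldr, while the PPC version pre-computes the byte offset, loads the trip count into the count register with mtctr, and lets bdnz decrement it and branch in a single instruction. Restated with comments (same registers as above):

  __ cmpi(r3, Operand::Zero());
  __ beq(&no_args);                                   // zero args: skip the loop
  __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); // byte offset past last arg
  __ mtctr(r3);                                       // CTR = argument count
  __ bind(&loop);
  __ subi(ip, ip, Operand(kPointerSize));             // step down one slot
  __ LoadPX(r0, MemOperand(r5, ip));                  // indexed load of arg[i]
  __ push(r0);
  __ bdnz(&loop);                                     // --CTR; branch while != 0
  __ bind(&no_args);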
649 // Call the function. | 660 // Call the function. |
650 // r0: number of arguments | 661 // r3: number of arguments |
651 // r1: constructor function | 662 // r4: constructor function |
652 if (is_api_function) { | 663 if (is_api_function) { |
653 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 664 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); |
654 Handle<Code> code = | 665 Handle<Code> code = |
655 masm->isolate()->builtins()->HandleApiCallConstruct(); | 666 masm->isolate()->builtins()->HandleApiCallConstruct(); |
656 __ Call(code, RelocInfo::CODE_TARGET); | 667 __ Call(code, RelocInfo::CODE_TARGET); |
657 } else { | 668 } else { |
658 ParameterCount actual(r0); | 669 ParameterCount actual(r3); |
659 __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper()); | 670 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper()); |
660 } | 671 } |
661 | 672 |
662 // Store offset of return address for deoptimizer. | 673 // Store offset of return address for deoptimizer. |
663 if (!is_api_function) { | 674 if (!is_api_function) { |
664 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset()); | 675 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset()); |
665 } | 676 } |
666 | 677 |
667 // Restore context from the frame. | 678 // Restore context from the frame. |
668 // r0: result | 679 // r3: result |
669 // sp[0]: receiver | 680 // sp[0]: receiver |
670 // sp[1]: constructor function | 681 // sp[1]: constructor function |
671 // sp[2]: number of arguments (smi-tagged) | 682 // sp[2]: number of arguments (smi-tagged) |
672 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 683 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
673 | 684 |
674 // If the result is an object (in the ECMA sense), we should get rid | 685 // If the result is an object (in the ECMA sense), we should get rid |
675 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 | 686 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 |
676 // on page 74. | 687 // on page 74. |
677 Label use_receiver, exit; | 688 Label use_receiver, exit; |
678 | 689 |
679 // If the result is a smi, it is *not* an object in the ECMA sense. | 690 // If the result is a smi, it is *not* an object in the ECMA sense. |
680 // r0: result | 691 // r3: result |
681 // sp[0]: receiver (newly allocated object) | 692 // sp[0]: receiver (newly allocated object) |
682 // sp[1]: constructor function | 693 // sp[1]: constructor function |
683 // sp[2]: number of arguments (smi-tagged) | 694 // sp[2]: number of arguments (smi-tagged) |
684 __ JumpIfSmi(r0, &use_receiver); | 695 __ JumpIfSmi(r3, &use_receiver); |
685 | 696 |
686 // If the type of the result (stored in its map) is less than | 697 // If the type of the result (stored in its map) is less than |
687 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. | 698 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. |
688 __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE); | 699 __ CompareObjectType(r3, r4, r6, FIRST_SPEC_OBJECT_TYPE); |
689 __ b(ge, &exit); | 700 __ bge(&exit); |
690 | 701 |
691 // Throw away the result of the constructor invocation and use the | 702 // Throw away the result of the constructor invocation and use the |
692 // on-stack receiver as the result. | 703 // on-stack receiver as the result. |
693 __ bind(&use_receiver); | 704 __ bind(&use_receiver); |
694 __ ldr(r0, MemOperand(sp)); | 705 __ LoadP(r3, MemOperand(sp)); |
695 | 706 |
696 // Remove receiver from the stack, remove caller arguments, and | 707 // Remove receiver from the stack, remove caller arguments, and |
697 // return. | 708 // return. |
698 __ bind(&exit); | 709 __ bind(&exit); |
699 // r0: result | 710 // r3: result |
700 // sp[0]: receiver (newly allocated object) | 711 // sp[0]: receiver (newly allocated object) |
701 // sp[1]: constructor function | 712 // sp[1]: constructor function |
702 // sp[2]: number of arguments (smi-tagged) | 713 // sp[2]: number of arguments (smi-tagged) |
703 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); | 714 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize)); |
704 | 715 |
705 // Leave construct frame. | 716 // Leave construct frame. |
706 } | 717 } |
707 | 718 |
708 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1)); | 719 __ SmiToPtrArrayOffset(r4, r4); |
709 __ add(sp, sp, Operand(kPointerSize)); | 720 __ add(sp, sp, r4); |
710 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2); | 721 __ addi(sp, sp, Operand(kPointerSize)); |
711 __ Jump(lr); | 722 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5); |
| 723 __ blr(); |
712 } | 724 } |
713 | 725 |
714 | 726 |
715 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { | 727 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { |
716 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new); | 728 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new); |
717 } | 729 } |
718 | 730 |
719 | 731 |
720 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { | 732 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { |
721 Generate_JSConstructStubHelper(masm, true, false); | 733 Generate_JSConstructStubHelper(masm, true, false); |
722 } | 734 } |
723 | 735 |
724 | 736 |
725 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 737 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
726 bool is_construct) { | 738 bool is_construct) { |
727 // Called from Generate_JS_Entry | 739 // Called from Generate_JS_Entry |
728 // r0: code entry | 740 // r3: code entry |
729 // r1: function | 741 // r4: function |
730 // r2: receiver | 742 // r5: receiver |
731 // r3: argc | 743 // r6: argc |
732 // r4: argv | 744 // r7: argv |
733 // r5-r6, r8 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered | 745 // r0, r8-r9 and cp may be clobbered |
734 ProfileEntryHookStub::MaybeCallEntryHook(masm); | 746 ProfileEntryHookStub::MaybeCallEntryHook(masm); |
735 | 747 |
736 // Clear the context before we push it when entering the internal frame. | 748 // Clear the context before we push it when entering the internal frame. |
737 __ mov(cp, Operand::Zero()); | 749 __ li(cp, Operand::Zero()); |
738 | 750 |
739 // Enter an internal frame. | 751 // Enter an internal frame. |
740 { | 752 { |
741 FrameScope scope(masm, StackFrame::INTERNAL); | 753 FrameScope scope(masm, StackFrame::INTERNAL); |
742 | 754 |
743 // Set up the context from the function argument. | 755 // Set up the context from the function argument. |
744 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 756 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); |
745 | 757 |
746 __ InitializeRootRegister(); | 758 __ InitializeRootRegister(); |
747 | 759 |
748 // Push the function and the receiver onto the stack. | 760 // Push the function and the receiver onto the stack. |
749 __ push(r1); | 761 __ push(r4); |
750 __ push(r2); | 762 __ push(r5); |
751 | 763 |
752 // Copy arguments to the stack in a loop. | 764 // Copy arguments to the stack in a loop. |
753 // r1: function | 765 // r4: function |
754 // r3: argc | 766 // r6: argc |
755 // r4: argv, i.e. points to first arg | 767 // r7: argv, i.e. points to first arg |
756 Label loop, entry; | 768 Label loop, entry; |
757 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2)); | 769 __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2)); |
758 // r2 points past last arg. | 770 __ add(r5, r7, r0); |
| 771 // r5 points past last arg. |
759 __ b(&entry); | 772 __ b(&entry); |
760 __ bind(&loop); | 773 __ bind(&loop); |
761 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter | 774 __ LoadP(r8, MemOperand(r7)); // read next parameter |
762 __ ldr(r0, MemOperand(r0)); // dereference handle | 775 __ addi(r7, r7, Operand(kPointerSize)); |
| 776 __ LoadP(r0, MemOperand(r8)); // dereference handle |
763 __ push(r0); // push parameter | 777 __ push(r0); // push parameter |
764 __ bind(&entry); | 778 __ bind(&entry); |
765 __ cmp(r4, r2); | 779 __ cmp(r7, r5); |
766 __ b(ne, &loop); | 780 __ bne(&loop); |
767 | 781 |
768 // Initialize all JavaScript callee-saved registers, since they will be seen | 782 // Initialize all JavaScript callee-saved registers, since they will be seen |
769 // by the garbage collector as part of handlers. | 783 // by the garbage collector as part of handlers. |
770 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); | 784 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); |
771 __ mov(r5, Operand(r4)); | 785 __ mr(r14, r7); |
772 __ mov(r6, Operand(r4)); | 786 __ mr(r15, r7); |
773 if (!FLAG_enable_ool_constant_pool) { | 787 __ mr(r16, r7); |
774 __ mov(r8, Operand(r4)); | 788 __ mr(r17, r7); |
775 } | |
776 if (kR9Available == 1) { | |
777 __ mov(r9, Operand(r4)); | |
778 } | |
779 | 789 |
780 // Invoke the code and pass argc as r0. | 790 // Invoke the code and pass argc as r3. |
781 __ mov(r0, Operand(r3)); | 791 __ mr(r3, r6); |
782 if (is_construct) { | 792 if (is_construct) { |
783 // No type feedback cell is available | 793 // No type feedback cell is available |
784 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 794 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
785 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); | 795 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); |
786 __ CallStub(&stub); | 796 __ CallStub(&stub); |
787 } else { | 797 } else { |
788 ParameterCount actual(r0); | 798 ParameterCount actual(r3); |
789 __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper()); | 799 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper()); |
790 } | 800 } |
791 // Exit the JS frame and remove the parameters (except function), and | 801 // Exit the JS frame and remove the parameters (except function), and |
792 // return. | 802 // return. |
793 // Respect ABI stack constraint. | |
794 } | 803 } |
795 __ Jump(lr); | 804 __ blr(); |
796 | 805 |
797 // r0: result | 806 // r3: result |
798 } | 807 } |
799 | 808 |
800 | 809 |
801 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 810 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
802 Generate_JSEntryTrampolineHelper(masm, false); | 811 Generate_JSEntryTrampolineHelper(masm, false); |
803 } | 812 } |
804 | 813 |
805 | 814 |
806 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 815 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
807 Generate_JSEntryTrampolineHelper(masm, true); | 816 Generate_JSEntryTrampolineHelper(masm, true); |
808 } | 817 } |
809 | 818 |
810 | 819 |
811 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | 820 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { |
812 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); | 821 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); |
813 GenerateTailCallToReturnedCode(masm); | 822 GenerateTailCallToReturnedCode(masm); |
814 } | 823 } |
815 | 824 |
816 | 825 |
817 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { | 826 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { |
818 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 827 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
819 // Push a copy of the function onto the stack. | 828 // Push a copy of the function onto the stack. |
820 __ push(r1); | |
821 // Push function as parameter to the runtime call. | 829 // Push function as parameter to the runtime call. |
822 __ Push(r1); | 830 __ Push(r4, r4); |
823 // Whether to compile in a background thread. | 831 // Whether to compile in a background thread. |
824 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); | 832 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
825 | 833 |
826 __ CallRuntime(Runtime::kCompileOptimized, 2); | 834 __ CallRuntime(Runtime::kCompileOptimized, 2); |
827 // Restore receiver. | 835 // Restore receiver. |
828 __ pop(r1); | 836 __ pop(r4); |
829 } | 837 } |
830 | 838 |
831 | 839 |
832 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 840 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
833 CallCompileOptimized(masm, false); | 841 CallCompileOptimized(masm, false); |
834 GenerateTailCallToReturnedCode(masm); | 842 GenerateTailCallToReturnedCode(masm); |
835 } | 843 } |
836 | 844 |
837 | 845 |
838 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | 846 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { |
839 CallCompileOptimized(masm, true); | 847 CallCompileOptimized(masm, true); |
840 GenerateTailCallToReturnedCode(masm); | 848 GenerateTailCallToReturnedCode(masm); |
841 } | 849 } |
842 | 850 |
843 | 851 |
844 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 852 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { |
845 // For now, we are relying on the fact that make_code_young doesn't do any | 853 // For now, we are relying on the fact that make_code_young doesn't do any |
846 // garbage collection which allows us to save/restore the registers without | 854 // garbage collection which allows us to save/restore the registers without |
847 // worrying about which of them contain pointers. We also don't build an | 855 // worrying about which of them contain pointers. We also don't build an |
848 // internal frame to make the code faster, since we shouldn't have to do stack | 856 // internal frame to make the code faster, since we shouldn't have to do stack |
849 // crawls in MakeCodeYoung. This seems a bit fragile. | 857 // crawls in MakeCodeYoung. This seems a bit fragile. |
850 | 858 |
| 859 __ mflr(r3); |
| 860 // Adjust r3 to point to the start of the PlatformCodeAge sequence |
| 861 __ subi(r3, r3, Operand(kCodeAgingPatchDelta)); |
| 862 |
851 // The following registers must be saved and restored when calling through to | 863 // The following registers must be saved and restored when calling through to |
852 // the runtime: | 864 // the runtime: |
853 // r0 - contains return address (beginning of patch sequence) | 865 // r3 - contains return address (beginning of patch sequence) |
854 // r1 - isolate | 866 // r4 - isolate |
| 867 // ip - return address |
855 FrameScope scope(masm, StackFrame::MANUAL); | 868 FrameScope scope(masm, StackFrame::MANUAL); |
856 __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit()); | 869 __ MultiPush(ip.bit() | r3.bit() | r4.bit() | fp.bit()); |
857 __ PrepareCallCFunction(2, 0, r2); | 870 __ PrepareCallCFunction(2, 0, r5); |
858 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate()))); | 871 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate()))); |
859 __ CallCFunction( | 872 __ CallCFunction( |
860 ExternalReference::get_make_code_young_function(masm->isolate()), 2); | 873 ExternalReference::get_make_code_young_function(masm->isolate()), 2); |
861 __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit()); | 874 __ MultiPop(ip.bit() | r3.bit() | r4.bit() | fp.bit()); |
862 __ mov(pc, r0); | 875 __ mtlr(ip); |
| 876 __ Jump(r3); |
863 } | 877 } |
864 | 878 |
865 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ | 879 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ |
866 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ | 880 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ |
867 MacroAssembler* masm) { \ | 881 MacroAssembler* masm) { \ |
868 GenerateMakeCodeYoungAgainCommon(masm); \ | 882 GenerateMakeCodeYoungAgainCommon(masm); \ |
869 } \ | 883 } \ |
870 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ | 884 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ |
871 MacroAssembler* masm) { \ | 885 MacroAssembler* masm) { \ |
872 GenerateMakeCodeYoungAgainCommon(masm); \ | 886 GenerateMakeCodeYoungAgainCommon(masm); \ |
873 } | 887 } |
874 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) | 888 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) |
875 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR | 889 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR |
876 | 890 |
877 | 891 |
878 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { | 892 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { |
879 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact | 893 // For now, we are relying on the fact that make_code_young doesn't do any |
880 // that make_code_young doesn't do any garbage collection which allows us to | 894 // garbage collection which allows us to save/restore the registers without |
881 // save/restore the registers without worrying about which of them contain | 895 // worrying about which of them contain pointers. We also don't build an |
882 // pointers. | 896 // internal frame to make the code faster, since we shouldn't have to do stack |
| 897 // crawls in MakeCodeYoung. This seems a bit fragile. |
| 898 |
| 899 __ mflr(r3); |
| 900 // Adjust r3 to point to the start of the PlatformCodeAge sequence |
| 901 __ subi(r3, r3, Operand(kCodeAgingPatchDelta)); |
883 | 902 |
884 // The following registers must be saved and restored when calling through to | 903 // The following registers must be saved and restored when calling through to |
885 // the runtime: | 904 // the runtime: |
886 // r0 - contains return address (beginning of patch sequence) | 905 // r3 - contains return address (beginning of patch sequence) |
887 // r1 - isolate | 906 // r4 - isolate |
| 907 // ip - return address |
888 FrameScope scope(masm, StackFrame::MANUAL); | 908 FrameScope scope(masm, StackFrame::MANUAL); |
889 __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit()); | 909 __ MultiPush(ip.bit() | r3.bit() | r4.bit() | fp.bit()); |
890 __ PrepareCallCFunction(2, 0, r2); | 910 __ PrepareCallCFunction(2, 0, r5); |
891 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate()))); | 911 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate()))); |
892 __ CallCFunction(ExternalReference::get_mark_code_as_executed_function( | 912 __ CallCFunction(ExternalReference::get_mark_code_as_executed_function( |
893 masm->isolate()), 2); | 913 masm->isolate()), 2); |
894 __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit()); | 914 __ MultiPop(ip.bit() | r3.bit() | r4.bit() | fp.bit()); |
| 915 __ mtlr(ip); |
895 | 916 |
896 // Perform prologue operations usually performed by the young code stub. | 917 // Perform prologue operations usually performed by the young code stub. |
897 __ PushFixedFrame(r1); | 918 __ PushFixedFrame(r4); |
898 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 919 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
899 | 920 |
900 // Jump to point after the code-age stub. | 921 // Jump to point after the code-age stub. |
901 __ add(r0, r0, Operand(kNoCodeAgeSequenceLength)); | 922 __ addi(r3, r3, Operand(kNoCodeAgeSequenceLength)); |
902 __ mov(pc, r0); | 923 __ Jump(r3); |
903 } | 924 } |
904 | 925 |
905 | 926 |
906 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { | 927 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { |
907 GenerateMakeCodeYoungAgainCommon(masm); | 928 GenerateMakeCodeYoungAgainCommon(masm); |
908 } | 929 } |
909 | 930 |
910 | 931 |
911 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 932 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
912 SaveFPRegsMode save_doubles) { | 933 SaveFPRegsMode save_doubles) { |
913 { | 934 { |
914 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 935 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
915 | 936 |
916 // Preserve registers across notification; this is important for compiled | 937 // Preserve registers across notification; this is important for compiled |
917 // stubs that tail call the runtime on deopts passing their parameters in | 938 // stubs that tail call the runtime on deopts passing their parameters in |
918 // registers. | 939 // registers. |
919 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved); | 940 __ MultiPush(kJSCallerSaved | kCalleeSaved); |
920 // Pass the function and deoptimization type to the runtime system. | 941 // Pass the function and deoptimization type to the runtime system. |
921 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | 942 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); |
922 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved); | 943 __ MultiPop(kJSCallerSaved | kCalleeSaved); |
923 } | 944 } |
924 | 945 |
925 __ add(sp, sp, Operand(kPointerSize)); // Ignore state | 946 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state |
926 __ mov(pc, lr); // Jump to miss handler | 947 __ blr(); // Jump to miss handler |
927 } | 948 } |
928 | 949 |
929 | 950 |
930 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 951 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
931 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 952 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
932 } | 953 } |
933 | 954 |
934 | 955 |
935 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 956 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
936 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 957 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
937 } | 958 } |
938 | 959 |
939 | 960 |
940 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 961 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
941 Deoptimizer::BailoutType type) { | 962 Deoptimizer::BailoutType type) { |
942 { | 963 { |
943 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 964 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
944 // Pass the function and deoptimization type to the runtime system. | 965 // Pass the function and deoptimization type to the runtime system. |
945 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); | 966 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type))); |
946 __ push(r0); | 967 __ push(r3); |
947 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 968 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
948 } | 969 } |
949 | 970 |
950 // Get the full codegen state from the stack and untag it -> r6. | 971 // Get the full codegen state from the stack and untag it -> r9. |
951 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); | 972 __ LoadP(r9, MemOperand(sp, 0 * kPointerSize)); |
952 __ SmiUntag(r6); | 973 __ SmiUntag(r9); |
953 // Switch on the state. | 974 // Switch on the state. |
954 Label with_tos_register, unknown_state; | 975 Label with_tos_register, unknown_state; |
955 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS)); | 976 __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS)); |
956 __ b(ne, &with_tos_register); | 977 __ bne(&with_tos_register); |
957 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state. | 978 __ addi(sp, sp, Operand(1 * kPointerSize)); // Remove state. |
958 __ Ret(); | 979 __ Ret(); |
959 | 980 |
960 __ bind(&with_tos_register); | 981 __ bind(&with_tos_register); |
961 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); | 982 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize)); |
962 __ cmp(r6, Operand(FullCodeGenerator::TOS_REG)); | 983 __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG)); |
963 __ b(ne, &unknown_state); | 984 __ bne(&unknown_state); |
964 __ add(sp, sp, Operand(2 * kPointerSize)); // Remove state. | 985 __ addi(sp, sp, Operand(2 * kPointerSize)); // Remove state. |
965 __ Ret(); | 986 __ Ret(); |
966 | 987 |
967 __ bind(&unknown_state); | 988 __ bind(&unknown_state); |
968 __ stop("no cases left"); | 989 __ stop("no cases left"); |
969 } | 990 } |
970 | 991 |
971 | 992 |
972 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | 993 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
973 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 994 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); |
974 } | 995 } |
975 | 996 |
976 | 997 |
977 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { | 998 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { |
978 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); | 999 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); |
979 } | 1000 } |
980 | 1001 |
981 | 1002 |
982 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 1003 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
983 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 1004 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
984 } | 1005 } |
985 | 1006 |
986 | 1007 |
987 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 1008 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
988 // Lookup the function in the JavaScript frame. | 1009 // Lookup the function in the JavaScript frame. |
989 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1010 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
990 { | 1011 { |
991 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1012 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
992 // Pass function as argument. | 1013 // Pass function as argument. |
993 __ push(r0); | 1014 __ push(r3); |
994 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); | 1015 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
995 } | 1016 } |
996 | 1017 |
997 // If the code object is null, just return to the unoptimized code. | 1018 // If the code object is null, just return to the unoptimized code. |
998 Label skip; | 1019 Label skip; |
999 __ cmp(r0, Operand(Smi::FromInt(0))); | 1020 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0); |
1000 __ b(ne, &skip); | 1021 __ bne(&skip); |
1001 __ Ret(); | 1022 __ Ret(); |
1002 | 1023 |
1003 __ bind(&skip); | 1024 __ bind(&skip); |
1004 | 1025 |
1005 // Load deoptimization data from the code object. | 1026 // Load deoptimization data from the code object. |
1006 // <deopt_data> = <code>[#deoptimization_data_offset] | 1027 // <deopt_data> = <code>[#deoptimization_data_offset] |
1007 __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset)); | 1028 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset)); |
1008 | 1029 |
| 1030 #if V8_OOL_CONSTANT_POOL |
1009 { ConstantPoolUnavailableScope constant_pool_unavailable(masm); | 1031 { ConstantPoolUnavailableScope constant_pool_unavailable(masm); |
1010 if (FLAG_enable_ool_constant_pool) { | 1032 __ LoadP(kConstantPoolRegister, |
1011 __ ldr(pp, FieldMemOperand(r0, Code::kConstantPoolOffset)); | 1033 FieldMemOperand(r3, Code::kConstantPoolOffset)); |
1012 } | 1034 #endif |
1013 | 1035 |
1014 // Load the OSR entrypoint offset from the deoptimization data. | 1036 // Load the OSR entrypoint offset from the deoptimization data. |
1015 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] | 1037 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] |
1016 __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt( | 1038 __ LoadP(r4, FieldMemOperand(r4, FixedArray::OffsetOfElementAt( |
1017 DeoptimizationInputData::kOsrPcOffsetIndex))); | 1039 DeoptimizationInputData::kOsrPcOffsetIndex))); |
| 1040 __ SmiUntag(r4); |
1018 | 1041 |
1019 // Compute the target address = code_obj + header_size + osr_offset | 1042 // Compute the target address = code_obj + header_size + osr_offset |
1020 // <entry_addr> = <code_obj> + #header_size + <osr_offset> | 1043 // <entry_addr> = <code_obj> + #header_size + <osr_offset> |
1021 __ add(r0, r0, Operand::SmiUntag(r1)); | 1044 __ add(r3, r3, r4); |
1022 __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1045 __ addi(r0, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 1046 __ mtlr(r0); |
1023 | 1047 |
1024 // And "return" to the OSR entry point of the function. | 1048 // And "return" to the OSR entry point of the function. |
1025 __ Ret(); | 1049 __ Ret(); |
| 1050 #if V8_OOL_CONSTANT_POOL |
1026 } | 1051 } |
| 1052 #endif |
1027 } | 1053 } |
1028 | 1054 |
1029 | 1055 |
1030 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1056 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
1031 // We check the stack limit as indicator that recompilation might be done. | 1057 // We check the stack limit as indicator that recompilation might be done. |
1032 Label ok; | 1058 Label ok; |
1033 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 1059 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
1034 __ cmp(sp, Operand(ip)); | 1060 __ cmpl(sp, ip); |
1035 __ b(hs, &ok); | 1061 __ bge(&ok); |
1036 { | 1062 { |
1037 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1063 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
1038 __ CallRuntime(Runtime::kStackGuard, 0); | 1064 __ CallRuntime(Runtime::kStackGuard, 0); |
1039 } | 1065 } |
1040 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 1066 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
1041 RelocInfo::CODE_TARGET); | 1067 RelocInfo::CODE_TARGET); |
1042 | 1068 |
1043 __ bind(&ok); | 1069 __ bind(&ok); |
1044 __ Ret(); | 1070 __ Ret(); |
1045 } | 1071 } |
1046 | 1072 |
1047 | 1073 |
1048 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 1074 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
1049 // 1. Make sure we have at least one argument. | 1075 // 1. Make sure we have at least one argument. |
1050 // r0: actual number of arguments | 1076 // r3: actual number of arguments |
1051 { Label done; | 1077 { Label done; |
1052 __ cmp(r0, Operand::Zero()); | 1078 __ cmpi(r3, Operand::Zero()); |
1053 __ b(ne, &done); | 1079 __ bne(&done); |
1054 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 1080 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
1055 __ push(r2); | 1081 __ push(r5); |
1056 __ add(r0, r0, Operand(1)); | 1082 __ addi(r3, r3, Operand(1)); |
1057 __ bind(&done); | 1083 __ bind(&done); |
1058 } | 1084 } |
1059 | 1085 |
1060 // 2. Get the function to call (passed as receiver) from the stack, check | 1086 // 2. Get the function to call (passed as receiver) from the stack, check |
1061 // if it is a function. | 1087 // if it is a function. |
1062 // r0: actual number of arguments | 1088 // r3: actual number of arguments |
1063 Label slow, non_function; | 1089 Label slow, non_function; |
1064 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); | 1090 __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2)); |
1065 __ JumpIfSmi(r1, &non_function); | 1091 __ add(r4, sp, r4); |
1066 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); | 1092 __ LoadP(r4, MemOperand(r4)); |
1067 __ b(ne, &slow); | 1093 __ JumpIfSmi(r4, &non_function); |
| 1094 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); |
| 1095 __ bne(&slow); |
1068 | 1096 |
1069 // 3a. Patch the first argument if necessary when calling a function. | 1097 // 3a. Patch the first argument if necessary when calling a function. |
1070 // r0: actual number of arguments | 1098 // r3: actual number of arguments |
1071 // r1: function | 1099 // r4: function |
1072 Label shift_arguments; | 1100 Label shift_arguments; |
1073 __ mov(r4, Operand::Zero()); // indicate regular JS_FUNCTION | 1101 __ li(r7, Operand::Zero()); // indicate regular JS_FUNCTION |
1074 { Label convert_to_object, use_global_proxy, patch_receiver; | 1102 { Label convert_to_object, use_global_proxy, patch_receiver; |
1075 // Change context eagerly in case we need the global receiver. | 1103 // Change context eagerly in case we need the global receiver. |
1076 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 1104 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); |
1077 | 1105 |
1078 // Do not transform the receiver for strict mode functions. | 1106 // Do not transform the receiver for strict mode functions. |
1079 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 1107 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
1080 __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); | 1108 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset)); |
1081 __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | 1109 __ TestBit(r6, |
1082 kSmiTagSize))); | 1110 #if V8_TARGET_ARCH_PPC64 |
1083 __ b(ne, &shift_arguments); | 1111 SharedFunctionInfo::kStrictModeFunction, |
| 1112 #else |
| 1113 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize, |
| 1114 #endif |
| 1115 r0); |
| 1116 __ bne(&shift_arguments, cr0); |
1084 | 1117 |
1085 // Do not transform the receiver for native (Compilerhints already in r3). | 1118 // Do not transform the receiver for native (Compilerhints already in r6). |
1086 __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | 1119 __ TestBit(r6, |
1087 __ b(ne, &shift_arguments); | 1120 #if V8_TARGET_ARCH_PPC64 |
| 1121 SharedFunctionInfo::kNative, |
| 1122 #else |
| 1123 SharedFunctionInfo::kNative + kSmiTagSize, |
| 1124 #endif |
| 1125 r0); |
| 1126 __ bne(&shift_arguments, cr0); |
1088 | 1127 |
1089 // Compute the receiver in sloppy mode. | 1128 // Compute the receiver in sloppy mode. |
1090 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); | 1129 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
1091 __ ldr(r2, MemOperand(r2, -kPointerSize)); | 1130 __ add(r5, sp, ip); |
1092 // r0: actual number of arguments | 1131 __ LoadP(r5, MemOperand(r5, -kPointerSize)); |
1093 // r1: function | 1132 // r3: actual number of arguments |
1094 // r2: first argument | 1133 // r4: function |
1095 __ JumpIfSmi(r2, &convert_to_object); | 1134 // r5: first argument |
| 1135 __ JumpIfSmi(r5, &convert_to_object); |
1096 | 1136 |
1097 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 1137 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); |
1098 __ cmp(r2, r3); | 1138 __ cmp(r5, r6); |
1099 __ b(eq, &use_global_proxy); | 1139 __ beq(&use_global_proxy); |
1100 __ LoadRoot(r3, Heap::kNullValueRootIndex); | 1140 __ LoadRoot(r6, Heap::kNullValueRootIndex); |
1101 __ cmp(r2, r3); | 1141 __ cmp(r5, r6); |
1102 __ b(eq, &use_global_proxy); | 1142 __ beq(&use_global_proxy); |
1103 | 1143 |
1104 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1144 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
1105 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); | 1145 __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE); |
1106 __ b(ge, &shift_arguments); | 1146 __ bge(&shift_arguments); |
1107 | 1147 |
1108 __ bind(&convert_to_object); | 1148 __ bind(&convert_to_object); |
1109 | 1149 |
1110 { | 1150 { |
1111 // Enter an internal frame in order to preserve argument count. | 1151 // Enter an internal frame in order to preserve argument count. |
1112 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1152 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
1113 __ SmiTag(r0); | 1153 __ SmiTag(r3); |
1114 __ push(r0); | 1154 __ Push(r3, r5); |
| 1155 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 1156 __ mr(r5, r3); |
1115 | 1157 |
1116 __ push(r2); | 1158 __ pop(r3); |
1117 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1159 __ SmiUntag(r3); |
1118 __ mov(r2, r0); | |
1119 | |
1120 __ pop(r0); | |
1121 __ SmiUntag(r0); | |
1122 | 1160 |
1123 // Exit the internal frame. | 1161 // Exit the internal frame. |
1124 } | 1162 } |
1125 | 1163 |
1126 // Restore the function to r1, and the flag to r4. | 1164 // Restore the function to r4, and the flag to r7. |
1127 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); | 1165 __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2)); |
1128 __ mov(r4, Operand::Zero()); | 1166 __ add(r7, sp, r7); |
1129 __ jmp(&patch_receiver); | 1167 __ LoadP(r4, MemOperand(r7)); |
| 1168 __ li(r7, Operand::Zero()); |
| 1169 __ b(&patch_receiver); |
1130 | 1170 |
1131 __ bind(&use_global_proxy); | 1171 __ bind(&use_global_proxy); |
1132 __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 1172 __ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
1133 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset)); | 1173 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset)); |
1134 | 1174 |
1135 __ bind(&patch_receiver); | 1175 __ bind(&patch_receiver); |
1136 __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2)); | 1176 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
1137 __ str(r2, MemOperand(r3, -kPointerSize)); | 1177 __ add(r6, sp, ip); |
| 1178 __ StoreP(r5, MemOperand(r6, -kPointerSize)); |
1138 | 1179 |
1139 __ jmp(&shift_arguments); | 1180 __ b(&shift_arguments); |
1140 } | 1181 } |
1141 | 1182 |
1142 // 3b. Check for function proxy. | 1183 // 3b. Check for function proxy. |
1143 __ bind(&slow); | 1184 __ bind(&slow); |
1144 __ mov(r4, Operand(1, RelocInfo::NONE32)); // indicate function proxy | 1185 __ li(r7, Operand(1, RelocInfo::NONE32)); // indicate function proxy |
1145 __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE)); | 1186 __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE)); |
1146 __ b(eq, &shift_arguments); | 1187 __ beq(&shift_arguments); |
1147 __ bind(&non_function); | 1188 __ bind(&non_function); |
1148 __ mov(r4, Operand(2, RelocInfo::NONE32)); // indicate non-function | 1189 __ li(r7, Operand(2, RelocInfo::NONE32)); // indicate non-function |
1149 | 1190 |
1150 // 3c. Patch the first argument when calling a non-function. The | 1191 // 3c. Patch the first argument when calling a non-function. The |
1151 // CALL_NON_FUNCTION builtin expects the non-function callee as | 1192 // CALL_NON_FUNCTION builtin expects the non-function callee as |
1152 // receiver, so overwrite the first argument which will ultimately | 1193 // receiver, so overwrite the first argument which will ultimately |
1153 // become the receiver. | 1194 // become the receiver. |
1154 // r0: actual number of arguments | 1195 // r3: actual number of arguments |
1155 // r1: function | 1196 // r4: function |
1156 // r4: call type (0: JS function, 1: function proxy, 2: non-function) | 1197 // r7: call type (0: JS function, 1: function proxy, 2: non-function) |
1157 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); | 1198 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
1158 __ str(r1, MemOperand(r2, -kPointerSize)); | 1199 __ add(r5, sp, ip); |
| 1200 __ StoreP(r4, MemOperand(r5, -kPointerSize)); |
1159 | 1201 |
1160 // 4. Shift arguments and return address one slot down on the stack | 1202 // 4. Shift arguments and return address one slot down on the stack |
1161 // (overwriting the original receiver). Adjust argument count to make | 1203 // (overwriting the original receiver). Adjust argument count to make |
1162 // the original first argument the new receiver. | 1204 // the original first argument the new receiver. |
1163 // r0: actual number of arguments | 1205 // r3: actual number of arguments |
1164 // r1: function | 1206 // r4: function |
1165 // r4: call type (0: JS function, 1: function proxy, 2: non-function) | 1207 // r7: call type (0: JS function, 1: function proxy, 2: non-function) |
1166 __ bind(&shift_arguments); | 1208 __ bind(&shift_arguments); |
1167 { Label loop; | 1209 { Label loop; |
1168 // Calculate the copy start address (destination). Copy end address is sp. | 1210 // Calculate the copy start address (destination). Copy end address is sp. |
1169 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); | 1211 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
| 1212 __ add(r5, sp, ip); |
1170 | 1213 |
1171 __ bind(&loop); | 1214 __ bind(&loop); |
1172 __ ldr(ip, MemOperand(r2, -kPointerSize)); | 1215 __ LoadP(ip, MemOperand(r5, -kPointerSize)); |
1173 __ str(ip, MemOperand(r2)); | 1216 __ StoreP(ip, MemOperand(r5)); |
1174 __ sub(r2, r2, Operand(kPointerSize)); | 1217 __ subi(r5, r5, Operand(kPointerSize)); |
1175 __ cmp(r2, sp); | 1218 __ cmp(r5, sp); |
1176 __ b(ne, &loop); | 1219 __ bne(&loop); |
1177 // Adjust the actual number of arguments and remove the top element | 1220 // Adjust the actual number of arguments and remove the top element |
1178 // (which is a copy of the last argument). | 1221 // (which is a copy of the last argument). |
1179 __ sub(r0, r0, Operand(1)); | 1222 __ subi(r3, r3, Operand(1)); |
1180 __ pop(); | 1223 __ pop(); |
1181 } | 1224 } |
1182 | 1225 |
1183 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, | 1226 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, |
1184 // or a function proxy via CALL_FUNCTION_PROXY. | 1227 // or a function proxy via CALL_FUNCTION_PROXY. |
1185 // r0: actual number of arguments | 1228 // r3: actual number of arguments |
1186 // r1: function | 1229 // r4: function |
1187 // r4: call type (0: JS function, 1: function proxy, 2: non-function) | 1230 // r7: call type (0: JS function, 1: function proxy, 2: non-function) |
1188 { Label function, non_proxy; | 1231 { Label function, non_proxy; |
1189 __ tst(r4, r4); | 1232 __ cmpi(r7, Operand::Zero()); |
1190 __ b(eq, &function); | 1233 __ beq(&function); |
1191 // Expected number of arguments is 0 for CALL_NON_FUNCTION. | 1234 // Expected number of arguments is 0 for CALL_NON_FUNCTION. |
1192 __ mov(r2, Operand::Zero()); | 1235 __ li(r5, Operand::Zero()); |
1193 __ cmp(r4, Operand(1)); | 1236 __ cmpi(r7, Operand(1)); |
1194 __ b(ne, &non_proxy); | 1237 __ bne(&non_proxy); |
1195 | 1238 |
1196 __ push(r1); // re-add proxy object as additional argument | 1239 __ push(r4); // re-add proxy object as additional argument |
1197 __ add(r0, r0, Operand(1)); | 1240 __ addi(r3, r3, Operand(1)); |
1198 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); | 1241 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY); |
1199 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1242 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1200 RelocInfo::CODE_TARGET); | 1243 RelocInfo::CODE_TARGET); |
1201 | 1244 |
1202 __ bind(&non_proxy); | 1245 __ bind(&non_proxy); |
1203 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); | 1246 __ GetBuiltinFunction(r4, Builtins::CALL_NON_FUNCTION); |
1204 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1247 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1205 RelocInfo::CODE_TARGET); | 1248 RelocInfo::CODE_TARGET); |
1206 __ bind(&function); | 1249 __ bind(&function); |
1207 } | 1250 } |
1208 | 1251 |
1209 // 5b. Get the code to call from the function and check that the number of | 1252 // 5b. Get the code to call from the function and check that the number of |
1210 // expected arguments matches what we're providing. If so, jump | 1253 // expected arguments matches what we're providing. If so, jump |
1211 // (tail-call) to the code in register r3 without checking arguments. | 1254 // (tail-call) to the code in register r6 without checking arguments. |
1212 // r0: actual number of arguments | 1255 // r3: actual number of arguments |
1213 // r1: function | 1256 // r4: function |
1214 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 1257 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
1215 __ ldr(r2, | 1258 __ LoadWordArith(r5, |
1216 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); | 1259 FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset)); |
1217 __ SmiUntag(r2); | 1260 #if !V8_TARGET_ARCH_PPC64 |
1218 __ cmp(r2, r0); // Check formal and actual parameter counts. | 1261 __ SmiUntag(r5); |
| 1262 #endif |
| 1263 __ cmp(r5, r3); // Check formal and actual parameter counts. |
1219 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1264 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1220 RelocInfo::CODE_TARGET, | 1265 RelocInfo::CODE_TARGET, |
1221 ne); | 1266 ne); |
1222 | 1267 |
1223 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 1268 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); |
1224 ParameterCount expected(0); | 1269 ParameterCount expected(0); |
1225 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); | 1270 __ InvokeCode(r6, expected, expected, JUMP_FUNCTION, NullCallWrapper()); |
1226 } | 1271 } |
1227 | 1272 |
1228 | 1273 |
1229 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 1274 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
1230 const int kIndexOffset = | 1275 const int kIndexOffset = |
1231 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | 1276 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); |
1232 const int kLimitOffset = | 1277 const int kLimitOffset = |
1233 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | 1278 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); |
1234 const int kArgsOffset = 2 * kPointerSize; | 1279 const int kArgsOffset = 2 * kPointerSize; |
1235 const int kRecvOffset = 3 * kPointerSize; | 1280 const int kRecvOffset = 3 * kPointerSize; |
1236 const int kFunctionOffset = 4 * kPointerSize; | 1281 const int kFunctionOffset = 4 * kPointerSize; |
1237 | 1282 |
1238 { | 1283 { |
1239 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | 1284 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
1240 | 1285 |
1241 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function | 1286 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function |
1242 __ push(r0); | 1287 __ push(r3); |
1243 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array | 1288 __ LoadP(r3, MemOperand(fp, kArgsOffset)); // get the args array |
1244 __ push(r0); | 1289 __ push(r3); |
1245 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 1290 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
1246 | 1291 |
1247 // Check the stack for overflow. We are not trying to catch | 1292 // Check the stack for overflow. We are not trying to catch |
1248 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1293 // interruptions (e.g. debug break and preemption) here, so the "real stack |
1249 // limit" is checked. | 1294 // limit" is checked. |
1250 Label okay; | 1295 Label okay; |
1251 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); | 1296 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); |
1252 // Make r2 the space we have left. The stack might already be overflowed | 1297 // Make r5 the space we have left. The stack might already be overflowed |
1253 // here which will cause r2 to become negative. | 1298 // here which will cause r5 to become negative. |
1254 __ sub(r2, sp, r2); | 1299 __ sub(r5, sp, r5); |
1255 // Check if the arguments will overflow the stack. | 1300 // Check if the arguments will overflow the stack. |
1256 __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0)); | 1301 __ SmiToPtrArrayOffset(r0, r3); |
1257 __ b(gt, &okay); // Signed comparison. | 1302 __ cmp(r5, r0); |
| 1303 __ bgt(&okay); // Signed comparison. |
1258 | 1304 |
1259 // Out of stack space. | 1305 // Out of stack space. |
1260 __ ldr(r1, MemOperand(fp, kFunctionOffset)); | 1306 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); |
1261 __ Push(r1, r0); | 1307 __ Push(r4, r3); |
1262 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); | 1308 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); |
1263 // End of stack check. | 1309 // End of stack check. |
1264 | 1310 |
1265 // Push current limit and index. | 1311 // Push current limit and index. |
1266 __ bind(&okay); | 1312 __ bind(&okay); |
1267 __ push(r0); // limit | 1313 __ li(r4, Operand::Zero()); |
1268 __ mov(r1, Operand::Zero()); // initial index | 1314 __ Push(r3, r4); // limit and initial index. |
1269 __ push(r1); | |
1270 | 1315 |
1271 // Get the receiver. | 1316 // Get the receiver. |
1272 __ ldr(r0, MemOperand(fp, kRecvOffset)); | 1317 __ LoadP(r3, MemOperand(fp, kRecvOffset)); |
1273 | 1318 |
1274 // Check that the function is a JS function (otherwise it must be a proxy). | 1319 // Check that the function is a JS function (otherwise it must be a proxy). |
1275 Label push_receiver; | 1320 Label push_receiver; |
1276 __ ldr(r1, MemOperand(fp, kFunctionOffset)); | 1321 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); |
1277 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); | 1322 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); |
1278 __ b(ne, &push_receiver); | 1323 __ bne(&push_receiver); |
1279 | 1324 |
1280 // Change context eagerly to get the right global object if necessary. | 1325 // Change context eagerly to get the right global object if necessary. |
1281 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 1326 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); |
1282 // Load the shared function info while the function is still in r1. | 1327 // Load the shared function info while the function is still in r4. |
1283 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 1328 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
1284 | 1329 |
1285 // Compute the receiver. | 1330 // Compute the receiver. |
1286 // Do not transform the receiver for strict mode functions. | 1331 // Do not transform the receiver for strict mode functions. |
1287 Label call_to_object, use_global_proxy; | 1332 Label call_to_object, use_global_proxy; |
1288 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); | 1333 __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset)); |
1289 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | 1334 __ TestBit(r5, |
1290 kSmiTagSize))); | 1335 #if V8_TARGET_ARCH_PPC64 |
1291 __ b(ne, &push_receiver); | 1336 SharedFunctionInfo::kStrictModeFunction, |
| 1337 #else |
| 1338 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize, |
| 1339 #endif |
| 1340 r0); |
| 1341 __ bne(&push_receiver, cr0); |
1292 | 1342 |
1293 // Do not transform the receiver for native (Compilerhints already in r2). | 1343 // Do not transform the receiver for native (Compilerhints already in r5). |
1294 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | 1344 __ TestBit(r5, |
1295 __ b(ne, &push_receiver); | 1345 #if V8_TARGET_ARCH_PPC64 |
| 1346 SharedFunctionInfo::kNative, |
| 1347 #else |
| 1348 SharedFunctionInfo::kNative + kSmiTagSize, |
| 1349 #endif |
| 1350 r0); |
| 1351 __ bne(&push_receiver, cr0); |
1296 | 1352 |
1297 // Compute the receiver in sloppy mode. | 1353 // Compute the receiver in sloppy mode. |
1298 __ JumpIfSmi(r0, &call_to_object); | 1354 __ JumpIfSmi(r3, &call_to_object); |
1299 __ LoadRoot(r1, Heap::kNullValueRootIndex); | 1355 __ LoadRoot(r4, Heap::kNullValueRootIndex); |
1300 __ cmp(r0, r1); | 1356 __ cmp(r3, r4); |
1301 __ b(eq, &use_global_proxy); | 1357 __ beq(&use_global_proxy); |
1302 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); | 1358 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
1303 __ cmp(r0, r1); | 1359 __ cmp(r3, r4); |
1304 __ b(eq, &use_global_proxy); | 1360 __ beq(&use_global_proxy); |
1305 | 1361 |
1306 // Check if the receiver is already a JavaScript object. | 1362 // Check if the receiver is already a JavaScript object. |
1307 // r0: receiver | 1363 // r3: receiver |
1308 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1364 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
1309 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); | 1365 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE); |
1310 __ b(ge, &push_receiver); | 1366 __ bge(&push_receiver); |
1311 | 1367 |
1312 // Convert the receiver to a regular object. | 1368 // Convert the receiver to a regular object. |
1313 // r0: receiver | 1369 // r3: receiver |
1314 __ bind(&call_to_object); | 1370 __ bind(&call_to_object); |
1315 __ push(r0); | 1371 __ push(r3); |
1316 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1372 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
1317 __ b(&push_receiver); | 1373 __ b(&push_receiver); |
1318 | 1374 |
1319 __ bind(&use_global_proxy); | 1375 __ bind(&use_global_proxy); |
1320 __ ldr(r0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 1376 __ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
1321 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalProxyOffset)); | 1377 __ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset)); |
1322 | 1378 |
1323 // Push the receiver. | 1379 // Push the receiver. |
1324 // r0: receiver | 1380 // r3: receiver |
1325 __ bind(&push_receiver); | 1381 __ bind(&push_receiver); |
1326 __ push(r0); | 1382 __ push(r3); |
1327 | 1383 |
1328 // Copy all arguments from the array to the stack. | 1384 // Copy all arguments from the array to the stack. |
1329 Label entry, loop; | 1385 Label entry, loop; |
1330 __ ldr(r0, MemOperand(fp, kIndexOffset)); | 1386 __ LoadP(r3, MemOperand(fp, kIndexOffset)); |
1331 __ b(&entry); | 1387 __ b(&entry); |
1332 | 1388 |
1333 // Load the current argument from the arguments array and push it to the | 1389 // Load the current argument from the arguments array and push it to the |
1334 // stack. | 1390 // stack. |
1335 // r0: current argument index | 1391 // r3: current argument index |
1336 __ bind(&loop); | 1392 __ bind(&loop); |
1337 __ ldr(r1, MemOperand(fp, kArgsOffset)); | 1393 __ LoadP(r4, MemOperand(fp, kArgsOffset)); |
1338 __ Push(r1, r0); | 1394 __ Push(r4, r3); |
1339 | 1395 |
1340 // Call the runtime to access the property in the arguments array. | 1396 // Call the runtime to access the property in the arguments array. |
1341 __ CallRuntime(Runtime::kGetProperty, 2); | 1397 __ CallRuntime(Runtime::kGetProperty, 2); |
1342 __ push(r0); | 1398 __ push(r3); |
1343 | 1399 |
1344 // Use inline caching to access the arguments. | 1400 // Use inline caching to access the arguments. |
1345 __ ldr(r0, MemOperand(fp, kIndexOffset)); | 1401 __ LoadP(r3, MemOperand(fp, kIndexOffset)); |
1346 __ add(r0, r0, Operand(1 << kSmiTagSize)); | 1402 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0); |
1347 __ str(r0, MemOperand(fp, kIndexOffset)); | 1403 __ StoreP(r3, MemOperand(fp, kIndexOffset)); |
1348 | 1404 |
1349 // Test if the copy loop has finished copying all the elements from the | 1405 // Test if the copy loop has finished copying all the elements from the |
1350 // arguments object. | 1406 // arguments object. |
1351 __ bind(&entry); | 1407 __ bind(&entry); |
1352 __ ldr(r1, MemOperand(fp, kLimitOffset)); | 1408 __ LoadP(r4, MemOperand(fp, kLimitOffset)); |
1353 __ cmp(r0, r1); | 1409 __ cmp(r3, r4); |
1354 __ b(ne, &loop); | 1410 __ bne(&loop); |
1355 | 1411 |
1356 // Call the function. | 1412 // Call the function. |
1357 Label call_proxy; | 1413 Label call_proxy; |
1358 ParameterCount actual(r0); | 1414 ParameterCount actual(r3); |
1359 __ SmiUntag(r0); | 1415 __ SmiUntag(r3); |
1360 __ ldr(r1, MemOperand(fp, kFunctionOffset)); | 1416 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); |
1361 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); | 1417 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); |
1362 __ b(ne, &call_proxy); | 1418 __ bne(&call_proxy); |
1363 __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper()); | 1419 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper()); |
1364 | 1420 |
1365 frame_scope.GenerateLeaveFrame(); | 1421 frame_scope.GenerateLeaveFrame(); |
1366 __ add(sp, sp, Operand(3 * kPointerSize)); | 1422 __ addi(sp, sp, Operand(3 * kPointerSize)); |
1367 __ Jump(lr); | 1423 __ blr(); |
1368 | 1424 |
1369 // Call the function proxy. | 1425 // Call the function proxy. |
1370 __ bind(&call_proxy); | 1426 __ bind(&call_proxy); |
1371 __ push(r1); // add function proxy as last argument | 1427 __ push(r4); // add function proxy as last argument |
1372 __ add(r0, r0, Operand(1)); | 1428 __ addi(r3, r3, Operand(1)); |
1373 __ mov(r2, Operand::Zero()); | 1429 __ li(r5, Operand::Zero()); |
1374 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); | 1430 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY); |
1375 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1431 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1376 RelocInfo::CODE_TARGET); | 1432 RelocInfo::CODE_TARGET); |
1377 | 1433 |
1378 // Tear down the internal frame and remove function, receiver and args. | 1434 // Tear down the internal frame and remove function, receiver and args. |
1379 } | 1435 } |
1380 __ add(sp, sp, Operand(3 * kPointerSize)); | 1436 __ addi(sp, sp, Operand(3 * kPointerSize)); |
1381 __ Jump(lr); | 1437 __ blr(); |
1382 } | 1438 } |
1383 | 1439 |
1384 | 1440 |
1385 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | 1441 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, |
1386 Label* stack_overflow) { | 1442 Label* stack_overflow) { |
1387 // ----------- S t a t e ------------- | 1443 // ----------- S t a t e ------------- |
1388 // -- r0 : actual number of arguments | 1444 // -- r3 : actual number of arguments |
1389 // -- r1 : function (passed through to callee) | 1445 // -- r4 : function (passed through to callee) |
1390 // -- r2 : expected number of arguments | 1446 // -- r5 : expected number of arguments |
1391 // ----------------------------------- | 1447 // ----------------------------------- |
1392 // Check the stack for overflow. We are not trying to catch | 1448 // Check the stack for overflow. We are not trying to catch |
1393 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1449 // interruptions (e.g. debug break and preemption) here, so the "real stack |
1394 // limit" is checked. | 1450 // limit" is checked. |
1395 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); | 1451 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex); |
1396 // Make r5 the space we have left. The stack might already be overflowed | 1452 // Make r8 the space we have left. The stack might already be overflowed |
1397 // here which will cause r5 to become negative. | 1453 // here which will cause r8 to become negative. |
1398 __ sub(r5, sp, r5); | 1454 __ sub(r8, sp, r8); |
1399 // Check if the arguments will overflow the stack. | 1455 // Check if the arguments will overflow the stack. |
1400 __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2)); | 1456 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2)); |
1401 __ b(le, stack_overflow); // Signed comparison. | 1457 __ cmp(r8, r0); |
| 1458 __ ble(stack_overflow); // Signed comparison. |
1402 } | 1459 } |
1403 | 1460 |
1404 | 1461 |
1405 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | 1462 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { |
1406 __ SmiTag(r0); | 1463 __ SmiTag(r3); |
1407 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1464 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
1408 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | | 1465 __ mflr(r0); |
1409 (FLAG_enable_ool_constant_pool ? pp.bit() : 0) | | 1466 __ push(r0); |
1410 fp.bit() | lr.bit()); | 1467 #if V8_OOL_CONSTANT_POOL |
1411 __ add(fp, sp, | 1468 __ Push(fp, kConstantPoolRegister, r7, r4, r3); |
| 1469 #else |
| 1470 __ Push(fp, r7, r4, r3); |
| 1471 #endif |
| 1472 __ addi(fp, sp, |
1412 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); | 1473 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); |
1413 } | 1474 } |
1414 | 1475 |
1415 | 1476 |
1416 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | 1477 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { |
1417 // ----------- S t a t e ------------- | 1478 // ----------- S t a t e ------------- |
1418 // -- r0 : result being passed through | 1479 // -- r3 : result being passed through |
1419 // ----------------------------------- | 1480 // ----------------------------------- |
1420 // Get the number of arguments passed (as a smi), tear down the frame and | 1481 // Get the number of arguments passed (as a smi), tear down the frame and |
1421 // then tear down the parameters. | 1482 // then tear down the parameters. |
1422 __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | 1483 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + |
1423 kPointerSize))); | 1484 kPointerSize))); |
1424 | 1485 #if V8_OOL_CONSTANT_POOL |
1425 if (FLAG_enable_ool_constant_pool) { | 1486 __ addi(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset)); |
1426 __ add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset)); | 1487 __ LoadP(kConstantPoolRegister, MemOperand(sp)); |
1427 __ ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit()); | 1488 __ LoadP(fp, MemOperand(sp, kPointerSize)); |
1428 } else { | 1489 __ LoadP(r0, MemOperand(sp, 2 * kPointerSize)); |
1429 __ mov(sp, fp); | 1492 __ mr(sp, fp); |
1430 __ ldm(ia_w, sp, fp.bit() | lr.bit()); | 1491 #else |
1431 } | 1492 __ mr(sp, fp); |
1432 __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1)); | 1493 __ LoadP(fp, MemOperand(sp)); |
1433 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver | 1494 __ LoadP(r0, MemOperand(sp, kPointerSize)); |
| 1495 int slots = 2; // adjust for fp + lr below |
| 1496 #endif |
| 1497 __ mtlr(r0); |
| 1498 __ SmiToPtrArrayOffset(r0, r4); |
| 1499 __ add(sp, sp, r0); |
| 1500 __ addi(sp, sp, |
| 1501 Operand((1 + slots) * kPointerSize)); // adjust for receiver + others |
1434 } | 1502 } |
1435 | 1503 |
1436 | 1504 |
1437 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | 1505 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
1438 // ----------- S t a t e ------------- | 1506 // ----------- S t a t e ------------- |
1439 // -- r0 : actual number of arguments | 1507 // -- r3 : actual number of arguments |
1440 // -- r1 : function (passed through to callee) | 1508 // -- r4 : function (passed through to callee) |
1441 // -- r2 : expected number of arguments | 1509 // -- r5 : expected number of arguments |
1442 // ----------------------------------- | 1510 // ----------------------------------- |
1443 | 1511 |
1444 Label stack_overflow; | 1512 Label stack_overflow; |
1445 ArgumentAdaptorStackCheck(masm, &stack_overflow); | 1513 ArgumentAdaptorStackCheck(masm, &stack_overflow); |
1446 Label invoke, dont_adapt_arguments; | 1514 Label invoke, dont_adapt_arguments; |
1447 | 1515 |
1448 Label enough, too_few; | 1516 Label enough, too_few; |
1449 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 1517 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); |
1450 __ cmp(r0, r2); | 1518 __ cmp(r3, r5); |
1451 __ b(lt, &too_few); | 1519 __ blt(&too_few); |
1452 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); | 1520 __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); |
1453 __ b(eq, &dont_adapt_arguments); | 1521 __ beq(&dont_adapt_arguments); |
1454 | 1522 |
1455 { // Enough parameters: actual >= expected | 1523 { // Enough parameters: actual >= expected |
1456 __ bind(&enough); | 1524 __ bind(&enough); |
1457 EnterArgumentsAdaptorFrame(masm); | 1525 EnterArgumentsAdaptorFrame(masm); |
1458 | 1526 |
1459 // Calculate copy start address into r0 and copy end address into r2. | 1527 // Calculate copy start address into r3 and copy end address into r5. |
1460 // r0: actual number of arguments as a smi | 1528 // r3: actual number of arguments as a smi |
1461 // r1: function | 1529 // r4: function |
1462 // r2: expected number of arguments | 1530 // r5: expected number of arguments |
1463 // r3: code entry to call | 1531 // r6: code entry to call |
1464 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0)); | 1532 __ SmiToPtrArrayOffset(r3, r3); |
| 1533 __ add(r3, r3, fp); |
1465 // adjust for return address and receiver | 1534 // adjust for return address and receiver |
1466 __ add(r0, r0, Operand(2 * kPointerSize)); | 1535 __ addi(r3, r3, Operand(2 * kPointerSize)); |
1467 __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2)); | 1536 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2)); |
| 1537 __ sub(r5, r3, r5); |
1468 | 1538 |
1469 // Copy the arguments (including the receiver) to the new stack frame. | 1539 // Copy the arguments (including the receiver) to the new stack frame. |
1470 // r0: copy start address | 1540 // r3: copy start address |
1471 // r1: function | 1541 // r4: function |
1472 // r2: copy end address | 1542 // r5: copy end address |
1473 // r3: code entry to call | 1543 // r6: code entry to call |
1474 | 1544 |
1475 Label copy; | 1545 Label copy; |
1476 __ bind(©); | 1546 __ bind(©); |
1477 __ ldr(ip, MemOperand(r0, 0)); | 1547 __ LoadP(ip, MemOperand(r3, 0)); |
1478 __ push(ip); | 1548 __ push(ip); |
1479 __ cmp(r0, r2); // Compare before moving to next argument. | 1549 __ cmp(r3, r5); // Compare before moving to next argument. |
1480 __ sub(r0, r0, Operand(kPointerSize)); | 1550 __ subi(r3, r3, Operand(kPointerSize)); |
1481 __ b(ne, &copy); | 1551 __ bne(&copy);
1482 | 1552 |
1483 __ b(&invoke); | 1553 __ b(&invoke); |
1484 } | 1554 } |
1485 | 1555 |
1486 { // Too few parameters: Actual < expected | 1556 { // Too few parameters: Actual < expected |
1487 __ bind(&too_few); | 1557 __ bind(&too_few); |
1488 EnterArgumentsAdaptorFrame(masm); | 1558 EnterArgumentsAdaptorFrame(masm); |
1489 | 1559 |
1490 // Calculate copy start address into r0 and copy end address is fp. | 1560 // Calculate copy start address into r3 and copy end address is fp.
1491 // r0: actual number of arguments as a smi | 1561 // r3: actual number of arguments as a smi |
1492 // r1: function | 1562 // r4: function |
1493 // r2: expected number of arguments | 1563 // r5: expected number of arguments |
1494 // r3: code entry to call | 1564 // r6: code entry to call |
1495 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0)); | 1565 __ SmiToPtrArrayOffset(r3, r3); |
| 1566 __ add(r3, r3, fp); |
1496 | 1567 |
1497 // Copy the arguments (including the receiver) to the new stack frame. | 1568 // Copy the arguments (including the receiver) to the new stack frame. |
1498 // r0: copy start address | 1569 // r3: copy start address |
1499 // r1: function | 1570 // r4: function |
1500 // r2: expected number of arguments | 1571 // r5: expected number of arguments |
1501 // r3: code entry to call | 1572 // r6: code entry to call |
1502 Label copy; | 1573 Label copy; |
1503 __ bind(&copy); | 1574 __ bind(&copy);
1504 // Adjust load for return address and receiver. | 1575 // Adjust load for return address and receiver. |
1505 __ ldr(ip, MemOperand(r0, 2 * kPointerSize)); | 1576 __ LoadP(ip, MemOperand(r3, 2 * kPointerSize)); |
1506 __ push(ip); | 1577 __ push(ip); |
1507 __ cmp(r0, fp); // Compare before moving to next argument. | 1578 __ cmp(r3, fp); // Compare before moving to next argument. |
1508 __ sub(r0, r0, Operand(kPointerSize)); | 1579 __ subi(r3, r3, Operand(kPointerSize)); |
1509 __ b(ne, &copy); | 1580 __ bne(&copy);
1510 | 1581 |
1511 // Fill the remaining expected arguments with undefined. | 1582 // Fill the remaining expected arguments with undefined. |
1512 // r1: function | 1583 // r4: function |
1513 // r2: expected number of arguments | 1584 // r5: expected number of arguments |
1514 // r3: code entry to call | 1585 // r6: code entry to call |
1515 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 1586 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
1516 __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2)); | 1587 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2)); |
| 1588 __ sub(r5, fp, r5); |
1517 // Adjust for frame. | 1589 // Adjust for frame. |
1518 __ sub(r2, r2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + | 1590 __ subi(r5, r5, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + |
1519 2 * kPointerSize)); | 1591 2 * kPointerSize)); |
1520 | 1592 |
1521 Label fill; | 1593 Label fill; |
1522 __ bind(&fill); | 1594 __ bind(&fill); |
1523 __ push(ip); | 1595 __ push(ip); |
1524 __ cmp(sp, r2); | 1596 __ cmp(sp, r5); |
1525 __ b(ne, &fill); | 1597 __ bne(&fill); |
1526 } | 1598 } |
1527 | 1599 |
1528 // Call the entry point. | 1600 // Call the entry point. |
1529 __ bind(&invoke); | 1601 __ bind(&invoke); |
1530 __ Call(r3); | 1602 __ Call(r6); |
1531 | 1603 |
1532 // Store offset of return address for deoptimizer. | 1604 // Store offset of return address for deoptimizer. |
1533 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); | 1605 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); |
1534 | 1606 |
1535 // Exit frame and return. | 1607 // Exit frame and return. |
1536 LeaveArgumentsAdaptorFrame(masm); | 1608 LeaveArgumentsAdaptorFrame(masm); |
1537 __ Jump(lr); | 1609 __ blr(); |
1538 | 1610 |
1539 | 1611 |
1540 // ------------------------------------------- | 1612 // ------------------------------------------- |
1541 // Don't adapt arguments. | 1613 // Don't adapt arguments.
1542 // ------------------------------------------- | 1614 // ------------------------------------------- |
1543 __ bind(&dont_adapt_arguments); | 1615 __ bind(&dont_adapt_arguments); |
1544 __ Jump(r3); | 1616 __ Jump(r6); |
1545 | 1617 |
1546 __ bind(&stack_overflow); | 1618 __ bind(&stack_overflow); |
1547 { | 1619 { |
1548 FrameScope frame(masm, StackFrame::MANUAL); | 1620 FrameScope frame(masm, StackFrame::MANUAL); |
1549 EnterArgumentsAdaptorFrame(masm); | 1621 EnterArgumentsAdaptorFrame(masm); |
1550 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); | 1622 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); |
1551 __ bkpt(0); | 1623 __ bkpt(0); |
1552 } | 1624 } |
1553 } | 1625 } |
1554 | 1626 |
1555 | 1627 |
1556 #undef __ | 1628 #undef __ |
1557 | 1629 |
1558 } } // namespace v8::internal | 1630 } } // namespace v8::internal |
1559 | 1631 |
1560 #endif // V8_TARGET_ARCH_ARM | 1632 #endif // V8_TARGET_ARCH_PPC |