// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
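// Note: the `__` shorthand above expands to ACCESS_MASM(masm), i.e. `masm->`,
// so a line such as `__ addi(r3, r3, Operand(1))` emits a PPC `addi`
// instruction through the MacroAssembler. The Generate_* functions below run
// at snapshot/startup time to *generate* the machine code for each builtin;
// they do not perform the JavaScript operation themselves.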


void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : target
  //  -- r6                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r4);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ addi(r3, r3, Operand(num_extra_args + 1));
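  // For example, a call with two explicit arguments leaves here with
  // r3 = 2 + 3 + 1 = 6: the two arguments, the three extra values pushed
  // below (smi-tagged argc, target and new.target) and the receiver.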

  // Insert extra arguments.
  __ SmiTag(r3);
  __ Push(r3, r4, r6);
  __ SmiUntag(r3);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mr(r6, r4);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r8 and the double value in d1.
  __ LoadRoot(r8, root_index);
  __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));

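  // Loop over the arguments. r7 starts at argc and is decremented before each
  // access, so the slot offsets argc-1 .. 0 are visited, i.e. arg[0] through
  // arg[argc-1] given the layout in the state comment above. The running
  // extremum is kept both as a tagged value in r8 and as a double in d1;
  // NaN and -0 vs. +0 get the special handling Math.min/max require.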
  Label done_loop, loop;
  __ mr(r7, r3);
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ subi(r7, r7, Operand(1));
    __ cmpi(r7, Operand::Zero());
    __ blt(&done_loop);

    // Load the next parameter tagged value into r5.
    __ ShiftLeftImm(r5, r7, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r5, &convert_smi);
    __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
    __ JumpIfRoot(r6, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r3);
      __ SmiTag(r7);
      __ EnterBuiltinFrame(cp, r4, r3);
      __ Push(r7, r8);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r7, r8);
      __ LeaveBuiltinFrame(cp, r4, r3);
      __ SmiUntag(r7);
      __ SmiUntag(r3);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r8);
        __ JumpIfSmi(r8, &done_restore);
        __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ lfd(d2, FieldMemOperand(r5, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r5);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ fcmpu(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r9, r0);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ fmr(d1, d2);
    __ mr(r8, r5);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r8, Heap::kNanValueRootIndex);
    __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ addi(r3, r3, Operand(1));
  __ Drop(r3);
  __ mr(r3, r8);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r3.
  Label no_arguments;
  {
    __ mr(r5, r3);  // Store argc in r5.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r3, MemOperand(sp, r3));
  }

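  // The ToNumber builtin may run arbitrary JS (e.g. a valueOf method) and so
  // may trigger a GC; EnterBuiltinFrame records the context, the target
  // function and the smi-tagged argument count so the frame can be walked,
  // and LeaveBuiltinFrame restores them afterwards.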
  // 2a. Convert the first argument to a number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r5);
    __ EnterBuiltinFrame(cp, r4, r5);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r4, r5);
    __ SmiUntag(r5);
  }

  {
    // Drop all arguments including the receiver.
    __ Drop(r5);
    __ Ret(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Ret(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5.
  {
    Label no_arguments, done;
    __ mr(r9, r3);  // Store argc in r9.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r5, Smi::FromInt(0));
    __ bind(&done);
  }

  // 3. Make sure r5 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r5, &done_convert);
    __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r9);
      __ EnterBuiltinFrame(cp, r4, r9);
      __ Push(r6);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r6);
      __ LeaveBuiltinFrame(cp, r4, r9);
      __ SmiUntag(r9);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r9);
    __ EnterBuiltinFrame(cp, r4, r9);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
    __ LeaveBuiltinFrame(cp, r4, r9);
    __ SmiUntag(r9);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r9);
    __ Ret(1);
  }
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r3.
  Label no_arguments;
  {
    __ mr(r5, r3);  // Store argc in r5.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r3, MemOperand(sp, r3));
  }

  // 2a. At least one argument, return r3 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r3, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r3, r6, r6, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ b(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r3, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r3 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    ToStringStub stub(masm->isolate());
    __ SmiTag(r5);
    __ EnterBuiltinFrame(cp, r4, r5);
    __ CallStub(&stub);
    __ LeaveBuiltinFrame(cp, r4, r5);
    __ SmiUntag(r5);
  }
  __ b(&drop_frame_and_ret);

  // 3b. Convert symbol in r3 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Drop(r5);
    __ Drop(1);
    __ Push(r3);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r5);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5.
  {
    Label no_arguments, done;
    __ mr(r9, r3);  // Store argc in r9.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r5, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure r5 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r5, &convert);
    __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      ToStringStub stub(masm->isolate());
      __ SmiTag(r9);
      __ EnterBuiltinFrame(cp, r4, r9);
      __ Push(r6);
      __ mr(r3, r5);
      __ CallStub(&stub);
      __ mr(r5, r3);
      __ Pop(r6);
      __ LeaveBuiltinFrame(cp, r4, r9);
      __ SmiUntag(r9);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r9);
    __ EnterBuiltinFrame(cp, r4, r9);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
    __ LeaveBuiltinFrame(cp, r4, r9);
    __ SmiUntag(r9);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r9);
    __ Ret(1);
  }
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  // -----------------------------------
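  // r3/r4/r6 must survive the runtime call, so they are pushed around it
  // (r3 smi-tagged so the GC treats it as data). The fourth slot, a second
  // copy of r4, is the actual argument to the runtime function;
  // CallRuntime(function_id, 1) consumes it and leaves the code object to
  // tail-call in r3, which is moved to r5 before the saved registers are
  // popped back.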
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r3);
    __ Push(r3, r4, r6, r4);

    __ CallRuntime(function_id, 1);
    __ mr(r5, r3);

    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r5, r7);

    if (!create_implicit_receiver) {
      __ SmiTag(r7, r3, SetRC);
      __ Push(cp, r5, r7);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r3);
      __ Push(cp, r5, r3);

      // Allocate the new receiver object.
      __ Push(r4, r6);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r7, r3);
      __ Pop(r4, r6);

      // ----------- S t a t e -------------
      //  -- r4: constructor function
      //  -- r6: new target
      //  -- r7: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r3, MemOperand(sp));
      __ SmiUntag(r3, SetRC);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r7, r7);
    }

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: new target
    // cr0: condition indicating whether r3 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
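    // The copy below uses the PPC count register: mtctr loads r3 into CTR,
    // and each bdnz decrements CTR and branches while it is non-zero. ip
    // starts at argc * kPointerSize and steps down one slot per iteration,
    // so the same offset indexes both the source (caller frame, via r5) and
    // the destination (the just-reserved expression stack area). E.g. with
    // argc == 2 on a 64-bit target, the loop copies offsets 8 and 0 and
    // then falls through.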
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    // r6: new target

    ParameterCount actual(r3);
    __ InvokeFunction(r4, r6, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r3: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r3, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r3, r4, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r3, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r3: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r4, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r3, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  }
  __ blr();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the value to pass to the generator
  //  -- r4 : the JSGeneratorObject to resume
  //  -- r5 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r4);

  // Store input value into generator object.
  __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kResumeModeOffset), r0);

  // Load suspended function and context.
  __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ mov(ip, Operand(last_step_action));
  __ LoadByte(ip, MemOperand(ip), r0);
  __ extsb(ip, ip);
  __ cmpi(ip, Operand(StepIn));
  __ bge(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended
  // generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ mov(ip, Operand(debug_suspended_generator));
  __ LoadP(ip, MemOperand(ip));
  __ cmp(ip, r4);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r4    : the JSGeneratorObject to resume
  //  -- r5    : the resume mode (tagged)
  //  -- r7    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r3, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label loop, done_loop;
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
#if V8_TARGET_ARCH_PPC64
    __ cmpi(r3, Operand::Zero());
    __ beq(&done_loop);
#else
    __ SmiUntag(r3, SetRC);
    __ beq(&done_loop, cr0);
#endif
    __ mtctr(r3);
    __ bind(&loop);
    __ push(ip);
    __ bdnz(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE);
  __ bne(&old_generator);

  // New-style (ignition/turbofan) generator object
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ mr(r6, r4);
    __ mr(r4, r7);
    __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
    __ JumpToJSEntry(ip);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were
    // when the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushStandardFrame(r7);

    // Restore the operand stack.
    __ LoadP(r3, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
    __ LoadP(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    {
      Label loop, done_loop;
      __ SmiUntag(r6, SetRC);
      __ beq(&done_loop, cr0);
      __ mtctr(r6);
      __ bind(&loop);
      __ LoadPU(ip, MemOperand(r3, kPointerSize));
      __ Push(ip);
      __ bdnz(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ StoreP(ip, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset),
              r0);

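    // The continuation is a smi offset into the generator's unoptimized
    // code: the jump target is computed as code object + Code::kHeaderSize -
    // kHeapObjectTag (the first instruction) plus the untagged offset. The
    // continuation field is then overwritten with kGeneratorExecuting so a
    // re-entrant resume can be detected.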
    // Resume the generator function at the continuation.
    __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset));
    __ addi(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      if (FLAG_enable_embedded_constant_pool) {
        __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r6);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r5);
      __ add(r6, r6, r5);
      __ LoadSmiLiteral(r5,
                        Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
      __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
                r0);
      __ mr(r3, r4);  // Continuation expects generator object in r3.
      __ Jump(r6);
    }
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r5, r7);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r4, r5);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r5);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r4, r5);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r4);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
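  // The check computes the free space as sp - real_stack_limit and compares
  // it (signed, so an already-overflowed stack yields a negative value)
  // against the space the arguments will need, argc * kPointerSize. E.g. on
  // a 64-bit target, 100 arguments require 800 bytes of headroom.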
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: new.target
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0, r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r5.
    Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
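    // Each argv slot holds a Handle (a pointer to a pointer), so two loads
    // are needed per argument: one to read the handle location from argv,
    // and one to dereference it to the actual JS object before pushing.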
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Setup new.target and argc.
    __ mr(r7, r3);
    __ mr(r3, r6);
    __ mr(r6, r7);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lwz(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  __ add(sp, sp, args_count);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o r6: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r4);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r5;
  DCHECK(!debug_info.is(r3));
  __ LoadP(debug_info,
           FieldMemOperand(r3, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ bind(&array_done);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ beq(&bytecode_array_not_present);

  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    // TestIfSmi sets cr0, so the assert must check cr0 rather than the
    // default condition register.
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, cr0);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r3, kInterpreterBytecodeOffsetRegister);
  __ Push(r6, kInterpreterBytecodeArrayRegister, r3);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
    __ beq(&no_args, cr0);
    __ mtctr(r5);
    __ bind(&loop);
    __ push(r6);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

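  // Dispatch works by loading the byte at the current bytecode offset and
  // using it as an index into the dispatch table, an array of code entry
  // addresses with one handler per bytecode. If the first bytecode is, say,
  // 0x49, the handler address is read from slot 0x49 of the table.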
  // Dispatch to the first bytecode handler for the function.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Call(ip);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r3.
  LeaveInterpreterFrame(masm, r5);
  __ blr();

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kCodeOffset));
  __ addi(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(r7, FieldMemOperand(r4, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(r4, r7, r8);
  __ JumpToJSEntry(r7);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ LoadP(r4, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ LoadP(kContextRegister,
           MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, r5);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(r3);

    // Push function as argument and compile for baseline.
    __ push(r4);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(r3);
  }
  __ blr();
}

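// Pushes `count` values starting at the address in `index`, stepping the
// cursor down one slot per iteration (later arguments sit at lower
// addresses, matching the downward-growing stack). LoadPU is a
// load-with-update: it adds the negative offset to the base register first
// and then loads, so biasing `index` up by one slot makes the first load
// read exactly *index while later iterations advance automatically.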
static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU
  __ mtctr(count);
  __ bind(&loop);
  __ LoadPU(scratch, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ bdnz(&loop);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r5 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (add one for receiver).
  __ addi(r6, r3, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r5, r6, r7);

  // Call the target.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (not including receiver)
  //  -- r6 : new target
  //  -- r4 : constructor to call
  //  -- r5 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ li(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ cmpi(r3, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r5, r3, r7);
  __ bind(&skip);

  // Call the constructor with r3, r4, and r6 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(r5, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value() +
                          Code::kHeaderSize - kHeapObjectTag));
  __ mtlr(r0);
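  // With lr pointing just past the dispatch call in the interpreter entry
  // trampoline, the bytecode handler jumped to below will "return" into the
  // trampoline's normal exit path, exactly as if it had been called from
  // there.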

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    // As above, TestIfSmi sets cr0, so check cr0 explicitly.
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, cr0);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(ip);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = r4;
  Register map = r9;
  Register index = r5;
  __ LoadP(map,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(map,
           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
  __ blt(&gotta_call_runtime);

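  // The optimized code map is, roughly, a FixedArray of fixed-size entries,
  // one per native context the function was optimized in. Each entry holds
  // the context, the OSR ast id, the literals array and the cached code
  // (the pointer fields as weak cells); the loop below scans the array back
  // to front for an entry whose context matches the current native context.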
  // Find literals.
  // r10 : native context
  // r5  : length / index
  // r9  : optimized code map
  // r6  : new target
  // r4  : closure
  Register native_context = r10;
  __ LoadP(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = r11;
  Register array_pointer = r8;

  // Does the native context match?
  __ SmiToPtrArrayOffset(array_pointer, index);
  __ add(array_pointer, map, array_pointer);
  __ LoadP(temp, FieldMemOperand(array_pointer,
                                 SharedFunctionInfo::kOffsetToPreviousContext));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ bne(&loop_bottom);
  // OSR id set to none?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ CmpSmiLiteral(temp, Smi::FromInt(bailout_id), r0);
  __ bne(&loop_bottom);
  // Literals available?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r7,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Code available?
  Register entry = r7;
  __ LoadP(entry,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  // Store code entry in the closure.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r8);

  // Link the closure into the optimized function list.
  // r7 : code entry
  // r10: native context
  // r4 : closure
  __ LoadP(
      r8, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ StoreP(r8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset),
            r0);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r8, temp,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ StoreP(
      closure,
      ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0);
  // Save closure before the write barrier.
  __ mr(r8, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, r8, temp,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ JumpToJSEntry(entry);

  __ bind(&loop_bottom);
  __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength),
                   r0);
  __ CmpSmiLiteral(index, Smi::FromInt(1), r0);
  __ bgt(&loop_top);

  // We found neither literals nor code.
  __ b(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);

  // Last possibility. Check the context free optimized code map entry.
  __ LoadP(entry,
           FieldMemOperand(map, FixedArray::kHeaderSize +
                                    SharedFunctionInfo::kSharedCodeIndex));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ b(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  // Is the full code valid?
  __ LoadP(entry,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ lwz(r8, FieldMemOperand(entry, Code::kFlagsOffset));
  __ DecodeField<Code::KindField>(r8);
  __ cmpi(r8, Operand(Code::BUILTIN));
  __ beq(&gotta_call_runtime);
  // Yes, install the full code.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r8);
  __ JumpToJSEntry(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
1478 // -- r4 : new target (preserved for callee) | |
1479 // -- r6 : target function (preserved for callee) | |
1480 // ----------------------------------- | |
1481 Label failed; | |
1482 { | |
1483 FrameScope scope(masm, StackFrame::INTERNAL); | |
1484 // Push a copy of the target function and the new target. | |
1485 // Push function as parameter to the runtime call. | |
1486 __ SmiTag(r3); | |
1487 __ Push(r3, r4, r6, r4); | |
1488 | |
1489 // Copy arguments from caller (stdlib, foreign, heap). | |
1490 for (int i = 2; i >= 0; --i) { | |
1491 __ LoadP(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + | |
1492 i * kPointerSize)); | |
1493 __ push(r4); | |
1494 } | |
1495 // Call runtime, on success unwind frame, and parent frame. | |
1496 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); | |
1497 // A smi 0 is returned on failure, an object on success. | |
1498 __ JumpIfSmi(r3, &failed); | |
1499 scope.GenerateLeaveFrame(); | |
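    // Drop the receiver together with the three stack arguments (stdlib,
    // foreign, heap) from the caller's frame before returning.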
    __ Drop(4);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  // On failure, tail call back to regular js.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}

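// The macro below stamps out a pair of builtins (one per marking parity) for
// every code age in CODE_AGE_LIST; each variant simply tail-calls the common
// make-code-young helper above.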
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r4);

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(
      r9,
      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(
      r9,
      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// Clobbers registers {r7, r8, r9, r10}.
void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                             Register function_template_info,
                             Label* receiver_check_failed) {
  Register signature = r7;
  Register map = r8;
  Register constructor = r9;
  Register scratch = r10;

  // If there is no signature, return the holder.
  __ LoadP(signature, FieldMemOperand(function_template_info,
                                      FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  __ cmpi(scratch, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ bne(&next_prototype);
  Register type = constructor;
  __ LoadP(type,
           FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(type,
           FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(signature, type);
  __ beq(&receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
  __ bne(&next_prototype);

  // Otherwise load the parent function template and iterate.
  __ LoadP(type,
           FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ b(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
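  // Only a hidden prototype keeps the walk going; if the map has none, the
  // chain is exhausted without a signature match and the check fails.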
  __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch, SetRC);
  __ beq(receiver_check_failed, cr0);

  __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
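  // The receiver sits just above the arguments: r11 = argc * kPointerSize is
  // its offset from sp.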
  __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r5, MemOperand(sp, r11));
  CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset));
  __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver);
  __ addi(r11, r11, Operand(kPointerSize));
  __ add(sp, sp, r11);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code start + osr_offset
    __ add(r0, r3, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
    __ blr();
  }
}


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- r3    : number of arguments
  //  -- r4    : function
  //  -- cp    : context
  //  -- lr    : return address
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into r3 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(r3);
    __ JumpIfSmi(r3, &receiver_not_date);
    __ CompareObjectType(r3, r5, r6, JS_DATE_TYPE);
    __ bne(&receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ LoadP(r3, FieldMemOperand(r3, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
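      // Cached fields are only trustworthy while the date cache stamp still
      // matches the one recorded on the JSDate; on a mismatch, recompute the
      // field via the C function below.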
      Label stamp_mismatch;
      __ mov(r4, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ LoadP(r4, MemOperand(r4));
      __ LoadP(ip, FieldMemOperand(r3, JSDate::kCacheStampOffset));
      __ cmp(r4, ip);
      __ bne(&stamp_mismatch);
      __ LoadP(r3, FieldMemOperand(
                       r3, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ bind(&stamp_mismatch);
    }
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, r4);
    __ LoadSmiLiteral(r4, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ push(r3);
    __ LoadSmiLiteral(r3, Smi::FromInt(0));
    __ EnterBuiltinFrame(cp, r4, r3);
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r4, argArray into r3 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r3);
    __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
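    // arg_size encodes argc: below kPointerSize means no arguments were passed
    // (keep the undefined defaults), equal means only thisArg was provided.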
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argArray
  //  -- r4    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r4, &receiver_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&receiver_not_callable, cr0);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ li(r3, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r3: actual number of arguments
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r4, MemOperand(sp, r5));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r5, sp, r5);

    __ mtctr(r3);
    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ bdnz(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r4);
    __ mr(r3, r4);
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r4, &target_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&target_not_callable, cr0);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // new.target into r6 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and leave undefined as
  // the receiver on the stack.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(r3, r4);
    __ mr(r6, r4);
    __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ mr(r6, r4);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ mr(sp, new_sp);
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r6    : new.target
  //  -- r4    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r4, &target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&target_not_constructor, cr0);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r6, &new_target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&new_target_not_constructor, cr0);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r6, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  // Make r8 the space we have left. The stack might already be overflowed
  // here which will cause r8 to become negative.
  __ sub(r8, sp, r8);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
  __ cmp(r8, r0);
  __ ble(stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
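  // r4 holds the argument count as a smi; convert it to a byte offset and pop
  // the arguments themselves (the receiver was already removed via
  // stack_adjustment above).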
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r3, &create_runtime);

    // Load the map of argumentsList into r5.
    __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));

    // Load native context into r7.
    __ LoadP(r7, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6, r3);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r4, r6);
      __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset));
      __ SmiUntag(r5);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ LoadP(r5, FieldMemOperand(r3, JSArgumentsObject::kLengthOffset));
    __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset));
    __ cmp(r5, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r5);
    __ mr(r3, r7);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r5);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
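    // Only packed element kinds (FAST_SMI_ELEMENTS and FAST_ELEMENTS) are
    // handled inline; holey smi arrays and anything past FAST_ELEMENTS take
    // the runtime path.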
    __ cmpi(r5, Operand(FAST_ELEMENTS));
    __ bgt(&create_runtime);
    __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ beq(&create_runtime);
    __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ SmiUntag(r5);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
    __ cmp(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r4    : target
  //  -- r3    : args (a FixedArray built from argumentsList)
  //  -- r5    : len (number of elements to push from args)
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args;
    __ cmpi(r5, Operand::Zero());
    __ beq(&no_args);
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
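    // r3 is biased one slot before the first element so that LoadPU's
    // pre-increment addressing walks the FixedArray in order.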
    __ mtctr(r5);
    __ bind(&loop);
    __ LoadPU(r0, MemOperand(r3, kPointerSize));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);
    __ mr(r3, r5);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r6, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ lbz(scratch1, MemOperand(scratch1));
  __ cmpi(scratch1, Operand::Zero());
  __ beq(&done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ LoadP(scratch3,
             MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
    __ bne(&no_interpreter_frame);
    __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(
      scratch3,
      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mr(fp, scratch2);
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ LoadP(scratch1,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ LoadP(scratch1,
           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      caller_args_count_reg,
      FieldMemOperand(scratch1,
                      SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(caller_args_count_reg);
#endif

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r4);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestBitMask(r6, SharedFunctionInfo::kClassConstructorBits, r0);
  __ bne(&class_constructor, cr0);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
                          (1 << SharedFunctionInfo::kNativeBit)));
  __ bne(&done_convert, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : the function to call (checked to be a JSFunction)
    //  -- r5 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r6);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
      __ LoadPX(r6, MemOperand(sp, r6));
      __ JumpIfSmi(r6, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r6);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r3);
        __ Push(r3, r4);
        __ mr(r3, r6);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mr(r6, r3);
        __ Pop(r3, r4);
        __ SmiUntag(r3);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ StorePX(r6, MemOperand(sp, r7));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  //  -- r5 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

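  // Fetch the formal parameter count; on 32-bit targets it is stored as a smi
  // and untagged below.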
  __ LoadWordArith(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  ParameterCount actual(r3);
  ParameterCount expected(r5);
  __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r4);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}


namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : target (checked to be a JSBoundFunction)
  //  -- r6 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r5 and length of that into r7.
  Label no_bound_arguments;
  __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
  __ SmiUntag(r7, SetRC);
  __ beq(&no_bound_arguments, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : target (checked to be a JSBoundFunction)
    //  -- r5 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r6 : new.target (only in case of [[Construct]])
    //  -- r7 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ mr(r9, sp);  // preserve previous stack pointer
      __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
      __ sub(sp, sp, r10);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ mr(sp, r9);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r9 : the previous stack pointer
    //  -- r10: the size of the [[BoundArguments]]
    {
      Label skip, loop;
      __ li(r8, Operand::Zero());
      __ cmpi(r3, Operand::Zero());
      __ beq(&skip);
      __ mtctr(r3);
      __ bind(&loop);
      __ LoadPX(r0, MemOperand(r9, r8));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ add(r5, r5, r10);
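      // r5 now points one past the last bound argument; LoadPU walks it
      // backwards while r8 keeps advancing the destination just past the
      // relocated arguments.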
      __ mtctr(r7);
      __ bind(&loop);
      __ LoadPU(r0, MemOperand(r5, -kPointerSize));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ add(r3, r3, r7);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
  __ StorePX(ip, MemOperand(sp, r0));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
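  // Fetch the Call builtin's entry off the isolate, then jump past the Code
  // header to its first instruction.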
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r4, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&non_callable, cr0);

  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r4);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addi(r3, r3, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
  __ StorePX(r4, MemOperand(sp, r8));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r4);

  // The calling convention for function-specific ConstructStubs requires
  // r5 to contain either an AllocationSite or undefined.
2718 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | |
2719 | |
2720 // Tail call to the function-specific construct stub (still in the caller | |
2721 // context at this point). | |
2722 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | |
2723 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); | |
2724 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
2725 __ JumpToJSEntry(ip); | |
2726 } | |
2727 | |
2728 | |
2729 // static | |
2730 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { | |
2731 // ----------- S t a t e ------------- | |
2732 // -- r3 : the number of arguments (not including the receiver) | |
2733 // -- r4 : the function to call (checked to be a JSBoundFunction) | |
2734 // -- r6 : the new target (checked to be a constructor) | |
2735 // ----------------------------------- | |
2736 __ AssertBoundFunction(r4); | |
2737 | |
2738 // Push the [[BoundArguments]] onto the stack. | |
2739 Generate_PushBoundArguments(masm); | |
2740 | |
2741 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. | |
2742 Label skip; | |
2743 __ cmp(r4, r6); | |
2744 __ bne(&skip); | |
2745 __ LoadP(r6, | |
2746 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); | |
2747 __ bind(&skip); | |
2748 | |
2749 // Construct the [[BoundTargetFunction]] via the Construct builtin. | |
2750 __ LoadP(r4, | |
2751 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); | |
2752 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); | |
2753 __ LoadP(ip, MemOperand(ip)); | |
2754 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
2755 __ JumpToJSEntry(ip); | |
2756 } | |
2757 | |
2758 | |
2759 // static | |
2760 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { | |
2761 // ----------- S t a t e ------------- | |
2762 // -- r3 : the number of arguments (not including the receiver) | |
2763 // -- r4 : the constructor to call (checked to be a JSProxy) | |
2764 // -- r6 : the new target (either the same as the constructor or | |
2765 // the JSFunction on which new was invoked initially) | |
2766 // ----------------------------------- | |
2767 | |
2768 // Call into the Runtime for Proxy [[Construct]]. | |
2769 __ Push(r4, r6); | |
2770 // Include the pushed new_target, constructor and the receiver. | |
2771 __ addi(r3, r3, Operand(3)); | |
2772 // Tail-call to the runtime. | |
2773 __ JumpToExternalReference( | |
2774 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); | |
2775 } | |
2776 | |
2777 | |
2778 // static | |
2779 void Builtins::Generate_Construct(MacroAssembler* masm) { | |
2780 // ----------- S t a t e ------------- | |
2781 // -- r3 : the number of arguments (not including the receiver) | |
2782 // -- r4 : the constructor to call (can be any Object) | |
2783 // -- r6 : the new target (either the same as the constructor or | |
2784 // the JSFunction on which new was invoked initially) | |
2785 // ----------------------------------- | |
2786 | |
2787 // Check if target is a Smi. | |
2788 Label non_constructor; | |
2789 __ JumpIfSmi(r4, &non_constructor); | |
2790 | |
2791 // Dispatch based on instance type. | |
2792 __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE); | |
2793 __ Jump(masm->isolate()->builtins()->ConstructFunction(), | |
2794 RelocInfo::CODE_TARGET, eq); | |
2795 | |
2796 // Check if target has a [[Construct]] internal method. | |
2797 __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset)); | |
2798 __ TestBit(r5, Map::kIsConstructor, r0); | |
2799 __ beq(&non_constructor, cr0); | |
2800 | |
2801 // Only dispatch to bound functions after checking whether they are | |
2802 // constructors. | |
2803 __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE)); | |
2804 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), | |
2805 RelocInfo::CODE_TARGET, eq); | |
2806 | |
2807 // Only dispatch to proxies after checking whether they are constructors. | |
2808 __ cmpi(r8, Operand(JS_PROXY_TYPE)); | |
2809 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, | |
2810 eq); | |
2811 | |
2812 // Called Construct on an exotic Object with a [[Construct]] internal method. | |
2813 { | |
2814 // Overwrite the original receiver with the (original) target. | |
2815 __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2)); | |
2816 __ StorePX(r4, MemOperand(sp, r8)); | |
2817 // Let the "call_as_constructor_delegate" take care of the rest. | |
2818 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4); | |
2819 __ Jump(masm->isolate()->builtins()->CallFunction(), | |
2820 RelocInfo::CODE_TARGET); | |
2821 } | |
2822 | |
2823 // Called Construct on an Object that doesn't have a [[Construct]] internal | |
2824 // method. | |
2825 __ bind(&non_constructor); | |
2826 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), | |
2827 RelocInfo::CODE_TARGET); | |
2828 } | |
2829 | |
2830 // static | |
2831 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { | |
2832 // ----------- S t a t e ------------- | |
2833 // -- r4 : requested object size (untagged) | |
2834 // -- lr : return address | |
2835 // ----------------------------------- | |
2836 __ SmiTag(r4); | |
2837 __ Push(r4); | |
2838 __ LoadSmiLiteral(cp, Smi::FromInt(0)); | |
2839 __ TailCallRuntime(Runtime::kAllocateInNewSpace); | |
2840 } | |
2841 | |
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r4);
  __ LoadSmiLiteral(r5, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(r4, r5);
  __ LoadSmiLiteral(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in r3.
  __ AssertString(r3);

  // Check if string has a cached array index.
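  // Strings that are valid array indices (e.g. "7") cache that index in
  // their hash field, so they can be converted without calling the runtime.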
  Label runtime;
  __ lwz(r5, FieldMemOperand(r3, String::kHashFieldOffset));
  __ And(r0, r5, Operand(String::kContainsCachedArrayIndexMask), SetRC);
  __ bne(&runtime, cr0);
  __ IndexFromHash(r5, r3);
  __ blr();

  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(r3);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}

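// ToNumber handles the cases that need no conversion inline: Smis and heap
// numbers are returned unchanged. Everything else is forwarded to the
// NonNumberToNumber builtin below.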
// static
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in r3.
  STATIC_ASSERT(kSmiTag == 0);
  __ TestIfSmi(r3, r0);
  __ Ret(eq, cr0);

  __ CompareObjectType(r3, r4, r4, HEAP_NUMBER_TYPE);
  // r3: receiver
  // r4: receiver instance type
  __ Ret(eq);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in r3.
  __ AssertNotNumber(r3);

  __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
  // r3: receiver
  // r4: receiver instance type
  __ Jump(masm->isolate()->builtins()->StringToNumber(), RelocInfo::CODE_TARGET,
          lt);

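  // Oddballs (undefined, null, true and false) precompute their ToNumber
  // value (NaN, 0, 1 and 0, respectively), so it can be loaded straight from
  // the oddball's to_number field.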
  Label not_oddball;
  __ cmpi(r4, Operand(ODDBALL_TYPE));
  __ bne(&not_oddball);
  __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset));
  __ blr();
  __ bind(&not_oddball);

  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(r3);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}

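// The arguments adaptor bridges calls whose actual argument count differs
// from the callee's expected count: missing arguments are filled in with
// undefined, while surplus arguments are left in place and stay reachable
// through the actual count recorded in the adaptor frame. For example,
// calling f(1) on function f(a, b) runs f with b === undefined.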
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);
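  // The sentinel marks functions that cope with any argument count on their
  // own, so the trampoline can jump straight to the code entry below.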

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r3 and copy end address into r7.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // Adjust for return address and receiver.
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, r3, r7);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // r7: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r7);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, fp, r7);
    // Adjust for frame.
    __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r7);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mr(r3, r5);
  // r3 : expected number of arguments
  // r4 : function (passed through to callee)
  // r6 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC