Chromium Code Reviews

Side by Side Diff: src/arm/builtins-arm.cc

Issue 2145023002: [builtins] move builtin files to src/builtins/. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebase Created 4 years, 5 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_ARM
6
7 #include "src/codegen.h"
8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11 #include "src/runtime/runtime.h"
12
13 namespace v8 {
14 namespace internal {
15
16
17 #define __ ACCESS_MASM(masm)
18
19 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
20 ExitFrameType exit_frame_type) {
21 // ----------- S t a t e -------------
22 // -- r0 : number of arguments excluding receiver
23 // -- r1 : target
24 // -- r3 : new.target
25 // -- sp[0] : last argument
26 // -- ...
27 // -- sp[4 * (argc - 1)] : first argument
28 // -- sp[4 * argc] : receiver
29 // -----------------------------------
30 __ AssertFunction(r1);
31
32 // Make sure we operate in the context of the called function (for example
33 // ConstructStubs implemented in C++ will be run in the context of the caller
34 // instead of the callee, due to the way that [[Construct]] is defined for
35 // ordinary functions).
36 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
37
38 // JumpToExternalReference expects r0 to contain the number of arguments
39 // including the receiver and the extra arguments.
40 const int num_extra_args = 3;
41 __ add(r0, r0, Operand(num_extra_args + 1));
42
43 // Insert extra arguments.
44 __ SmiTag(r0);
45 __ Push(r0, r1, r3);
46 __ SmiUntag(r0);
47
48 __ JumpToExternalReference(ExternalReference(id, masm->isolate()),
49 exit_frame_type == BUILTIN_EXIT);
50 }
51
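Aside on the SmiTag/SmiUntag pair in Generate_Adaptor above: on 32-bit targets V8 encodes a small integer ("smi") by shifting it left one bit, leaving the low tag bit 0 so a smi can be told apart from a heap pointer (tag 1). A minimal sketch of the encoding, illustrative only and not V8's actual implementation:

  #include <cstdint>

  // One-bit smi tag: a smi stores a 31-bit integer shifted left by one, so
  // the low bit is 0; heap object pointers carry tag 1.
  int32_t SmiTag(int32_t value) { return value << 1; }
  int32_t SmiUntag(int32_t smi) { return smi >> 1; }    // arithmetic shift
  bool IsSmi(int32_t word) { return (word & 1) == 0; }
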
52
53 // Load the built-in InternalArray function from the current context.
54 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
55 Register result) {
56 // Load the InternalArray function from the current native context.
57 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
58 }
59
60
61 // Load the built-in Array function from the current context.
62 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
63 // Load the Array function from the current native context.
64 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
65 }
66
67
68 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
69 // ----------- S t a t e -------------
70 // -- r0 : number of arguments
71 // -- lr : return address
72 // -- sp[...]: constructor arguments
73 // -----------------------------------
74 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
75
76 // Get the InternalArray function.
77 GenerateLoadInternalArrayFunction(masm, r1);
78
79 if (FLAG_debug_code) {
80 // Initial map for the builtin InternalArray functions should be maps.
81 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
82 __ SmiTst(r2);
83 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
84 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
85 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
86 }
87
88 // Run the native code for the InternalArray function called as a normal
89 // function.
90 // Tail call the InternalArrayConstructorStub.
91 InternalArrayConstructorStub stub(masm->isolate());
92 __ TailCallStub(&stub);
93 }
94
95
96 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
97 // ----------- S t a t e -------------
98 // -- r0 : number of arguments
99 // -- lr : return address
100 // -- sp[...]: constructor arguments
101 // -----------------------------------
102 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
103
104 // Get the Array function.
105 GenerateLoadArrayFunction(masm, r1);
106
107 if (FLAG_debug_code) {
108 // Initial map for the builtin Array functions should be maps.
109 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
110 __ SmiTst(r2);
111 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
112 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
113 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
114 }
115
116 __ mov(r3, r1);
117 // Run the native code for the Array function called as a normal function.
118 // Tail call the ArrayConstructorStub.
119 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
120 ArrayConstructorStub stub(masm->isolate());
121 __ TailCallStub(&stub);
122 }
123
124
125 // static
126 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
127 // ----------- S t a t e -------------
128 // -- r0 : number of arguments
129 // -- r1 : function
130 // -- cp : context
131 // -- lr : return address
132 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
133 // -- sp[argc * 4] : receiver
134 // -----------------------------------
135 Condition const cc_done = (kind == MathMaxMinKind::kMin) ? mi : gt;
136 Condition const cc_swap = (kind == MathMaxMinKind::kMin) ? gt : mi;
137 Heap::RootListIndex const root_index =
138 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
139 : Heap::kMinusInfinityValueRootIndex;
140 DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
141
142 // Load the accumulator with the default return value (either -Infinity or
143 // +Infinity), with the tagged value in r5 and the double value in d1.
144 __ LoadRoot(r5, root_index);
145 __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));
146
147 Label done_loop, loop;
148 __ mov(r4, r0);
149 __ bind(&loop);
150 {
151 // Check if all parameters have been processed.
152 __ sub(r4, r4, Operand(1), SetCC);
153 __ b(lt, &done_loop);
154
155 // Load the next parameter tagged value into r2.
156 __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2));
157
158 // Load the double value of the parameter into d2, maybe converting the
159 // parameter to a number first using the ToNumber builtin if necessary.
160 Label convert, convert_smi, convert_number, done_convert;
161 __ bind(&convert);
162 __ JumpIfSmi(r2, &convert_smi);
163 __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
164 __ JumpIfRoot(r3, Heap::kHeapNumberMapRootIndex, &convert_number);
165 {
166 // Parameter is not a Number, use the ToNumber builtin to convert it.
167 DCHECK(!FLAG_enable_embedded_constant_pool);
168 FrameScope scope(masm, StackFrame::MANUAL);
169 __ SmiTag(r0);
170 __ SmiTag(r4);
171 __ EnterBuiltinFrame(cp, r1, r0);
172 __ Push(r4, r5);
173 __ mov(r0, r2);
174 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
175 __ mov(r2, r0);
176 __ Pop(r4, r5);
177 __ LeaveBuiltinFrame(cp, r1, r0);
178 __ SmiUntag(r4);
179 __ SmiUntag(r0);
180 {
181 // Restore the double accumulator value (d1).
182 Label done_restore;
183 __ SmiToDouble(d1, r5);
184 __ JumpIfSmi(r5, &done_restore);
185 __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));
186 __ bind(&done_restore);
187 }
188 }
189 __ b(&convert);
190 __ bind(&convert_number);
191 __ vldr(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
192 __ b(&done_convert);
193 __ bind(&convert_smi);
194 __ SmiToDouble(d2, r2);
195 __ bind(&done_convert);
196
197 // Perform the actual comparison with the accumulator value on the left hand
198 // side (d1) and the next parameter value on the right hand side (d2).
199 Label compare_nan, compare_swap;
200 __ VFPCompareAndSetFlags(d1, d2);
201 __ b(cc_done, &loop);
202 __ b(cc_swap, &compare_swap);
203 __ b(vs, &compare_nan);
204
205 // Left and right hand side are equal, check for -0 vs. +0.
206 __ VmovHigh(ip, reg);
207 __ cmp(ip, Operand(0x80000000));
208 __ b(ne, &loop);
209
210 // Result is on the right hand side.
211 __ bind(&compare_swap);
212 __ vmov(d1, d2);
213 __ mov(r5, r2);
214 __ b(&loop);
215
216 // At least one side is NaN, which means that the result will be NaN too.
217 __ bind(&compare_nan);
218 __ LoadRoot(r5, Heap::kNanValueRootIndex);
219 __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));
220 __ b(&loop);
221 }
222
223 __ bind(&done_loop);
224 // Drop all slots, including the receiver.
225 __ add(r0, r0, Operand(1));
226 __ Drop(r0);
227 __ mov(r0, r5);
228 __ Ret();
229 }
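The equal-but-maybe-minus-zero and NaN paths above rely on IEEE-754 bit patterns: -0.0 compares equal to +0.0, so the stub inspects the upper word of the double (0x80000000 for -0.0), and an unordered VFP compare (NaN on either side) sets the overflow flag, hence the branch on vs. A portable sketch of those two tests, for illustration only:

  #include <cstdint>
  #include <cstring>

  bool IsMinusZero(double x) {
    uint64_t bits;
    std::memcpy(&bits, &x, sizeof bits);   // type-pun without UB
    return bits == (uint64_t{1} << 63);    // sign bit only: -0.0
  }

  bool IsUnordered(double a, double b) {   // true iff either operand is NaN
    return a != a || b != b;
  }
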
230
231 // static
232 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
233 // ----------- S t a t e -------------
234 // -- r0 : number of arguments
235 // -- r1 : constructor function
236 // -- cp : context
237 // -- lr : return address
238 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
239 // -- sp[argc * 4] : receiver
240 // -----------------------------------
241
242 // 1. Load the first argument into r0.
243 Label no_arguments;
244 {
245 __ mov(r2, r0); // Store argc in r2.
246 __ sub(r0, r0, Operand(1), SetCC);
247 __ b(lo, &no_arguments);
248 __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2));
249 }
250
251 // 2a. Convert the first argument to a number.
252 {
253 FrameScope scope(masm, StackFrame::MANUAL);
254 __ SmiTag(r2);
255 __ EnterBuiltinFrame(cp, r1, r2);
256 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
257 __ LeaveBuiltinFrame(cp, r1, r2);
258 __ SmiUntag(r2);
259 }
260
261 {
262 // Drop all arguments including the receiver.
263 __ Drop(r2);
264 __ Ret(1);
265 }
266
267 // 2b. No arguments, return +0.
268 __ bind(&no_arguments);
269 __ Move(r0, Smi::FromInt(0));
270 __ Ret(1);
271 }
272
273
274 // static
275 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
276 // ----------- S t a t e -------------
277 // -- r0 : number of arguments
278 // -- r1 : constructor function
279 // -- r3 : new target
280 // -- cp : context
281 // -- lr : return address
282 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
283 // -- sp[argc * 4] : receiver
284 // -----------------------------------
285
286 // 1. Make sure we operate in the context of the called function.
287 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
288
289 // 2. Load the first argument into r2.
290 {
291 Label no_arguments, done;
292 __ mov(r6, r0); // Store argc in r6.
293 __ sub(r0, r0, Operand(1), SetCC);
294 __ b(lo, &no_arguments);
295 __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
296 __ b(&done);
297 __ bind(&no_arguments);
298 __ Move(r2, Smi::FromInt(0));
299 __ bind(&done);
300 }
301
302 // 3. Make sure r2 is a number.
303 {
304 Label done_convert;
305 __ JumpIfSmi(r2, &done_convert);
306 __ CompareObjectType(r2, r4, r4, HEAP_NUMBER_TYPE);
307 __ b(eq, &done_convert);
308 {
309 FrameScope scope(masm, StackFrame::MANUAL);
310 __ SmiTag(r6);
311 __ EnterBuiltinFrame(cp, r1, r6);
312 __ Push(r3);
313 __ Move(r0, r2);
314 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
315 __ Move(r2, r0);
316 __ Pop(r3);
317 __ LeaveBuiltinFrame(cp, r1, r6);
318 __ SmiUntag(r6);
319 }
320 __ bind(&done_convert);
321 }
322
323 // 4. Check if new target and constructor differ.
324 Label drop_frame_and_ret, new_object;
325 __ cmp(r1, r3);
326 __ b(ne, &new_object);
327
328 // 5. Allocate a JSValue wrapper for the number.
329 __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
330 __ b(&drop_frame_and_ret);
331
332 // 6. Fallback to the runtime to create new object.
333 __ bind(&new_object);
334 {
335 FrameScope scope(masm, StackFrame::MANUAL);
336 FastNewObjectStub stub(masm->isolate());
337 __ SmiTag(r6);
338 __ EnterBuiltinFrame(cp, r1, r6);
339 __ Push(r2); // first argument
340 __ CallStub(&stub);
341 __ Pop(r2);
342 __ LeaveBuiltinFrame(cp, r1, r6);
343 __ SmiUntag(r6);
344 }
345 __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
346
347 __ bind(&drop_frame_and_ret);
348 {
349 __ Drop(r6);
350 __ Ret(1);
351 }
352 }
353
354
355 // static
356 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
357 // ----------- S t a t e -------------
358 // -- r0 : number of arguments
359 // -- r1 : constructor function
360 // -- cp : context
361 // -- lr : return address
362 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
363 // -- sp[argc * 4] : receiver
364 // -----------------------------------
365
366 // 1. Load the first argument into r0.
367 Label no_arguments;
368 {
369 __ mov(r2, r0); // Store argc in r2.
370 __ sub(r0, r0, Operand(1), SetCC);
371 __ b(lo, &no_arguments);
372 __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2));
373 }
374
375 // 2a. At least one argument, return r0 if it's a string, otherwise
376 // dispatch to the appropriate conversion.
377 Label drop_frame_and_ret, to_string, symbol_descriptive_string;
378 {
379 __ JumpIfSmi(r0, &to_string);
380 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
381 __ CompareObjectType(r0, r3, r3, FIRST_NONSTRING_TYPE);
382 __ b(hi, &to_string);
383 __ b(eq, &symbol_descriptive_string);
384 __ b(&drop_frame_and_ret);
385 }
386
387 // 2b. No arguments, return the empty string (and pop the receiver).
388 __ bind(&no_arguments);
389 {
390 __ LoadRoot(r0, Heap::kempty_stringRootIndex);
391 __ Ret(1);
392 }
393
394 // 3a. Convert r0 to a string.
395 __ bind(&to_string);
396 {
397 FrameScope scope(masm, StackFrame::MANUAL);
398 ToStringStub stub(masm->isolate());
399 __ SmiTag(r2);
400 __ EnterBuiltinFrame(cp, r1, r2);
401 __ CallStub(&stub);
402 __ LeaveBuiltinFrame(cp, r1, r2);
403 __ SmiUntag(r2);
404 }
405 __ b(&drop_frame_and_ret);
406
407 // 3b. Convert symbol in r0 to a string.
408 __ bind(&symbol_descriptive_string);
409 {
410 __ Drop(r2);
411 __ Drop(1);
412 __ Push(r0);
413 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
414 }
415
416 __ bind(&drop_frame_and_ret);
417 {
418 __ Drop(r2);
419 __ Ret(1);
420 }
421 }
422
423
424 // static
425 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
426 // ----------- S t a t e -------------
427 // -- r0 : number of arguments
428 // -- r1 : constructor function
429 // -- r3 : new target
430 // -- cp : context
431 // -- lr : return address
432 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
433 // -- sp[argc * 4] : receiver
434 // -----------------------------------
435
436 // 1. Make sure we operate in the context of the called function.
437 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
438
439 // 2. Load the first argument into r2.
440 {
441 Label no_arguments, done;
442 __ mov(r6, r0); // Store argc in r6.
443 __ sub(r0, r0, Operand(1), SetCC);
444 __ b(lo, &no_arguments);
445 __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
446 __ b(&done);
447 __ bind(&no_arguments);
448 __ LoadRoot(r2, Heap::kempty_stringRootIndex);
449 __ bind(&done);
450 }
451
452 // 3. Make sure r2 is a string.
453 {
454 Label convert, done_convert;
455 __ JumpIfSmi(r2, &convert);
456 __ CompareObjectType(r2, r4, r4, FIRST_NONSTRING_TYPE);
457 __ b(lo, &done_convert);
458 __ bind(&convert);
459 {
460 FrameScope scope(masm, StackFrame::MANUAL);
461 ToStringStub stub(masm->isolate());
462 __ SmiTag(r6);
463 __ EnterBuiltinFrame(cp, r1, r6);
464 __ Push(r3);
465 __ Move(r0, r2);
466 __ CallStub(&stub);
467 __ Move(r2, r0);
468 __ Pop(r3);
469 __ LeaveBuiltinFrame(cp, r1, r6);
470 __ SmiUntag(r6);
471 }
472 __ bind(&done_convert);
473 }
474
475 // 4. Check if new target and constructor differ.
476 Label drop_frame_and_ret, new_object;
477 __ cmp(r1, r3);
478 __ b(ne, &new_object);
479
480 // 5. Allocate a JSValue wrapper for the string.
481 __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
482 __ b(&drop_frame_and_ret);
483
484 // 6. Fallback to the runtime to create new object.
485 __ bind(&new_object);
486 {
487 FrameScope scope(masm, StackFrame::MANUAL);
488 FastNewObjectStub stub(masm->isolate());
489 __ SmiTag(r6);
490 __ EnterBuiltinFrame(cp, r1, r6);
491 __ Push(r2); // first argument
492 __ CallStub(&stub);
493 __ Pop(r2);
494 __ LeaveBuiltinFrame(cp, r1, r6);
495 __ SmiUntag(r6);
496 }
497 __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
498
499 __ bind(&drop_frame_and_ret);
500 {
501 __ Drop(r6);
502 __ Ret(1);
503 }
504 }
505
506
507 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
508 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
509 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
510 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
511 __ Jump(r2);
512 }
513
514 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
515 Runtime::FunctionId function_id) {
516 // ----------- S t a t e -------------
517 // -- r0 : argument count (preserved for callee)
518 // -- r1 : target function (preserved for callee)
519 // -- r3 : new target (preserved for callee)
520 // -----------------------------------
521 {
522 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
523 // Push the number of arguments to the callee.
524 __ SmiTag(r0);
525 __ push(r0);
526 // Push a copy of the target function and the new target.
527 __ push(r1);
528 __ push(r3);
529 // Push function as parameter to the runtime call.
530 __ Push(r1);
531
532 __ CallRuntime(function_id, 1);
533 __ mov(r2, r0);
534
535 // Restore target function and new target.
536 __ pop(r3);
537 __ pop(r1);
538 __ pop(r0);
539 __ SmiUntag(r0, r0);
540 }
541 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
542 __ Jump(r2);
543 }
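The `Code::kHeaderSize - kHeapObjectTag` adjustment above (also used in GenerateTailCallToSharedCode) turns a tagged Code object pointer into the address of its first instruction: heap pointers carry a one-bit tag, and the instructions start kHeaderSize bytes into the object. A sketch of the arithmetic, with the constants passed in since their values are build-specific:

  #include <cstdint>

  // First instruction = object start (tag removed) + header size.
  uintptr_t CodeEntry(uintptr_t tagged_code_ptr, int header_size,
                      int heap_object_tag) {
    return tagged_code_ptr - heap_object_tag + header_size;
  }
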
544
545
546 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
547 // Checking whether the queued function is ready for install is optional,
548 // since we come across interrupts and stack checks elsewhere. However,
549 // not checking may delay installing ready functions, and always checking
550 // would be quite expensive. A good compromise is to first check against
551 // stack limit as a cue for an interrupt signal.
552 Label ok;
553 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
554 __ cmp(sp, Operand(ip));
555 __ b(hs, &ok);
556
557 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
558
559 __ bind(&ok);
560 GenerateTailCallToSharedCode(masm);
561 }
562
563
564 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
565 bool is_api_function,
566 bool create_implicit_receiver,
567 bool check_derived_construct) {
568 // ----------- S t a t e -------------
569 // -- r0 : number of arguments
570 // -- r1 : constructor function
571 // -- r2 : allocation site or undefined
572 // -- r3 : new target
573 // -- cp : context
574 // -- lr : return address
575 // -- sp[...]: constructor arguments
576 // -----------------------------------
577
578 Isolate* isolate = masm->isolate();
579
580 // Enter a construct frame.
581 {
582 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
583
584 // Preserve the incoming parameters on the stack.
585 __ AssertUndefinedOrAllocationSite(r2, r4);
586 __ Push(cp);
587 __ SmiTag(r0);
588 __ Push(r2, r0);
589
590 if (create_implicit_receiver) {
591 // Allocate the new receiver object.
592 __ Push(r1, r3);
593 FastNewObjectStub stub(masm->isolate());
594 __ CallStub(&stub);
595 __ mov(r4, r0);
596 __ Pop(r1, r3);
597
598 // ----------- S t a t e -------------
599 // -- r1: constructor function
600 // -- r3: new target
601 // -- r4: newly allocated object
602 // -----------------------------------
603
604 // Retrieve smi-tagged arguments count from the stack.
605 __ ldr(r0, MemOperand(sp));
606 }
607
608 __ SmiUntag(r0);
609
610 if (create_implicit_receiver) {
611 // Push the allocated receiver to the stack. We need two copies
612 // because we may have to return the original one and the calling
613 // conventions dictate that the called function pops the receiver.
614 __ push(r4);
615 __ push(r4);
616 } else {
617 __ PushRoot(Heap::kTheHoleValueRootIndex);
618 }
619
620 // Set up pointer to last argument.
621 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
622
623 // Copy arguments and receiver to the expression stack.
624 // r0: number of arguments
625 // r1: constructor function
626 // r2: address of last argument (caller sp)
627 // r3: new target
628 // r4: number of arguments (smi-tagged)
629 // sp[0]: receiver
630 // sp[1]: receiver
631 // sp[2]: number of arguments (smi-tagged)
632 Label loop, entry;
633 __ SmiTag(r4, r0);
634 __ b(&entry);
635 __ bind(&loop);
636 __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
637 __ push(ip);
638 __ bind(&entry);
639 __ sub(r4, r4, Operand(2), SetCC);
640 __ b(ge, &loop);
641
642 // Call the function.
643 // r0: number of arguments
644 // r1: constructor function
645 // r3: new target
646 ParameterCount actual(r0);
647 __ InvokeFunction(r1, r3, actual, CALL_FUNCTION,
648 CheckDebugStepCallWrapper());
649
650 // Store offset of return address for deoptimizer.
651 if (create_implicit_receiver && !is_api_function) {
652 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
653 }
654
655 // Restore context from the frame.
656 // r0: result
657 // sp[0]: receiver
658 // sp[1]: number of arguments (smi-tagged)
659 __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
660
661 if (create_implicit_receiver) {
662 // If the result is an object (in the ECMA sense), we should get rid
663 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
664 // on page 74.
665 Label use_receiver, exit;
666
667 // If the result is a smi, it is *not* an object in the ECMA sense.
668 // r0: result
669 // sp[0]: receiver
670 // sp[1]: number of arguments (smi-tagged)
671 __ JumpIfSmi(r0, &use_receiver);
672
673 // If the type of the result (stored in its map) is less than
674 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
675 __ CompareObjectType(r0, r1, r3, FIRST_JS_RECEIVER_TYPE);
676 __ b(ge, &exit);
677
678 // Throw away the result of the constructor invocation and use the
679 // on-stack receiver as the result.
680 __ bind(&use_receiver);
681 __ ldr(r0, MemOperand(sp));
682
683 // Remove receiver from the stack, remove caller arguments, and
684 // return.
685 __ bind(&exit);
686 // r0: result
687 // sp[0]: receiver (newly allocated object)
688 // sp[1]: number of arguments (smi-tagged)
689 __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
690 } else {
691 __ ldr(r1, MemOperand(sp));
692 }
693
694 // Leave construct frame.
695 }
696
697 // ES6 9.2.2. Step 13+
698 // Check that the result is not a Smi: a Smi result would mean the derived
699 // class constructor returned neither undefined nor an object, so we throw.
700 if (check_derived_construct) {
701 Label dont_throw;
702 __ JumpIfNotSmi(r0, &dont_throw);
703 {
704 FrameScope scope(masm, StackFrame::INTERNAL);
705 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
706 }
707 __ bind(&dont_throw);
708 }
709
710 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
711 __ add(sp, sp, Operand(kPointerSize));
712 if (create_implicit_receiver) {
713 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
714 }
715 __ Jump(lr);
716 }
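The argument-copy loop above indexes with `Operand(r4, LSL, kPointerSizeLog2 - 1)` while r4 still holds a smi-tagged count, and the same trick drops the caller's arguments at the end (`Operand(r1, LSL, kPointerSizeLog2 - 1)`). Because a smi is the value shifted left one bit, shifting it by one less than the pointer-size log compensates for the tag. A sketch, assuming 4-byte pointers (kPointerSizeLog2 == 2):

  #include <cstdint>

  const int kPointerSizeLog2 = 2;  // 32-bit ARM: 4-byte pointers

  // base + n * kPointerSize, computed directly from the smi (n << 1).
  uintptr_t SmiScaledAddress(uintptr_t base, int32_t smi_count) {
    return base + (static_cast<uint32_t>(smi_count) << (kPointerSizeLog2 - 1));
  }
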
717
718
719 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
720 Generate_JSConstructStubHelper(masm, false, true, false);
721 }
722
723
724 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
725 Generate_JSConstructStubHelper(masm, true, false, false);
726 }
727
728
729 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
730 Generate_JSConstructStubHelper(masm, false, false, false);
731 }
732
733
734 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
735 MacroAssembler* masm) {
736 Generate_JSConstructStubHelper(masm, false, false, true);
737 }
738
739 // static
740 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
741 // ----------- S t a t e -------------
742 // -- r0 : the value to pass to the generator
743 // -- r1 : the JSGeneratorObject to resume
744 // -- r2 : the resume mode (tagged)
745 // -- lr : return address
746 // -----------------------------------
747 __ AssertGeneratorObject(r1);
748
749 // Store input value into generator object.
750 __ str(r0, FieldMemOperand(r1, JSGeneratorObject::kInputOrDebugPosOffset));
751 __ RecordWriteField(r1, JSGeneratorObject::kInputOrDebugPosOffset, r0, r3,
752 kLRHasNotBeenSaved, kDontSaveFPRegs);
753
754 // Store resume mode into generator object.
755 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kResumeModeOffset));
756
757 // Load suspended function and context.
758 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
759 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
760
761 // Flood function if we are stepping.
762 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
763 Label stepping_prepared;
764 ExternalReference last_step_action =
765 ExternalReference::debug_last_step_action_address(masm->isolate());
766 STATIC_ASSERT(StepFrame > StepIn);
767 __ mov(ip, Operand(last_step_action));
768 __ ldrsb(ip, MemOperand(ip));
769 __ cmp(ip, Operand(StepIn));
770 __ b(ge, &prepare_step_in_if_stepping);
771
772 // Flood function if we need to continue stepping in the suspended generator.
773 ExternalReference debug_suspended_generator =
774 ExternalReference::debug_suspended_generator_address(masm->isolate());
775 __ mov(ip, Operand(debug_suspended_generator));
776 __ ldr(ip, MemOperand(ip));
777 __ cmp(ip, Operand(r1));
778 __ b(eq, &prepare_step_in_suspended_generator);
779 __ bind(&stepping_prepared);
780
781 // Push receiver.
782 __ ldr(ip, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
783 __ Push(ip);
784
785 // ----------- S t a t e -------------
786 // -- r1 : the JSGeneratorObject to resume
787 // -- r2 : the resume mode (tagged)
788 // -- r4 : generator function
789 // -- cp : generator context
790 // -- lr : return address
791 // -- sp[0] : generator receiver
792 // -----------------------------------
793
794 // Push holes for arguments to generator function. Since the parser forced
795 // context allocation for any variables in generators, the actual argument
796 // values have already been copied into the context and these dummy values
797 // will never be used.
798 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
799 __ ldr(r3,
800 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
801 {
802 Label done_loop, loop;
803 __ bind(&loop);
804 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
805 __ b(mi, &done_loop);
806 __ PushRoot(Heap::kTheHoleValueRootIndex);
807 __ b(&loop);
808 __ bind(&done_loop);
809 }
810
811 // Dispatch on the kind of generator object.
812 Label old_generator;
813 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
814 __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
815 __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
816 __ b(ne, &old_generator);
817
818 // New-style (ignition/turbofan) generator object
819 {
820 __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
821 __ ldr(r0,
822 FieldMemOperand(r0, SharedFunctionInfo::kFormalParameterCountOffset));
823 __ SmiUntag(r0);
824 // We abuse new.target both to indicate that this is a resume call and to
825 // pass in the generator object. In ordinary calls, new.target is always
826 // undefined because generator functions are non-constructable.
827 __ Move(r3, r1);
828 __ Move(r1, r4);
829 __ ldr(r5, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
830 __ Jump(r5);
831 }
832
833 // Old-style (full-codegen) generator object
834 __ bind(&old_generator);
835 {
836 // Enter a new JavaScript frame, and initialize its slots as they were when
837 // the generator was suspended.
838 DCHECK(!FLAG_enable_embedded_constant_pool);
839 FrameScope scope(masm, StackFrame::MANUAL);
840 __ Push(lr, fp);
841 __ Move(fp, sp);
842 __ Push(cp, r4);
843
844 // Restore the operand stack.
845 __ ldr(r0, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
846 __ ldr(r3, FieldMemOperand(r0, FixedArray::kLengthOffset));
847 __ add(r0, r0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
848 __ add(r3, r0, Operand(r3, LSL, kPointerSizeLog2 - 1));
849 {
850 Label done_loop, loop;
851 __ bind(&loop);
852 __ cmp(r0, r3);
853 __ b(eq, &done_loop);
854 __ ldr(ip, MemOperand(r0, kPointerSize, PostIndex));
855 __ Push(ip);
856 __ b(&loop);
857 __ bind(&done_loop);
858 }
859
860 // Reset operand stack so we don't leak.
861 __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
862 __ str(ip, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
863
864 // Resume the generator function at the continuation.
865 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
866 __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
867 __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
868 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
869 __ add(r3, r3, Operand(r2, ASR, 1));
870 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
871 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
872 __ Move(r0, r1); // Continuation expects generator object in r0.
873 __ Jump(r3);
874 }
875
876 __ bind(&prepare_step_in_if_stepping);
877 {
878 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
879 __ Push(r1, r2, r4);
880 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
881 __ Pop(r1, r2);
882 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
883 }
884 __ b(&stepping_prepared);
885
886 __ bind(&prepare_step_in_suspended_generator);
887 {
888 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
889 __ Push(r1, r2);
890 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
891 __ Pop(r1, r2);
892 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
893 }
894 __ b(&stepping_prepared);
895 }
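In the full-codegen resume path above, the saved continuation is a smi-tagged offset into the code object, so the resume target is the code entry plus the offset untagged with an arithmetic shift (`ASR, 1`). A sketch under the same one-bit-smi assumption as earlier:

  #include <cstdint>

  uintptr_t ResumeAddress(uintptr_t tagged_code_ptr, int header_size,
                          int heap_object_tag, int32_t continuation_smi) {
    uintptr_t entry = tagged_code_ptr - heap_object_tag + header_size;
    return entry + (continuation_smi >> 1);  // ASR by one drops the smi tag
  }
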
896
897 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
898 FrameScope scope(masm, StackFrame::INTERNAL);
899 __ push(r1);
900 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
901 }
902
903
904 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
905
906
907 // Clobbers r2; preserves all other registers.
908 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
909 IsTagged argc_is_tagged) {
910 // Check the stack for overflow. We are not trying to catch
911 // interruptions (e.g. debug break and preemption) here, so the "real stack
912 // limit" is checked.
913 Label okay;
914 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
915 // Make r2 the space we have left. The stack might already be overflowed
916 // here, which will cause r2 to become negative.
917 __ sub(r2, sp, r2);
918 // Check if the arguments will overflow the stack.
919 if (argc_is_tagged == kArgcIsSmiTagged) {
920 __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
921 } else {
922 DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
923 __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
924 }
925 __ b(gt, &okay); // Signed comparison.
926
927 // Out of stack space.
928 __ CallRuntime(Runtime::kThrowStackOverflow);
929
930 __ bind(&okay);
931 }
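Generate_CheckStackOverflow computes the remaining stack as sp minus the real stack limit (possibly already negative) and requires it to exceed the space the arguments will take; the signed `gt` branch is what keeps an already-overflowed, negative result from passing. A sketch of the test, assuming a downward-growing stack:

  #include <cstdint>

  bool EnoughStackSpace(intptr_t sp, intptr_t real_stack_limit,
                        intptr_t argc, intptr_t pointer_size) {
    intptr_t space_left = sp - real_stack_limit;  // negative if overflowed
    return space_left > argc * pointer_size;      // signed comparison
  }
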
932
933
934 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
935 bool is_construct) {
936 // Called from Generate_JS_Entry
937 // r0: new.target
938 // r1: function
939 // r2: receiver
940 // r3: argc
941 // r4: argv
942 // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
943 ProfileEntryHookStub::MaybeCallEntryHook(masm);
944
945 // Enter an internal frame.
946 {
947 FrameScope scope(masm, StackFrame::INTERNAL);
948
949 // Setup the context (we need to use the caller context from the isolate).
950 ExternalReference context_address(Isolate::kContextAddress,
951 masm->isolate());
952 __ mov(cp, Operand(context_address));
953 __ ldr(cp, MemOperand(cp));
954
955 __ InitializeRootRegister();
956
957 // Push the function and the receiver onto the stack.
958 __ Push(r1, r2);
959
960 // Check if we have enough stack space to push all arguments.
961 // Clobbers r2.
962 Generate_CheckStackOverflow(masm, r3, kArgcIsUntaggedInt);
963
964 // Remember new.target.
965 __ mov(r5, r0);
966
967 // Copy arguments to the stack in a loop.
968 // r1: function
969 // r3: argc
970 // r4: argv, i.e. points to first arg
971 Label loop, entry;
972 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
973 // r2 points past last arg.
974 __ b(&entry);
975 __ bind(&loop);
976 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
977 __ ldr(r0, MemOperand(r0)); // dereference handle
978 __ push(r0); // push parameter
979 __ bind(&entry);
980 __ cmp(r4, r2);
981 __ b(ne, &loop);
982
983 // Setup new.target and argc.
984 __ mov(r0, Operand(r3));
985 __ mov(r3, Operand(r5));
986
987 // Initialize all JavaScript callee-saved registers, since they will be seen
988 // by the garbage collector as part of handlers.
989 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
990 __ mov(r5, Operand(r4));
991 __ mov(r6, Operand(r4));
992 if (!FLAG_enable_embedded_constant_pool) {
993 __ mov(r8, Operand(r4));
994 }
995 if (kR9Available == 1) {
996 __ mov(r9, Operand(r4));
997 }
998
999 // Invoke the code.
1000 Handle<Code> builtin = is_construct
1001 ? masm->isolate()->builtins()->Construct()
1002 : masm->isolate()->builtins()->Call();
1003 __ Call(builtin, RelocInfo::CODE_TARGET);
1004
1005 // Exit the JS frame and remove the parameters (except function), and
1006 // return.
1007 // Respect ABI stack constraint.
1008 }
1009 __ Jump(lr);
1010
1011 // r0: result
1012 }
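The copy loop above reads each argv slot and then dereferences it once ("dereference handle") because the entry trampoline receives handles, i.e. pointers to slots holding the tagged values, rather than the values themselves. A sketch of the copy with a simulated downward-growing stack:

  #include <cstdint>

  // argv[i] is a handle: a pointer to the slot holding the tagged value.
  void CopyArguments(uintptr_t*& sp, uintptr_t* const* argv, int argc) {
    for (int i = 0; i < argc; ++i) {
      *--sp = *argv[i];  // dereference the handle, then push the value
    }
  }
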
1013
1014
1015 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1016 Generate_JSEntryTrampolineHelper(masm, false);
1017 }
1018
1019
1020 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1021 Generate_JSEntryTrampolineHelper(masm, true);
1022 }
1023
1024 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
1025 Register args_count = scratch;
1026
1027 // Get the arguments + receiver count.
1028 __ ldr(args_count,
1029 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1030 __ ldr(args_count,
1031 FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
1032
1033 // Leave the frame (also dropping the register file).
1034 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1035
1036 // Drop receiver + arguments.
1037 __ add(sp, sp, args_count, LeaveCC);
1038 }
1039
1040 // Generate code for entering a JS function with the interpreter.
1041 // On entry to the function the receiver and arguments have been pushed on the
1042 // stack left to right. The actual argument count matches the formal parameter
1043 // count expected by the function.
1044 //
1045 // The live registers are:
1046 // o r1: the JS function object being called.
1047 // o r3: the new target
1048 // o cp: our context
1049 // o pp: the caller's constant pool pointer (if enabled)
1050 // o fp: the caller's frame pointer
1051 // o sp: stack pointer
1052 // o lr: return address
1053 //
1054 // The function builds an interpreter frame. See InterpreterFrameConstants in
1055 // frames.h for its layout.
1056 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
1057 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1058
1059 // Open a frame scope to indicate that there is a frame on the stack. The
1060 // MANUAL indicates that the scope shouldn't actually generate code to set up
1061 // the frame (that is done below).
1062 FrameScope frame_scope(masm, StackFrame::MANUAL);
1063 __ PushStandardFrame(r1);
1064
1065 // Get the bytecode array from the function object (or from the DebugInfo if
1066 // it is present) and load it into kInterpreterBytecodeArrayRegister.
1067 __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1068 Register debug_info = kInterpreterBytecodeArrayRegister;
1069 DCHECK(!debug_info.is(r0));
1070 __ ldr(debug_info, FieldMemOperand(r0, SharedFunctionInfo::kDebugInfoOffset));
1071 __ cmp(debug_info, Operand(DebugInfo::uninitialized()));
1072 // Load original bytecode array or the debug copy.
1073 __ ldr(kInterpreterBytecodeArrayRegister,
1074 FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset), eq);
1075 __ ldr(kInterpreterBytecodeArrayRegister,
1076 FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex), ne);
1077
1078 // Check that the function data field is actually a BytecodeArray object.
1079 Label bytecode_array_not_present;
1080 __ CompareRoot(kInterpreterBytecodeArrayRegister,
1081 Heap::kUndefinedValueRootIndex);
1082 __ b(eq, &bytecode_array_not_present);
1083 if (FLAG_debug_code) {
1084 __ SmiTst(kInterpreterBytecodeArrayRegister);
1085 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1086 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
1087 BYTECODE_ARRAY_TYPE);
1088 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1089 }
1090
1091 // Load the initial bytecode offset.
1092 __ mov(kInterpreterBytecodeOffsetRegister,
1093 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1094
1095 // Push new.target, the bytecode array, and the Smi-tagged bytecode offset.
1096 __ SmiTag(r0, kInterpreterBytecodeOffsetRegister);
1097 __ Push(r3, kInterpreterBytecodeArrayRegister, r0);
1098
1099 // Allocate the local and temporary register file on the stack.
1100 {
1101 // Load frame size from the BytecodeArray object.
1102 __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1103 BytecodeArray::kFrameSizeOffset));
1104
1105 // Do a stack check to ensure we don't go over the limit.
1106 Label ok;
1107 __ sub(r9, sp, Operand(r4));
1108 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
1109 __ cmp(r9, Operand(r2));
1110 __ b(hs, &ok);
1111 __ CallRuntime(Runtime::kThrowStackOverflow);
1112 __ bind(&ok);
1113
1114 // If ok, push undefined as the initial value for all register file entries.
1115 Label loop_header;
1116 Label loop_check;
1117 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
1118 __ b(&loop_check, al);
1119 __ bind(&loop_header);
1120 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
1121 __ push(r9);
1122 // Continue loop if not done.
1123 __ bind(&loop_check);
1124 __ sub(r4, r4, Operand(kPointerSize), SetCC);
1125 __ b(&loop_header, ge);
1126 }
1127
1128 // Load accumulator and dispatch table into registers.
1129 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
1130 __ mov(kInterpreterDispatchTableRegister,
1131 Operand(ExternalReference::interpreter_dispatch_table_address(
1132 masm->isolate())));
1133
1134 // Dispatch to the first bytecode handler for the function.
1135 __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
1136 kInterpreterBytecodeOffsetRegister));
1137 __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
1138 kPointerSizeLog2));
1139 __ Call(ip);
1140 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
1141
1142 // The return value is in r0.
1143 LeaveInterpreterFrame(masm, r2);
1144 __ Jump(lr);
1145
1146 // If the bytecode array is no longer present, then the underlying function
1147 // has been switched to a different kind of code and we heal the closure by
1148 // switching the code entry field over to the new code object as well.
1149 __ bind(&bytecode_array_not_present);
1150 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1151 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1152 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kCodeOffset));
1153 __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
1154 __ str(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1155 __ RecordWriteCodeEntryField(r1, r4, r5);
1156 __ Jump(r4);
1157 }
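The dispatch sequence above loads one byte at the current bytecode offset and jumps through the dispatch table at that index. A minimal sketch of the mechanism with a hypothetical handler type (V8's real handlers receive the full interpreter register state):

  #include <cstdint>

  typedef void (*BytecodeHandler)();  // hypothetical, for illustration

  void DispatchOnce(const uint8_t* bytecode_array, intptr_t offset,
                    BytecodeHandler const* dispatch_table) {
    uint8_t bytecode = bytecode_array[offset];
    dispatch_table[bytecode]();  // the stub jumps rather than calls
  }
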
1158
1159 void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
1160 // Save the function and context for call to CompileBaseline.
1161 __ ldr(r1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1162 __ ldr(kContextRegister,
1163 MemOperand(fp, StandardFrameConstants::kContextOffset));
1164
1165 // Leave the frame before recompiling for baseline so that we don't count as
1166 // an activation on the stack.
1167 LeaveInterpreterFrame(masm, r2);
1168
1169 {
1170 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1171 // Push return value.
1172 __ push(r0);
1173
1174 // Push function as argument and compile for baseline.
1175 __ push(r1);
1176 __ CallRuntime(Runtime::kCompileBaseline);
1177
1178 // Restore return value.
1179 __ pop(r0);
1180 }
1181 __ Jump(lr);
1182 }
1183
1184 static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
1185 Register limit, Register scratch) {
1186 Label loop_header, loop_check;
1187 __ b(al, &loop_check);
1188 __ bind(&loop_header);
1189 __ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
1190 __ push(scratch);
1191 __ bind(&loop_check);
1192 __ cmp(index, limit);
1193 __ b(gt, &loop_header);
1194 }
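Generate_InterpreterPushArgs walks `index` down toward `limit`, loading each slot and pushing it, so arguments laid out in memory end up on the downward-growing stack in the order the callee expects. The equivalent loop in C++, as a sketch:

  #include <cstdint>

  void PushArgs(uintptr_t*& sp, const uintptr_t* index, const uintptr_t* limit) {
    while (index > limit) {
      *--sp = *index--;  // load at index, post-decrement, push
    }
  }
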
1195
1196 // static
1197 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
1198 MacroAssembler* masm, TailCallMode tail_call_mode,
1199 CallableType function_type) {
1200 // ----------- S t a t e -------------
1201 // -- r0 : the number of arguments (not including the receiver)
1202 // -- r2 : the address of the first argument to be pushed. Subsequent
1203 // arguments should be consecutive above this, in the same order as
1204 // they are to be pushed onto the stack.
1205 // -- r1 : the target to call (can be any Object).
1206 // -----------------------------------
1207
1208 // Find the address of the last argument.
1209 __ add(r3, r0, Operand(1)); // Add one for receiver.
1210 __ mov(r3, Operand(r3, LSL, kPointerSizeLog2));
1211 __ sub(r3, r2, r3);
1212
1213 // Push the arguments.
1214 Generate_InterpreterPushArgs(masm, r2, r3, r4);
1215
1216 // Call the target.
1217 if (function_type == CallableType::kJSFunction) {
1218 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
1219 tail_call_mode),
1220 RelocInfo::CODE_TARGET);
1221 } else {
1222 DCHECK_EQ(function_type, CallableType::kAny);
1223 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
1224 tail_call_mode),
1225 RelocInfo::CODE_TARGET);
1226 }
1227 }
1228
1229 // static
1230 void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
1231 // ----------- S t a t e -------------
1232 // -- r0 : argument count (not including receiver)
1233 // -- r3 : new target
1234 // -- r1 : constructor to call
1235 // -- r2 : address of the first argument
1236 // -----------------------------------
1237
1238 // Find the address of the last argument.
1239 __ mov(r4, Operand(r0, LSL, kPointerSizeLog2));
1240 __ sub(r4, r2, r4);
1241
1242 // Push a slot for the receiver to be constructed.
1243 __ mov(ip, Operand::Zero());
1244 __ push(ip);
1245
1246 // Push the arguments.
1247 Generate_InterpreterPushArgs(masm, r2, r4, r5);
1248
1249 // Call the constructor with r0, r1, and r3 unmodified.
1250 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1251 }
1252
1253 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1254 // Set the return address to the correct point in the interpreter entry
1255 // trampoline.
1256 Smi* interpreter_entry_return_pc_offset(
1257 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1258 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
1259 __ Move(r2, masm->isolate()->builtins()->InterpreterEntryTrampoline());
1260 __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value() +
1261 Code::kHeaderSize - kHeapObjectTag));
1262
1263 // Initialize the dispatch table register.
1264 __ mov(kInterpreterDispatchTableRegister,
1265 Operand(ExternalReference::interpreter_dispatch_table_address(
1266 masm->isolate())));
1267
1268 // Get the bytecode array pointer from the frame.
1269 __ ldr(kInterpreterBytecodeArrayRegister,
1270 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1271
1272 if (FLAG_debug_code) {
1273 // Check that the function data field is actually a BytecodeArray object.
1274 __ SmiTst(kInterpreterBytecodeArrayRegister);
1275 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1276 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
1277 BYTECODE_ARRAY_TYPE);
1278 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1279 }
1280
1281 // Get the target bytecode offset from the frame.
1282 __ ldr(kInterpreterBytecodeOffsetRegister,
1283 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1284 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1285
1286 // Dispatch to the target bytecode.
1287 __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
1288 kInterpreterBytecodeOffsetRegister));
1289 __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
1290 kPointerSizeLog2));
1291 __ mov(pc, ip);
1292 }
1293
1294 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1295 // ----------- S t a t e -------------
1296 // -- r0 : argument count (preserved for callee)
1297 // -- r3 : new target (preserved for callee)
1298 // -- r1 : target function (preserved for callee)
1299 // -----------------------------------
1300 // First look up code; maybe we don't need to compile!
1301 Label gotta_call_runtime, gotta_call_runtime_no_stack;
1302 Label maybe_call_runtime;
1303 Label try_shared;
1304 Label loop_top, loop_bottom;
1305
1306 Register argument_count = r0;
1307 Register closure = r1;
1308 Register new_target = r3;
1309 __ push(argument_count);
1310 __ push(new_target);
1311 __ push(closure);
1312
1313 Register map = argument_count;
1314 Register index = r2;
1315 __ ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1316 __ ldr(map,
1317 FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1318 __ ldr(index, FieldMemOperand(map, FixedArray::kLengthOffset));
1319 __ cmp(index, Operand(Smi::FromInt(2)));
1320 __ b(lt, &gotta_call_runtime);
1321
1322 // Find literals.
1323 // r3 : native context
1324 // r2 : length / index
1325 // r0 : optimized code map
1326 // stack[0] : new target
1327 // stack[4] : closure
1328 Register native_context = r3;
1329 __ ldr(native_context, NativeContextMemOperand());
1330
1331 __ bind(&loop_top);
1332 Register temp = r1;
1333 Register array_pointer = r5;
1334
1335 // Does the native context match?
1336 __ add(array_pointer, map, Operand::PointerOffsetFromSmiKey(index));
1337 __ ldr(temp, FieldMemOperand(array_pointer,
1338 SharedFunctionInfo::kOffsetToPreviousContext));
1339 __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1340 __ cmp(temp, native_context);
1341 __ b(ne, &loop_bottom);
1342 // OSR id set to none?
1343 __ ldr(temp, FieldMemOperand(array_pointer,
1344 SharedFunctionInfo::kOffsetToPreviousOsrAstId));
1345 const int bailout_id = BailoutId::None().ToInt();
1346 __ cmp(temp, Operand(Smi::FromInt(bailout_id)));
1347 __ b(ne, &loop_bottom);
1348
1349 // Literals available?
1350 Label got_literals, maybe_cleared_weakcell;
1351 __ ldr(temp, FieldMemOperand(array_pointer,
1352 SharedFunctionInfo::kOffsetToPreviousLiterals));
1353 // temp contains either a WeakCell pointing to the literals array or the
1354 // literals array directly.
1355 STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
1356 __ ldr(r4, FieldMemOperand(temp, WeakCell::kValueOffset));
1357 __ JumpIfSmi(r4, &maybe_cleared_weakcell);
1358 // r4 is a pointer, therefore temp is a WeakCell pointing to a literals array.
1359 __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1360 __ jmp(&got_literals);
1361
1362 // r4 is a smi. If it's 0, then we are looking at a cleared WeakCell
1363 // around the literals array, and we should visit the runtime. If it's > 0,
1364 // then temp already contains the literals array.
1365 __ bind(&maybe_cleared_weakcell);
1366 __ cmp(r4, Operand(Smi::FromInt(0)));
1367 __ b(eq, &gotta_call_runtime);
1368
1369 // Save the literals in the closure.
1370 __ bind(&got_literals);
1371 __ ldr(r4, MemOperand(sp, 0));
1372 __ str(temp, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
1373 __ push(index);
1374 __ RecordWriteField(r4, JSFunction::kLiteralsOffset, temp, index,
1375 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1376 OMIT_SMI_CHECK);
1377 __ pop(index);
1378
1379 // Code available?
1380 Register entry = r4;
1381 __ ldr(entry,
1382 FieldMemOperand(array_pointer,
1383 SharedFunctionInfo::kOffsetToPreviousCachedCode));
1384 __ ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1385 __ JumpIfSmi(entry, &maybe_call_runtime);
1386
1387 // Found literals and code. Get them into the closure and return.
1388 __ pop(closure);
1389 // Store code entry in the closure.
1390 __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1391
1392 Label install_optimized_code_and_tailcall;
1393 __ bind(&install_optimized_code_and_tailcall);
1394 __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1395 __ RecordWriteCodeEntryField(closure, entry, r5);
1396
1397 // Link the closure into the optimized function list.
1398 // r4 : code entry
1399 // r3 : native context
1400 // r1 : closure
1401 __ ldr(r5,
1402 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1403 __ str(r5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
1404 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r5, r0,
1405 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1406 OMIT_SMI_CHECK);
1407 const int function_list_offset =
1408 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1409 __ str(closure,
1410 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1411 // Save closure before the write barrier.
1412 __ mov(r5, closure);
1413 __ RecordWriteContextSlot(native_context, function_list_offset, closure, r0,
1414 kLRHasNotBeenSaved, kDontSaveFPRegs);
1415 __ mov(closure, r5);
1416 __ pop(new_target);
1417 __ pop(argument_count);
1418 __ Jump(entry);
1419
1420 __ bind(&loop_bottom);
1421 __ sub(index, index, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
1422 __ cmp(index, Operand(Smi::FromInt(1)));
1423 __ b(gt, &loop_top);
1424
1425 // We found neither literals nor code.
1426 __ jmp(&gotta_call_runtime);
1427
1428 __ bind(&maybe_call_runtime);
1429 __ pop(closure);
1430
1431 // Last possibility. Check the context-free optimized code map entry.
1432 __ ldr(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
1433 SharedFunctionInfo::kSharedCodeIndex));
1434 __ ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1435 __ JumpIfSmi(entry, &try_shared);
1436
1437 // Store code entry in the closure.
1438 __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1439 __ jmp(&install_optimized_code_and_tailcall);
1440
1441 __ bind(&try_shared);
1442 __ pop(new_target);
1443 __ pop(argument_count);
1444 // Is the full code valid?
1445 __ ldr(entry,
1446 FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1447 __ ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1448 __ ldr(r5, FieldMemOperand(entry, Code::kFlagsOffset));
1449 __ and_(r5, r5, Operand(Code::KindField::kMask));
1450 __ mov(r5, Operand(r5, LSR, Code::KindField::kShift));
1451 __ cmp(r5, Operand(Code::BUILTIN));
1452 __ b(eq, &gotta_call_runtime_no_stack);
1453 // Yes, install the full code.
1454 __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1455 __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1456 __ RecordWriteCodeEntryField(closure, entry, r5);
1457 __ Jump(entry);
1458
1459 __ bind(&gotta_call_runtime);
1460 __ pop(closure);
1461 __ pop(new_target);
1462 __ pop(argument_count);
1463 __ bind(&gotta_call_runtime_no_stack);
1464 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1465 }
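Generate_CompileLazy's fast path scans the SharedFunctionInfo's optimized code map backwards in strides of kEntryLength, accepting an entry only if its context WeakCell still matches the current native context and its OSR id is none. A hedged sketch of that scan with a hypothetical flattened entry layout (field names are illustrative, not V8's):

  struct CodeMapEntry {     // hypothetical layout, for illustration only
    const void* context;    // value of the context WeakCell
    const void* literals;   // literals array (or WeakCell around it)
    const void* code;       // cached optimized Code, if still alive
    int osr_ast_id;
  };

  const CodeMapEntry* FindCachedCode(const CodeMapEntry* entries, int count,
                                     const void* native_context,
                                     int none_osr_id) {
    for (int i = count - 1; i >= 0; --i) {  // scanned from the end, as above
      if (entries[i].context == native_context &&
          entries[i].osr_ast_id == none_osr_id) {
        return &entries[i];
      }
    }
    return nullptr;  // nothing usable: fall back to the runtime
  }
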
1466
1467 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1468 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1469 }
1470
1471 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1472 GenerateTailCallToReturnedCode(masm,
1473 Runtime::kCompileOptimized_NotConcurrent);
1474 }
1475
1476
1477 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1478 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1479 }
1480
1481 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1482 // ----------- S t a t e -------------
1483 // -- r0 : argument count (preserved for callee)
1484 // -- r1 : new target (preserved for callee)
1485 // -- r3 : target function (preserved for callee)
1486 // -----------------------------------
1487 Label failed;
1488 {
1489 FrameScope scope(masm, StackFrame::INTERNAL);
1490 // Push the number of arguments to the callee.
1491 __ SmiTag(r0);
1492 __ push(r0);
1493 // Push a copy of the target function and the new target.
1494 __ push(r1);
1495 __ push(r3);
1496
1497 // The function.
1498 __ push(r1);
1499 // Copy arguments from caller (stdlib, foreign, heap).
1500 for (int i = 2; i >= 0; --i) {
1501 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1502 i * kPointerSize));
1503 __ push(r4);
1504 }
1505 // Call runtime, on success unwind frame, and parent frame.
1506 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1507 // A smi 0 is returned on failure, an object on success.
1508 __ JumpIfSmi(r0, &failed);
1509 scope.GenerateLeaveFrame();
1510 __ Drop(4);
1511 __ Ret();
1512
1513 __ bind(&failed);
1514 // Restore target function and new target.
1515 __ pop(r3);
1516 __ pop(r1);
1517 __ pop(r0);
1518 __ SmiUntag(r0);
1519 }
1520 // On failure, tail call back to regular js.
1521 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1522 }
1523
1524 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1525 // For now, we are relying on the fact that make_code_young doesn't do any
1526 // garbage collection, which allows us to save/restore the registers without
1527 // worrying about which of them contain pointers. We also don't build an
1528 // internal frame to make the code faster, since we shouldn't have to do stack
1529 // crawls in MakeCodeYoung. This seems a bit fragile.
1530
1531 // The following registers must be saved and restored when calling through to
1532 // the runtime:
1533 // r0 - contains return address (beginning of patch sequence)
1534 // r1 - isolate
1535 // r3 - new target
1536 FrameScope scope(masm, StackFrame::MANUAL);
1537 __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
1538 __ PrepareCallCFunction(2, 0, r2);
1539 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
1540 __ CallCFunction(
1541 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1542 __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
1543 __ mov(pc, r0);
1544 }
1545
1546 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1547 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1548 MacroAssembler* masm) { \
1549 GenerateMakeCodeYoungAgainCommon(masm); \
1550 } \
1551 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1552 MacroAssembler* masm) { \
1553 GenerateMakeCodeYoungAgainCommon(masm); \
1554 }
1555 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1556 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1557
1558
1559 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1560 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1561 // that make_code_young doesn't do any garbage collection, which allows us to
1562 // save/restore the registers without worrying about which of them contain
1563 // pointers.
1564
1565 // The following registers must be saved and restored when calling through to
1566 // the runtime:
1567 // r0 - contains return address (beginning of patch sequence)
1568 // r1 - isolate
1569 // r3 - new target
1570 FrameScope scope(masm, StackFrame::MANUAL);
1571 __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
1572 __ PrepareCallCFunction(2, 0, r2);
1573 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
1574 __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
1575 masm->isolate()), 2);
1576 __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
1577
1578 // Perform prologue operations usually performed by the young code stub.
1579 __ PushStandardFrame(r1);
1580
1581 // Jump to point after the code-age stub.
1582 __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
1583 __ mov(pc, r0);
1584 }
1585
1586
1587 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1588 GenerateMakeCodeYoungAgainCommon(masm);
1589 }
1590
1591
1592 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1593 Generate_MarkCodeAsExecutedOnce(masm);
1594 }
1595
1596
1597 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1598 SaveFPRegsMode save_doubles) {
1599 {
1600 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1601
 1602 // Preserve registers across the notification; this is important for
 1603 // compiled stubs that tail-call the runtime on deopts, passing their
 1604 // parameters in registers.
1605 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
 1606 // Notify the runtime system of the stub failure.
1607 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1608 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
1609 }
1610
1611 __ add(sp, sp, Operand(kPointerSize)); // Ignore state
1612 __ mov(pc, lr); // Jump to miss handler
1613 }
1614
1615
1616 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1617 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1618 }
1619
1620
1621 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1622 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1623 }
1624
1625
1626 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1627 Deoptimizer::BailoutType type) {
1628 {
1629 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 1630 // Pass the deoptimization type to the runtime system.
1631 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
1632 __ push(r0);
1633 __ CallRuntime(Runtime::kNotifyDeoptimized);
1634 }
1635
1636 // Get the full codegen state from the stack and untag it -> r6.
1637 __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
1638 __ SmiUntag(r6);
1639 // Switch on the state.
1640 Label with_tos_register, unknown_state;
1641 __ cmp(r6,
1642 Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
1643 __ b(ne, &with_tos_register);
1644 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1645 __ Ret();
1646
1647 __ bind(&with_tos_register);
1648 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r0.code());
1649 __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
1650 __ cmp(r6,
1651 Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
1652 __ b(ne, &unknown_state);
1653 __ add(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1654 __ Ret();
1655
1656 __ bind(&unknown_state);
1657 __ stop("no cases left");
1658 }
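
// The state dispatch above, as a C-level sketch (SmiUntag shown as a plain
// shift; the real BailoutState values come from deoptimizer.h):
//
//   intptr_t state = sp[0] >> 1;            // Untag the smi at the top.
//   if (state == NO_REGISTERS) {
//     sp += 1;                              // Drop the state word.
//   } else {                                // state == TOS_REGISTER
//     r0 = sp[1];                           // Restore the accumulator.
//     sp += 2;                              // Drop state word and TOS value.
//   }
//   return;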
1659
1660
1661 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1662 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1663 }
1664
1665
1666 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1667 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1668 }
1669
1670
1671 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1672 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1673 }
1674
1675
1676 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1677 Register function_template_info,
1678 Register scratch0, Register scratch1,
1679 Register scratch2,
1680 Label* receiver_check_failed) {
1681 Register signature = scratch0;
1682 Register map = scratch1;
1683 Register constructor = scratch2;
1684
 1685 // If there is no signature, the receiver check trivially passes.
1686 __ ldr(signature, FieldMemOperand(function_template_info,
1687 FunctionTemplateInfo::kSignatureOffset));
1688 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
1689 Label receiver_check_passed;
1690 __ b(eq, &receiver_check_passed);
1691
1692 // Walk the prototype chain.
1693 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1694 Label prototype_loop_start;
1695 __ bind(&prototype_loop_start);
1696
1697 // Get the constructor, if any.
1698 __ GetMapConstructor(constructor, map, ip, ip);
1699 __ cmp(ip, Operand(JS_FUNCTION_TYPE));
1700 Label next_prototype;
1701 __ b(ne, &next_prototype);
1702 Register type = constructor;
1703 __ ldr(type,
1704 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1705 __ ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1706
1707 // Loop through the chain of inheriting function templates.
1708 Label function_template_loop;
1709 __ bind(&function_template_loop);
1710
1711 // If the signatures match, we have a compatible receiver.
1712 __ cmp(signature, type);
1713 __ b(eq, &receiver_check_passed);
1714
1715 // If the current type is not a FunctionTemplateInfo, load the next prototype
1716 // in the chain.
1717 __ JumpIfSmi(type, &next_prototype);
1718 __ CompareObjectType(type, ip, ip, FUNCTION_TEMPLATE_INFO_TYPE);
1719
1720 // Otherwise load the parent function template and iterate.
1721 __ ldr(type,
1722 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset),
1723 eq);
1724 __ b(&function_template_loop, eq);
1725
1726 // Load the next prototype.
1727 __ bind(&next_prototype);
1728 __ ldr(ip, FieldMemOperand(map, Map::kBitField3Offset));
1729 __ tst(ip, Operand(Map::HasHiddenPrototype::kMask));
1730 __ b(eq, receiver_check_failed);
1731 __ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1732 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1733 // Iterate.
1734 __ b(&prototype_loop_start);
1735
1736 __ bind(&receiver_check_passed);
1737 }
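
// The whole check is, roughly, the following loop (accessor names are
// paraphrased for illustration; the real ones live in src/objects.h):
//
//   bool IsCompatibleReceiver(Object* receiver, Object* signature) {
//     if (signature->IsUndefined()) return true;
//     for (Map* map = receiver->map();;) {
//       Object* constructor = map->GetConstructor();
//       if (constructor->IsJSFunction()) {
//         Object* type = shared_function_data(constructor);
//         while (type->IsFunctionTemplateInfo()) {
//           if (type == signature) return true;   // Compatible receiver.
//           type = parent_template(type);
//         }
//       }
//       if (!map->has_hidden_prototype()) return false;
//       receiver = map->prototype();
//       map = receiver->map();
//     }
//   }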
1738
1739
1740 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1741 // ----------- S t a t e -------------
1742 // -- r0 : number of arguments excluding receiver
1743 // -- r1 : callee
1744 // -- lr : return address
1745 // -- sp[0] : last argument
1746 // -- ...
1747 // -- sp[4 * (argc - 1)] : first argument
1748 // -- sp[4 * argc] : receiver
1749 // -----------------------------------
1750
1751 // Load the FunctionTemplateInfo.
1752 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1753 __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
1754
1755 // Do the compatible receiver check.
1756 Label receiver_check_failed;
1757 __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1758 CompatibleReceiverCheck(masm, r2, r3, r4, r5, r6, &receiver_check_failed);
1759
1760 // Get the callback offset from the FunctionTemplateInfo, and jump to the
1761 // beginning of the code.
1762 __ ldr(r4, FieldMemOperand(r3, FunctionTemplateInfo::kCallCodeOffset));
1763 __ ldr(r4, FieldMemOperand(r4, CallHandlerInfo::kFastHandlerOffset));
1764 __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
1765 __ Jump(r4);
1766
1767 // Compatible receiver check failed: throw an Illegal Invocation exception.
1768 __ bind(&receiver_check_failed);
1769 // Drop the arguments (including the receiver)
1770 __ add(r0, r0, Operand(1));
1771 __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
1772 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1773 }
1774
1775
1776 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1777 // Lookup the function in the JavaScript frame.
1778 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1779 {
1780 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1781 // Pass function as argument.
1782 __ push(r0);
1783 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1784 }
1785
 1786 // If the code object is null (returned as smi zero), just return to the
 1787 // unoptimized code.
1787 Label skip;
1788 __ cmp(r0, Operand(Smi::FromInt(0)));
1789 __ b(ne, &skip);
1790 __ Ret();
1791
1792 __ bind(&skip);
1793
1794 // Load deoptimization data from the code object.
1795 // <deopt_data> = <code>[#deoptimization_data_offset]
1796 __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));
1797
1798 { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
1799 __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
1800
1801 if (FLAG_enable_embedded_constant_pool) {
1802 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
1803 }
1804
1805 // Load the OSR entrypoint offset from the deoptimization data.
1806 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1807 __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
1808 DeoptimizationInputData::kOsrPcOffsetIndex)));
1809
1810 // Compute the target address = code start + osr_offset
1811 __ add(lr, r0, Operand::SmiUntag(r1));
1812
1813 // And "return" to the OSR entry point of the function.
1814 __ Ret();
1815 }
1816 }
1817
1818
1819 // static
1820 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
1821 int field_index) {
1822 // ----------- S t a t e -------------
1823 // -- r0 : number of arguments
1824 // -- r1 : function
1825 // -- cp : context
1826 // -- lr : return address
1827 // -- sp[0] : receiver
1828 // -----------------------------------
1829
1830 // 1. Pop receiver into r0 and check that it's actually a JSDate object.
1831 Label receiver_not_date;
1832 {
1833 __ Pop(r0);
1834 __ JumpIfSmi(r0, &receiver_not_date);
1835 __ CompareObjectType(r0, r2, r3, JS_DATE_TYPE);
1836 __ b(ne, &receiver_not_date);
1837 }
1838
1839 // 2. Load the specified date field, falling back to the runtime as necessary.
1840 if (field_index == JSDate::kDateValue) {
1841 __ ldr(r0, FieldMemOperand(r0, JSDate::kValueOffset));
1842 } else {
1843 if (field_index < JSDate::kFirstUncachedField) {
1844 Label stamp_mismatch;
1845 __ mov(r1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
1846 __ ldr(r1, MemOperand(r1));
1847 __ ldr(ip, FieldMemOperand(r0, JSDate::kCacheStampOffset));
1848 __ cmp(r1, ip);
1849 __ b(ne, &stamp_mismatch);
1850 __ ldr(r0, FieldMemOperand(
1851 r0, JSDate::kValueOffset + field_index * kPointerSize));
1852 __ Ret();
1853 __ bind(&stamp_mismatch);
1854 }
1855 FrameScope scope(masm, StackFrame::INTERNAL);
1856 __ PrepareCallCFunction(2, r1);
1857 __ mov(r1, Operand(Smi::FromInt(field_index)));
1858 __ CallCFunction(
1859 ExternalReference::get_date_field_function(masm->isolate()), 2);
1860 }
1861 __ Ret();
1862
1863 // 3. Raise a TypeError if the receiver is not a date.
1864 __ bind(&receiver_not_date);
1865 {
1866 FrameScope scope(masm, StackFrame::MANUAL);
1867 __ Push(r0);
1868 __ Move(r0, Smi::FromInt(0));
1869 __ EnterBuiltinFrame(cp, r1, r0);
1870 __ CallRuntime(Runtime::kThrowNotDateError);
1871 }
1872 }
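
// Fast/slow path split above, as pseudocode (cache accessors paraphrased;
// see JSDate in src/objects.h for the real layout):
//
//   if (field_index == JSDate::kDateValue) return date->value();
//   if (field_index < JSDate::kFirstUncachedField &&
//       date->cache_stamp() == isolate->date_cache_stamp()) {
//     return date->cached_field(field_index);     // No C call needed.
//   }
//   return get_date_field_function(date, Smi::FromInt(field_index));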
1873
1874 // static
1875 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1876 // ----------- S t a t e -------------
1877 // -- r0 : argc
1878 // -- sp[0] : argArray
1879 // -- sp[4] : thisArg
1880 // -- sp[8] : receiver
1881 // -----------------------------------
1882
1883 // 1. Load receiver into r1, argArray into r0 (if present), remove all
1884 // arguments from the stack (including the receiver), and push thisArg (if
1885 // present) instead.
1886 {
1887 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1888 __ mov(r3, r2);
1889 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); // receiver
1890 __ sub(r4, r0, Operand(1), SetCC);
1891 __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // thisArg
1892 __ sub(r4, r4, Operand(1), SetCC, ge);
1893 __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // argArray
1894 __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
1895 __ str(r2, MemOperand(sp, 0));
1896 __ mov(r0, r3);
1897 }
1898
1899 // ----------- S t a t e -------------
1900 // -- r0 : argArray
1901 // -- r1 : receiver
1902 // -- sp[0] : thisArg
1903 // -----------------------------------
1904
1905 // 2. Make sure the receiver is actually callable.
1906 Label receiver_not_callable;
1907 __ JumpIfSmi(r1, &receiver_not_callable);
1908 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1909 __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
1910 __ tst(r4, Operand(1 << Map::kIsCallable));
1911 __ b(eq, &receiver_not_callable);
1912
1913 // 3. Tail call with no arguments if argArray is null or undefined.
1914 Label no_arguments;
1915 __ JumpIfRoot(r0, Heap::kNullValueRootIndex, &no_arguments);
1916 __ JumpIfRoot(r0, Heap::kUndefinedValueRootIndex, &no_arguments);
1917
1918 // 4a. Apply the receiver to the given argArray (passing undefined for
1919 // new.target).
1920 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1921 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1922
1923 // 4b. The argArray is either null or undefined, so we tail call without any
1924 // arguments to the receiver.
1925 __ bind(&no_arguments);
1926 {
1927 __ mov(r0, Operand(0));
1928 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1929 }
1930
1931 // 4c. The receiver is not callable, throw an appropriate TypeError.
1932 __ bind(&receiver_not_callable);
1933 {
1934 __ str(r1, MemOperand(sp, 0));
1935 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1936 }
1937 }
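
// The SetCC/ge sequence in step 1 is branch-free argument extraction; in C
// terms (argc arrives in r0, undefined fills in for missing operands):
//
//   Object* receiver  = sp[argc];
//   Object* this_arg  = (argc >= 1) ? sp[argc - 1] : undefined;
//   Object* arg_array = (argc >= 2) ? sp[argc - 2] : undefined;
//   sp += argc;          // Drop all arguments, keeping one slot...
//   sp[0] = this_arg;    // ...which is overwritten with thisArg.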
1938
1939
1940 // static
1941 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1942 // 1. Make sure we have at least one argument.
1943 // r0: actual number of arguments
1944 {
1945 Label done;
1946 __ cmp(r0, Operand::Zero());
1947 __ b(ne, &done);
1948 __ PushRoot(Heap::kUndefinedValueRootIndex);
1949 __ add(r0, r0, Operand(1));
1950 __ bind(&done);
1951 }
1952
1953 // 2. Get the callable to call (passed as receiver) from the stack.
1954 // r0: actual number of arguments
1955 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1956
1957 // 3. Shift arguments and return address one slot down on the stack
1958 // (overwriting the original receiver). Adjust argument count to make
1959 // the original first argument the new receiver.
1960 // r0: actual number of arguments
1961 // r1: callable
1962 {
1963 Label loop;
1964 // Calculate the copy start address (destination). Copy end address is sp.
1965 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1966
1967 __ bind(&loop);
1968 __ ldr(ip, MemOperand(r2, -kPointerSize));
1969 __ str(ip, MemOperand(r2));
1970 __ sub(r2, r2, Operand(kPointerSize));
1971 __ cmp(r2, sp);
1972 __ b(ne, &loop);
1973 // Adjust the actual number of arguments and remove the top element
1974 // (which is a copy of the last argument).
1975 __ sub(r0, r0, Operand(1));
1976 __ pop();
1977 }
1978
1979 // 4. Call the callable.
1980 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1981 }
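
// The shift loop in step 3, in C terms (slots indexed upward from sp):
//
//   for (intptr_t* p = sp + argc; p != sp; --p) p[0] = p[-1];
//   sp += 1;     // Drop the now-duplicated top slot.
//   argc -= 1;   // The former first argument became the receiver.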
1982
1983
1984 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1985 // ----------- S t a t e -------------
1986 // -- r0 : argc
1987 // -- sp[0] : argumentsList
1988 // -- sp[4] : thisArgument
1989 // -- sp[8] : target
1990 // -- sp[12] : receiver
1991 // -----------------------------------
1992
1993 // 1. Load target into r1 (if present), argumentsList into r0 (if present),
1994 // remove all arguments from the stack (including the receiver), and push
1995 // thisArgument (if present) instead.
1996 {
1997 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
1998 __ mov(r2, r1);
1999 __ mov(r3, r1);
2000 __ sub(r4, r0, Operand(1), SetCC);
2001 __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // target
2002 __ sub(r4, r4, Operand(1), SetCC, ge);
2003 __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // thisArgument
2004 __ sub(r4, r4, Operand(1), SetCC, ge);
2005 __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // argumentsList
2006 __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
2007 __ str(r2, MemOperand(sp, 0));
2008 __ mov(r0, r3);
2009 }
2010
2011 // ----------- S t a t e -------------
2012 // -- r0 : argumentsList
2013 // -- r1 : target
2014 // -- sp[0] : thisArgument
2015 // -----------------------------------
2016
2017 // 2. Make sure the target is actually callable.
2018 Label target_not_callable;
2019 __ JumpIfSmi(r1, &target_not_callable);
2020 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
2021 __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
2022 __ tst(r4, Operand(1 << Map::kIsCallable));
2023 __ b(eq, &target_not_callable);
2024
2025 // 3a. Apply the target to the given argumentsList (passing undefined for
2026 // new.target).
2027 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2028 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2029
2030 // 3b. The target is not callable, throw an appropriate TypeError.
2031 __ bind(&target_not_callable);
2032 {
2033 __ str(r1, MemOperand(sp, 0));
2034 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2035 }
2036 }
2037
2038
2039 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2040 // ----------- S t a t e -------------
2041 // -- r0 : argc
2042 // -- sp[0] : new.target (optional)
2043 // -- sp[4] : argumentsList
2044 // -- sp[8] : target
2045 // -- sp[12] : receiver
2046 // -----------------------------------
2047
2048 // 1. Load target into r1 (if present), argumentsList into r0 (if present),
2049 // new.target into r3 (if present, otherwise use target), remove all
 2050 // arguments from the stack (including the receiver), and push undefined
 2051 // as the receiver instead.
2052 {
2053 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2054 __ mov(r2, r1);
2055 __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2)); // receiver
2056 __ sub(r4, r0, Operand(1), SetCC);
2057 __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // target
2058 __ mov(r3, r1); // new.target defaults to target
2059 __ sub(r4, r4, Operand(1), SetCC, ge);
2060 __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // argumentsList
2061 __ sub(r4, r4, Operand(1), SetCC, ge);
2062 __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // new.target
2063 __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
2064 __ mov(r0, r2);
2065 }
2066
2067 // ----------- S t a t e -------------
2068 // -- r0 : argumentsList
2069 // -- r3 : new.target
2070 // -- r1 : target
2071 // -- sp[0] : receiver (undefined)
2072 // -----------------------------------
2073
2074 // 2. Make sure the target is actually a constructor.
2075 Label target_not_constructor;
2076 __ JumpIfSmi(r1, &target_not_constructor);
2077 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
2078 __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
2079 __ tst(r4, Operand(1 << Map::kIsConstructor));
2080 __ b(eq, &target_not_constructor);
2081
 2082 // 3. Make sure the new.target is actually a constructor.
2083 Label new_target_not_constructor;
2084 __ JumpIfSmi(r3, &new_target_not_constructor);
2085 __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
2086 __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
2087 __ tst(r4, Operand(1 << Map::kIsConstructor));
2088 __ b(eq, &new_target_not_constructor);
2089
2090 // 4a. Construct the target with the given new.target and argumentsList.
2091 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2092
2093 // 4b. The target is not a constructor, throw an appropriate TypeError.
2094 __ bind(&target_not_constructor);
2095 {
2096 __ str(r1, MemOperand(sp, 0));
2097 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2098 }
2099
2100 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
2101 __ bind(&new_target_not_constructor);
2102 {
2103 __ str(r3, MemOperand(sp, 0));
2104 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2105 }
2106 }
2107
2108
2109 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
2110 Label* stack_overflow) {
2111 // ----------- S t a t e -------------
2112 // -- r0 : actual number of arguments
2113 // -- r1 : function (passed through to callee)
2114 // -- r2 : expected number of arguments
2115 // -- r3 : new target (passed through to callee)
2116 // -----------------------------------
2117 // Check the stack for overflow. We are not trying to catch
2118 // interruptions (e.g. debug break and preemption) here, so the "real stack
2119 // limit" is checked.
2120 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
2121 // Make r5 the space we have left. The stack might already be overflowed
2122 // here which will cause r5 to become negative.
2123 __ sub(r5, sp, r5);
2124 // Check if the arguments will overflow the stack.
2125 __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
2126 __ b(le, stack_overflow); // Signed comparison.
2127 }
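
// In effect (signed arithmetic, because sp may already be below the limit):
//
//   if ((intptr_t)(sp - real_stack_limit) <= expected_argc * kPointerSize)
//     goto stack_overflow;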
2128
2129
2130 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2131 __ SmiTag(r0);
2132 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2133 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
2134 (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
2135 fp.bit() | lr.bit());
2136 __ add(fp, sp,
2137 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
2138 }
2139
2140
2141 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2142 // ----------- S t a t e -------------
2143 // -- r0 : result being passed through
2144 // -----------------------------------
 2145 // Get the number of arguments passed (as a smi), tear down the frame, and
 2146 // then drop the parameters from the stack.
2147 __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2148 kPointerSize)));
2149
2150 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
2151 __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
2152 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
2153 }
2154
2155
2156 // static
2157 void Builtins::Generate_Apply(MacroAssembler* masm) {
2158 // ----------- S t a t e -------------
2159 // -- r0 : argumentsList
2160 // -- r1 : target
2161 // -- r3 : new.target (checked to be constructor or undefined)
2162 // -- sp[0] : thisArgument
2163 // -----------------------------------
2164
2165 // Create the list of arguments from the array-like argumentsList.
2166 {
2167 Label create_arguments, create_array, create_runtime, done_create;
2168 __ JumpIfSmi(r0, &create_runtime);
2169
2170 // Load the map of argumentsList into r2.
2171 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2172
2173 // Load native context into r4.
2174 __ ldr(r4, NativeContextMemOperand());
2175
2176 // Check if argumentsList is an (unmodified) arguments object.
2177 __ ldr(ip, ContextMemOperand(r4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2178 __ cmp(ip, r2);
2179 __ b(eq, &create_arguments);
2180 __ ldr(ip, ContextMemOperand(r4, Context::STRICT_ARGUMENTS_MAP_INDEX));
2181 __ cmp(ip, r2);
2182 __ b(eq, &create_arguments);
2183
2184 // Check if argumentsList is a fast JSArray.
2185 __ CompareInstanceType(r2, ip, JS_ARRAY_TYPE);
2186 __ b(eq, &create_array);
2187
2188 // Ask the runtime to create the list (actually a FixedArray).
2189 __ bind(&create_runtime);
2190 {
2191 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2192 __ Push(r1, r3, r0);
2193 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2194 __ Pop(r1, r3);
2195 __ ldr(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
2196 __ SmiUntag(r2);
2197 }
2198 __ jmp(&done_create);
2199
2200 // Try to create the list from an arguments object.
2201 __ bind(&create_arguments);
2202 __ ldr(r2, FieldMemOperand(r0, JSArgumentsObject::kLengthOffset));
2203 __ ldr(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2204 __ ldr(ip, FieldMemOperand(r4, FixedArray::kLengthOffset));
2205 __ cmp(r2, ip);
2206 __ b(ne, &create_runtime);
2207 __ SmiUntag(r2);
2208 __ mov(r0, r4);
2209 __ b(&done_create);
2210
2211 // Try to create the list from a JSArray object.
2212 __ bind(&create_array);
2213 __ ldr(r2, FieldMemOperand(r2, Map::kBitField2Offset));
2214 __ DecodeField<Map::ElementsKindBits>(r2);
2215 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2216 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2217 STATIC_ASSERT(FAST_ELEMENTS == 2);
2218 __ cmp(r2, Operand(FAST_ELEMENTS));
2219 __ b(hi, &create_runtime);
2220 __ cmp(r2, Operand(FAST_HOLEY_SMI_ELEMENTS));
2221 __ b(eq, &create_runtime);
2222 __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
2223 __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
2224 __ SmiUntag(r2);
2225
2226 __ bind(&done_create);
2227 }
2228
2229 // Check for stack overflow.
2230 {
2231 // Check the stack for overflow. We are not trying to catch interruptions
2232 // (i.e. debug break and preemption) here, so check the "real stack limit".
2233 Label done;
2234 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
2235 // Make ip the space we have left. The stack might already be overflowed
2236 // here which will cause ip to become negative.
2237 __ sub(ip, sp, ip);
2238 // Check if the arguments will overflow the stack.
2239 __ cmp(ip, Operand(r2, LSL, kPointerSizeLog2));
2240 __ b(gt, &done); // Signed comparison.
2241 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2242 __ bind(&done);
2243 }
2244
2245 // ----------- S t a t e -------------
2246 // -- r1 : target
2247 // -- r0 : args (a FixedArray built from argumentsList)
2248 // -- r2 : len (number of elements to push from args)
2249 // -- r3 : new.target (checked to be constructor or undefined)
2250 // -- sp[0] : thisArgument
2251 // -----------------------------------
2252
2253 // Push arguments onto the stack (thisArgument is already on the stack).
2254 {
2255 __ mov(r4, Operand(0));
2256 Label done, loop;
2257 __ bind(&loop);
2258 __ cmp(r4, r2);
2259 __ b(eq, &done);
2260 __ add(ip, r0, Operand(r4, LSL, kPointerSizeLog2));
2261 __ ldr(ip, FieldMemOperand(ip, FixedArray::kHeaderSize));
2262 __ Push(ip);
2263 __ add(r4, r4, Operand(1));
2264 __ b(&loop);
2265 __ bind(&done);
2266 __ Move(r0, r4);
2267 }
2268
2269 // Dispatch to Call or Construct depending on whether new.target is undefined.
2270 {
2271 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
2272 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
2273 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2274 }
2275 }
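
// The push-and-dispatch tail above is, conceptually:
//
//   for (int i = 0; i < len; ++i) Push(args->get(i));  // args: FixedArray.
//   argc = len;
//   if (new_target->IsUndefined()) return Call(target);
//   return Construct(target, new_target);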
2276
2277 namespace {
2278
 2279 // Drops the top JavaScript frame and any arguments adaptor frame below it
 2280 // (if present), preserving all the arguments prepared for the current call.
 2281 // Does nothing if the debugger is currently active.
2282 // ES6 14.6.3. PrepareForTailCall
2283 //
2284 // Stack structure for the function g() tail calling f():
2285 //
2286 // ------- Caller frame: -------
2287 // | ...
2288 // | g()'s arg M
2289 // | ...
2290 // | g()'s arg 1
2291 // | g()'s receiver arg
2292 // | g()'s caller pc
2293 // ------- g()'s frame: -------
2294 // | g()'s caller fp <- fp
2295 // | g()'s context
2296 // | function pointer: g
2297 // | -------------------------
2298 // | ...
2299 // | ...
2300 // | f()'s arg N
2301 // | ...
2302 // | f()'s arg 1
2303 // | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
2304 // ----------------------
2305 //
2306 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2307 Register scratch1, Register scratch2,
2308 Register scratch3) {
2309 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2310 Comment cmnt(masm, "[ PrepareForTailCall");
2311
2312 // Prepare for tail call only if ES2015 tail call elimination is enabled.
2313 Label done;
2314 ExternalReference is_tail_call_elimination_enabled =
2315 ExternalReference::is_tail_call_elimination_enabled_address(
2316 masm->isolate());
2317 __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
2318 __ ldrb(scratch1, MemOperand(scratch1));
2319 __ cmp(scratch1, Operand(0));
2320 __ b(eq, &done);
2321
2322 // Drop possible interpreter handler/stub frame.
2323 {
2324 Label no_interpreter_frame;
2325 __ ldr(scratch3,
2326 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
2327 __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
2328 __ b(ne, &no_interpreter_frame);
2329 __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2330 __ bind(&no_interpreter_frame);
2331 }
2332
2333 // Check if next frame is an arguments adaptor frame.
2334 Register caller_args_count_reg = scratch1;
2335 Label no_arguments_adaptor, formal_parameter_count_loaded;
2336 __ ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2337 __ ldr(scratch3,
2338 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
2339 __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2340 __ b(ne, &no_arguments_adaptor);
2341
2342 // Drop current frame and load arguments count from arguments adaptor frame.
2343 __ mov(fp, scratch2);
2344 __ ldr(caller_args_count_reg,
2345 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2346 __ SmiUntag(caller_args_count_reg);
2347 __ b(&formal_parameter_count_loaded);
2348
2349 __ bind(&no_arguments_adaptor);
 2350 // Load the caller's formal parameter count.
2351 __ ldr(scratch1,
2352 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2353 __ ldr(scratch1,
2354 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2355 __ ldr(caller_args_count_reg,
2356 FieldMemOperand(scratch1,
2357 SharedFunctionInfo::kFormalParameterCountOffset));
2358 __ SmiUntag(caller_args_count_reg);
2359
2360 __ bind(&formal_parameter_count_loaded);
2361
2362 ParameterCount callee_args_count(args_reg);
2363 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2364 scratch3);
2365 __ bind(&done);
2366 }
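
// In short, the caller argument count used for the frame dropping is
// (adaptor_frame.length() is shorthand for the smi at kLengthOffset):
//
//   caller_argc = parent_is_adaptor_frame
//                     ? adaptor_frame.length()                // dynamic argc
//                     : function->shared()->formal_parameter_count();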
2367 } // namespace
2368
2369 // static
2370 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2371 ConvertReceiverMode mode,
2372 TailCallMode tail_call_mode) {
2373 // ----------- S t a t e -------------
2374 // -- r0 : the number of arguments (not including the receiver)
2375 // -- r1 : the function to call (checked to be a JSFunction)
2376 // -----------------------------------
2377 __ AssertFunction(r1);
2378
2379 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2380 // Check that the function is not a "classConstructor".
2381 Label class_constructor;
2382 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
2383 __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kFunctionKindByteOffset));
2384 __ tst(r3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2385 __ b(ne, &class_constructor);
2386
2387 // Enter the context of the function; ToObject has to run in the function
2388 // context, and we also need to take the global proxy from the function
2389 // context in case of conversion.
2390 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2391 SharedFunctionInfo::kStrictModeByteOffset);
2392 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
2393 // We need to convert the receiver for non-native sloppy mode functions.
2394 Label done_convert;
2395 __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset));
2396 __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2397 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2398 __ b(ne, &done_convert);
2399 {
2400 // ----------- S t a t e -------------
2401 // -- r0 : the number of arguments (not including the receiver)
2402 // -- r1 : the function to call (checked to be a JSFunction)
2403 // -- r2 : the shared function info.
2404 // -- cp : the function context.
2405 // -----------------------------------
2406
2407 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2408 // Patch receiver to global proxy.
2409 __ LoadGlobalProxy(r3);
2410 } else {
2411 Label convert_to_object, convert_receiver;
2412 __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
2413 __ JumpIfSmi(r3, &convert_to_object);
2414 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2415 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
2416 __ b(hs, &done_convert);
2417 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2418 Label convert_global_proxy;
2419 __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex,
2420 &convert_global_proxy);
2421 __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
2422 __ bind(&convert_global_proxy);
2423 {
2424 // Patch receiver to global proxy.
2425 __ LoadGlobalProxy(r3);
2426 }
2427 __ b(&convert_receiver);
2428 }
2429 __ bind(&convert_to_object);
2430 {
2431 // Convert receiver using ToObject.
2432 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2433 // in the fast case? (fall back to AllocateInNewSpace?)
2434 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2435 __ SmiTag(r0);
2436 __ Push(r0, r1);
2437 __ mov(r0, r3);
2438 ToObjectStub stub(masm->isolate());
2439 __ CallStub(&stub);
2440 __ mov(r3, r0);
2441 __ Pop(r0, r1);
2442 __ SmiUntag(r0);
2443 }
2444 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
2445 __ bind(&convert_receiver);
2446 }
2447 __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
2448 }
2449 __ bind(&done_convert);
2450
2451 // ----------- S t a t e -------------
2452 // -- r0 : the number of arguments (not including the receiver)
2453 // -- r1 : the function to call (checked to be a JSFunction)
2454 // -- r2 : the shared function info.
2455 // -- cp : the function context.
2456 // -----------------------------------
2457
2458 if (tail_call_mode == TailCallMode::kAllow) {
2459 PrepareForTailCall(masm, r0, r3, r4, r5);
2460 }
2461
2462 __ ldr(r2,
2463 FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
2464 __ SmiUntag(r2);
2465 ParameterCount actual(r0);
2466 ParameterCount expected(r2);
2467 __ InvokeFunctionCode(r1, no_reg, expected, actual, JUMP_FUNCTION,
2468 CheckDebugStepCallWrapper());
2469
2470 // The function is a "classConstructor", need to raise an exception.
2471 __ bind(&class_constructor);
2472 {
2473 FrameScope frame(masm, StackFrame::INTERNAL);
2474 __ push(r1);
2475 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2476 }
2477 }
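
// The conversion above implements ES6 OrdinaryCallBindThis; as a sketch
// (predicates paraphrased):
//
//   if (!shared->is_strict() && !shared->is_native()) {
//     if (receiver->IsNullOrUndefined()) receiver = global_proxy;
//     else if (!receiver->IsJSReceiver()) receiver = ToObject(receiver);
//   }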
2478
2479
2480 namespace {
2481
2482 void Generate_PushBoundArguments(MacroAssembler* masm) {
2483 // ----------- S t a t e -------------
2484 // -- r0 : the number of arguments (not including the receiver)
2485 // -- r1 : target (checked to be a JSBoundFunction)
2486 // -- r3 : new.target (only in case of [[Construct]])
2487 // -----------------------------------
2488
2489 // Load [[BoundArguments]] into r2 and length of that into r4.
2490 Label no_bound_arguments;
2491 __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
2492 __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
2493 __ SmiUntag(r4);
2494 __ cmp(r4, Operand(0));
2495 __ b(eq, &no_bound_arguments);
2496 {
2497 // ----------- S t a t e -------------
2498 // -- r0 : the number of arguments (not including the receiver)
2499 // -- r1 : target (checked to be a JSBoundFunction)
2500 // -- r2 : the [[BoundArguments]] (implemented as FixedArray)
2501 // -- r3 : new.target (only in case of [[Construct]])
2502 // -- r4 : the number of [[BoundArguments]]
2503 // -----------------------------------
2504
2505 // Reserve stack space for the [[BoundArguments]].
2506 {
2507 Label done;
2508 __ sub(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
2509 // Check the stack for overflow. We are not trying to catch interruptions
2510 // (i.e. debug break and preemption) here, so check the "real stack
2511 // limit".
2512 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
2513 __ b(gt, &done); // Signed comparison.
2514 // Restore the stack pointer.
2515 __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
2516 {
2517 FrameScope scope(masm, StackFrame::MANUAL);
2518 __ EnterFrame(StackFrame::INTERNAL);
2519 __ CallRuntime(Runtime::kThrowStackOverflow);
2520 }
2521 __ bind(&done);
2522 }
2523
2524 // Relocate arguments down the stack.
2525 {
2526 Label loop, done_loop;
2527 __ mov(r5, Operand(0));
2528 __ bind(&loop);
2529 __ cmp(r5, r0);
2530 __ b(gt, &done_loop);
2531 __ ldr(ip, MemOperand(sp, r4, LSL, kPointerSizeLog2));
2532 __ str(ip, MemOperand(sp, r5, LSL, kPointerSizeLog2));
2533 __ add(r4, r4, Operand(1));
2534 __ add(r5, r5, Operand(1));
2535 __ b(&loop);
2536 __ bind(&done_loop);
2537 }
2538
2539 // Copy [[BoundArguments]] to the stack (below the arguments).
2540 {
2541 Label loop;
2542 __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
2543 __ SmiUntag(r4);
2544 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2545 __ bind(&loop);
2546 __ sub(r4, r4, Operand(1), SetCC);
2547 __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2));
2548 __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
2549 __ add(r0, r0, Operand(1));
2550 __ b(gt, &loop);
2551 }
2552 }
2553 __ bind(&no_bound_arguments);
2554 }
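
// Net effect, in JS terms: for g = f.bind(thisArg, b1, ..., bm), a call
// g(a1, ..., an) arrives here with n stacked arguments; the two loops above
// splice the m bound arguments between them and the receiver, so the callee
// observes f(b1, ..., bm, a1, ..., an) with argc == n + m.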
2555
2556 } // namespace
2557
2558
2559 // static
2560 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2561 TailCallMode tail_call_mode) {
2562 // ----------- S t a t e -------------
2563 // -- r0 : the number of arguments (not including the receiver)
2564 // -- r1 : the function to call (checked to be a JSBoundFunction)
2565 // -----------------------------------
2566 __ AssertBoundFunction(r1);
2567
2568 if (tail_call_mode == TailCallMode::kAllow) {
2569 PrepareForTailCall(masm, r0, r3, r4, r5);
2570 }
2571
2572 // Patch the receiver to [[BoundThis]].
2573 __ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
2574 __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
2575
2576 // Push the [[BoundArguments]] onto the stack.
2577 Generate_PushBoundArguments(masm);
2578
2579 // Call the [[BoundTargetFunction]] via the Call builtin.
2580 __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
2581 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2582 masm->isolate())));
2583 __ ldr(ip, MemOperand(ip));
2584 __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
2585 }
2586
2587
2588 // static
2589 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2590 TailCallMode tail_call_mode) {
2591 // ----------- S t a t e -------------
2592 // -- r0 : the number of arguments (not including the receiver)
2593 // -- r1 : the target to call (can be any Object).
2594 // -----------------------------------
2595
2596 Label non_callable, non_function, non_smi;
2597 __ JumpIfSmi(r1, &non_callable);
2598 __ bind(&non_smi);
2599 __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
2600 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2601 RelocInfo::CODE_TARGET, eq);
2602 __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
2603 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2604 RelocInfo::CODE_TARGET, eq);
2605
2606 // Check if target has a [[Call]] internal method.
2607 __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
2608 __ tst(r4, Operand(1 << Map::kIsCallable));
2609 __ b(eq, &non_callable);
2610
2611 __ cmp(r5, Operand(JS_PROXY_TYPE));
2612 __ b(ne, &non_function);
2613
2614 // 0. Prepare for tail call if necessary.
2615 if (tail_call_mode == TailCallMode::kAllow) {
2616 PrepareForTailCall(masm, r0, r3, r4, r5);
2617 }
2618
2619 // 1. Runtime fallback for Proxy [[Call]].
2620 __ Push(r1);
 2621 // Increase the argument count to include the pushed function and the
2622 // existing receiver on the stack.
2623 __ add(r0, r0, Operand(2));
2624 // Tail-call to the runtime.
2625 __ JumpToExternalReference(
2626 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2627
2628 // 2. Call to something else, which might have a [[Call]] internal method (if
2629 // not we raise an exception).
2630 __ bind(&non_function);
 2631 // Overwrite the original receiver with the (original) target.
2632 __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
2633 // Let the "call_as_function_delegate" take care of the rest.
2634 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
2635 __ Jump(masm->isolate()->builtins()->CallFunction(
2636 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2637 RelocInfo::CODE_TARGET);
2638
2639 // 3. Call to something that is not callable.
2640 __ bind(&non_callable);
2641 {
2642 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2643 __ Push(r1);
2644 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2645 }
2646 }
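
// Dispatch summary for Call, as a sketch:
//
//   if (target->IsJSFunction())        return CallFunction(target);
//   if (target->IsJSBoundFunction())   return CallBoundFunction(target);
//   if (!target->map()->is_callable()) throw TypeError;  // non-callable
//   if (target->IsJSProxy())           return Runtime_JSProxyCall(...);
//   // Any other callable object (e.g. a callable API object):
//   receiver = target;
//   return CallFunction(CALL_AS_FUNCTION_DELEGATE);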
2647
2648
2649 // static
2650 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2651 // ----------- S t a t e -------------
2652 // -- r0 : the number of arguments (not including the receiver)
2653 // -- r1 : the constructor to call (checked to be a JSFunction)
2654 // -- r3 : the new target (checked to be a constructor)
2655 // -----------------------------------
2656 __ AssertFunction(r1);
2657
 2658 // The calling convention for function-specific ConstructStubs requires
 2659 // r2 to contain either an AllocationSite or undefined.
2660 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2661
2662 // Tail call to the function-specific construct stub (still in the caller
2663 // context at this point).
2664 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
2665 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
2666 __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
2667 }
2668
2669
2670 // static
2671 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2672 // ----------- S t a t e -------------
2673 // -- r0 : the number of arguments (not including the receiver)
2674 // -- r1 : the function to call (checked to be a JSBoundFunction)
2675 // -- r3 : the new target (checked to be a constructor)
2676 // -----------------------------------
2677 __ AssertBoundFunction(r1);
2678
2679 // Push the [[BoundArguments]] onto the stack.
2680 Generate_PushBoundArguments(masm);
2681
2682 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2683 __ cmp(r1, r3);
2684 __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
2685 eq);
2686
2687 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2688 __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
2689 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
2690 __ ldr(ip, MemOperand(ip));
2691 __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
2692 }
2693
2694
2695 // static
2696 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2697 // ----------- S t a t e -------------
2698 // -- r0 : the number of arguments (not including the receiver)
2699 // -- r1 : the constructor to call (checked to be a JSProxy)
2700 // -- r3 : the new target (either the same as the constructor or
2701 // the JSFunction on which new was invoked initially)
2702 // -----------------------------------
2703
2704 // Call into the Runtime for Proxy [[Construct]].
2705 __ Push(r1);
2706 __ Push(r3);
2707 // Include the pushed new_target, constructor and the receiver.
2708 __ add(r0, r0, Operand(3));
2709 // Tail-call to the runtime.
2710 __ JumpToExternalReference(
2711 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2712 }
2713
2714
2715 // static
2716 void Builtins::Generate_Construct(MacroAssembler* masm) {
2717 // ----------- S t a t e -------------
2718 // -- r0 : the number of arguments (not including the receiver)
2719 // -- r1 : the constructor to call (can be any Object)
2720 // -- r3 : the new target (either the same as the constructor or
2721 // the JSFunction on which new was invoked initially)
2722 // -----------------------------------
2723
2724 // Check if target is a Smi.
2725 Label non_constructor;
2726 __ JumpIfSmi(r1, &non_constructor);
2727
2728 // Dispatch based on instance type.
2729 __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
2730 __ Jump(masm->isolate()->builtins()->ConstructFunction(),
2731 RelocInfo::CODE_TARGET, eq);
2732
2733 // Check if target has a [[Construct]] internal method.
2734 __ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
2735 __ tst(r2, Operand(1 << Map::kIsConstructor));
2736 __ b(eq, &non_constructor);
2737
2738 // Only dispatch to bound functions after checking whether they are
2739 // constructors.
2740 __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
2741 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
2742 RelocInfo::CODE_TARGET, eq);
2743
2744 // Only dispatch to proxies after checking whether they are constructors.
2745 __ cmp(r5, Operand(JS_PROXY_TYPE));
2746 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
2747 eq);
2748
2749 // Called Construct on an exotic Object with a [[Construct]] internal method.
2750 {
2751 // Overwrite the original receiver with the (original) target.
2752 __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
2753 // Let the "call_as_constructor_delegate" take care of the rest.
2754 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r1);
2755 __ Jump(masm->isolate()->builtins()->CallFunction(),
2756 RelocInfo::CODE_TARGET);
2757 }
2758
2759 // Called Construct on an Object that doesn't have a [[Construct]] internal
2760 // method.
2761 __ bind(&non_constructor);
2762 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2763 RelocInfo::CODE_TARGET);
2764 }
2765
2766 // static
2767 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2768 // ----------- S t a t e -------------
2769 // -- r1 : requested object size (untagged)
2770 // -- lr : return address
2771 // -----------------------------------
2772 __ SmiTag(r1);
2773 __ Push(r1);
2774 __ Move(cp, Smi::FromInt(0));
2775 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2776 }
2777
2778 // static
2779 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2780 // ----------- S t a t e -------------
2781 // -- r1 : requested object size (untagged)
2782 // -- lr : return address
2783 // -----------------------------------
2784 __ SmiTag(r1);
2785 __ Move(r2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2786 __ Push(r1, r2);
2787 __ Move(cp, Smi::FromInt(0));
2788 __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2789 }
2790
2791 // static
2792 void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
2793 // The StringToNumber stub takes one argument in r0.
2794 __ AssertString(r0);
2795
2796 // Check if string has a cached array index.
2797 Label runtime;
2798 __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset));
2799 __ tst(r2, Operand(String::kContainsCachedArrayIndexMask));
2800 __ b(ne, &runtime);
2801 __ IndexFromHash(r2, r0);
2802 __ Ret();
2803
2804 __ bind(&runtime);
2805 {
2806 FrameScope frame(masm, StackFrame::INTERNAL);
2807 // Push argument.
2808 __ Push(r0);
2809 // We cannot use a tail call here because this builtin can also be called
2810 // from wasm.
2811 __ CallRuntime(Runtime::kStringToNumber);
2812 }
2813 __ Ret();
2814 }
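
// The fast path above, in pseudocode (bit-field helpers paraphrased from
// src/objects.h):
//
//   uint32_t hash = string->hash_field();
//   if ((hash & String::kContainsCachedArrayIndexMask) == 0)
//     return Smi::FromInt(String::ArrayIndexValueBits::decode(hash));
//   return Runtime_StringToNumber(string);  // Slow path in a GC-safe frame.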
2815
2816 void Builtins::Generate_ToNumber(MacroAssembler* masm) {
2817 // The ToNumber stub takes one argument in r0.
2818 STATIC_ASSERT(kSmiTag == 0);
2819 __ tst(r0, Operand(kSmiTagMask));
2820 __ Ret(eq);
2821
2822 __ CompareObjectType(r0, r1, r1, HEAP_NUMBER_TYPE);
2823 // r0: receiver
2824 // r1: receiver instance type
2825 __ Ret(eq);
2826
2827 __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
2828 RelocInfo::CODE_TARGET);
2829 }
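
// Equivalent dispatch, as a sketch: smis and heap numbers are already
// numbers; everything else is handled by NonNumberToNumber:
//
//   if (x->IsSmi()) return x;
//   if (x->map() == heap_number_map) return x;
//   return NonNumberToNumber(x);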
2830
2831 void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
2832 // The NonNumberToNumber stub takes one argument in r0.
2833 __ AssertNotNumber(r0);
2834
2835 __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
2836 // r0: receiver
2837 // r1: receiver instance type
2838 __ Jump(masm->isolate()->builtins()->StringToNumber(), RelocInfo::CODE_TARGET,
2839 lo);
2840
2841 Label not_oddball;
2842 __ cmp(r1, Operand(ODDBALL_TYPE));
2843 __ b(ne, &not_oddball);
2844 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
2845 __ Ret();
2846 __ bind(&not_oddball);
2847 {
2848 FrameScope frame(masm, StackFrame::INTERNAL);
2849 // Push argument.
2850 __ Push(r0);
2851 // We cannot use a tail call here because this builtin can also be called
2852 // from wasm.
2853 __ CallRuntime(Runtime::kToNumber);
2854 }
2855 __ Ret();
2856 }
2857
2858 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2859 // ----------- S t a t e -------------
2860 // -- r0 : actual number of arguments
2861 // -- r1 : function (passed through to callee)
2862 // -- r2 : expected number of arguments
2863 // -- r3 : new target (passed through to callee)
2864 // -----------------------------------
2865
2866 Label invoke, dont_adapt_arguments, stack_overflow;
2867
2868 Label enough, too_few;
2869 __ cmp(r0, r2);
2870 __ b(lt, &too_few);
2871 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2872 __ b(eq, &dont_adapt_arguments);
2873
2874 { // Enough parameters: actual >= expected
2875 __ bind(&enough);
2876 EnterArgumentsAdaptorFrame(masm);
2877 ArgumentAdaptorStackCheck(masm, &stack_overflow);
2878
 2879 // Calculate the copy start address into r0 and the copy end address into r4.
2880 // r0: actual number of arguments as a smi
2881 // r1: function
2882 // r2: expected number of arguments
2883 // r3: new target (passed through to callee)
2884 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
 2885 // Adjust for the return address and receiver.
2886 __ add(r0, r0, Operand(2 * kPointerSize));
2887 __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));
2888
2889 // Copy the arguments (including the receiver) to the new stack frame.
2890 // r0: copy start address
2891 // r1: function
2892 // r2: expected number of arguments
2893 // r3: new target (passed through to callee)
2894 // r4: copy end address
2895
2896 Label copy;
2897 __ bind(&copy);
2898 __ ldr(ip, MemOperand(r0, 0));
2899 __ push(ip);
2900 __ cmp(r0, r4); // Compare before moving to next argument.
2901 __ sub(r0, r0, Operand(kPointerSize));
2902 __ b(ne, &copy);
2903
2904 __ b(&invoke);
2905 }
2906
 2907 { // Too few parameters: actual < expected
2908 __ bind(&too_few);
2909 EnterArgumentsAdaptorFrame(masm);
2910 ArgumentAdaptorStackCheck(masm, &stack_overflow);
2911
 2912 // Calculate the copy start address into r0; the copy end address is fp.
2913 // r0: actual number of arguments as a smi
2914 // r1: function
2915 // r2: expected number of arguments
2916 // r3: new target (passed through to callee)
2917 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
2918
2919 // Copy the arguments (including the receiver) to the new stack frame.
2920 // r0: copy start address
2921 // r1: function
2922 // r2: expected number of arguments
2923 // r3: new target (passed through to callee)
2924 Label copy;
2925 __ bind(&copy);
2926 // Adjust load for return address and receiver.
2927 __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
2928 __ push(ip);
2929 __ cmp(r0, fp); // Compare before moving to next argument.
2930 __ sub(r0, r0, Operand(kPointerSize));
2931 __ b(ne, &copy);
2932
2933 // Fill the remaining expected arguments with undefined.
2934 // r1: function
2935 // r2: expected number of arguments
2936 // r3: new target (passed through to callee)
2937 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2938 __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
2939 // Adjust for frame.
2940 __ sub(r4, r4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2941 2 * kPointerSize));
2942
2943 Label fill;
2944 __ bind(&fill);
2945 __ push(ip);
2946 __ cmp(sp, r4);
2947 __ b(ne, &fill);
2948 }
2949
2950 // Call the entry point.
2951 __ bind(&invoke);
2952 __ mov(r0, r2);
2953 // r0 : expected number of arguments
2954 // r1 : function (passed through to callee)
2955 // r3 : new target (passed through to callee)
2956 __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2957 __ Call(r4);
2958
2959 // Store offset of return address for deoptimizer.
2960 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2961
2962 // Exit frame and return.
2963 LeaveArgumentsAdaptorFrame(masm);
2964 __ Jump(lr);
2965
2966
2967 // -------------------------------------------
 2968 // Don't adapt arguments.
2969 // -------------------------------------------
2970 __ bind(&dont_adapt_arguments);
2971 __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2972 __ Jump(r4);
2973
2974 __ bind(&stack_overflow);
2975 {
2976 FrameScope frame(masm, StackFrame::MANUAL);
2977 __ CallRuntime(Runtime::kThrowStackOverflow);
2978 __ bkpt(0);
2979 }
2980 }
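
// The adaptor's contract, in C terms: present exactly `expected` arguments
// to the callee, truncating extras or padding with undefined:
//
//   for (int i = 0; i < expected; ++i)
//     adapted_args[i] = (i < actual) ? caller_args[i] : undefined;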
2981
2982
2983 #undef __
2984
2985 } // namespace internal
2986 } // namespace v8
2987
2988 #endif // V8_TARGET_ARCH_ARM