Chromium Code Reviews

Side by Side Diff: src/arm64/builtins-arm64.cc

Issue 2145023002: [builtins] move builtin files to src/builtins/. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebase Created 4 years, 5 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_ARM64
6
7 #include "src/arm64/frames-arm64.h"
8 #include "src/codegen.h"
9 #include "src/debug/debug.h"
10 #include "src/deoptimizer.h"
11 #include "src/full-codegen/full-codegen.h"
12 #include "src/runtime/runtime.h"
13
14 namespace v8 {
15 namespace internal {
16
17
18 #define __ ACCESS_MASM(masm)
19
20
21 // Load the built-in Array function from the current context.
22 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
23 // Load the Array function from the native context.
24 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
25 }
26
27
28 // Load the built-in InternalArray function from the current context.
29 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
30 Register result) {
31 // Load the InternalArray function from the native context.
32 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
33 }
34
35 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
36 ExitFrameType exit_frame_type) {
37 // ----------- S t a t e -------------
38 // -- x0 : number of arguments excluding receiver
39 // -- x1 : target
40 // -- x3 : new target
41 // -- sp[0] : last argument
42 // -- ...
43 // -- sp[8 * (argc - 1)] : first argument
44 // -- sp[8 * argc] : receiver
45 // -----------------------------------
46 __ AssertFunction(x1);
47
48 // Make sure we operate in the context of the called function (for example
49 // ConstructStubs implemented in C++ will be run in the context of the caller
50 // instead of the callee, due to the way that [[Construct]] is defined for
51 // ordinary functions).
52 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
53
54 // JumpToExternalReference expects x0 to contain the number of arguments
55 // including the receiver and the extra arguments.
56 const int num_extra_args = 3;
57 __ Add(x0, x0, num_extra_args + 1);
58
59 // Insert extra arguments.
60 __ SmiTag(x0);
61 __ Push(x0, x1, x3);
62 __ SmiUntag(x0);
63
64 __ JumpToExternalReference(ExternalReference(id, masm->isolate()),
65 exit_frame_type == BUILTIN_EXIT);
66 }
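
For orientation, the count adjustment above in plain C++ (a minimal sketch; the helper name is ours, not part of the patch): the C++ builtin ends up seeing the JS-visible arguments plus the receiver plus the three extra slots pushed here.

static int EffectiveArgumentCount(int js_argc) {
  const int kNumExtraArgs = 3;         // argc (smi-tagged), target, new target
  return js_argc + kNumExtraArgs + 1;  // + 1 for the receiver
}
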
67
68
69 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
70 // ----------- S t a t e -------------
71 // -- x0 : number of arguments
72 // -- lr : return address
73 // -- sp[...]: constructor arguments
74 // -----------------------------------
75 ASM_LOCATION("Builtins::Generate_InternalArrayCode");
76 Label generic_array_code;
77
78 // Get the InternalArray function.
79 GenerateLoadInternalArrayFunction(masm, x1);
80
81 if (FLAG_debug_code) {
82 // The initial map for the builtin InternalArray function should be a map.
83 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
84 __ Tst(x10, kSmiTagMask);
85 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
86 __ CompareObjectType(x10, x11, x12, MAP_TYPE);
87 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
88 }
89
90 // Run the native code for the InternalArray function called as a normal
91 // function.
92 InternalArrayConstructorStub stub(masm->isolate());
93 __ TailCallStub(&stub);
94 }
95
96
97 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
98 // ----------- S t a t e -------------
99 // -- x0 : number of arguments
100 // -- lr : return address
101 // -- sp[...]: constructor arguments
102 // -----------------------------------
103 ASM_LOCATION("Builtins::Generate_ArrayCode");
104 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
105
106 // Get the Array function.
107 GenerateLoadArrayFunction(masm, x1);
108
109 if (FLAG_debug_code) {
110 // The initial map for the builtin Array function should be a map.
111 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
112 __ Tst(x10, kSmiTagMask);
113 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
114 __ CompareObjectType(x10, x11, x12, MAP_TYPE);
115 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
116 }
117
118 // Run the native code for the Array function called as a normal function.
119 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
120 __ Mov(x3, x1);
121 ArrayConstructorStub stub(masm->isolate());
122 __ TailCallStub(&stub);
123 }
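
A rough standalone paraphrase of the FLAG_debug_code assertions used by both array builtins above (all names ours, and the MAP_TYPE value below is a stand-in, not V8's real constant): the initial-map slot must hold a heap object, and that object must itself be a Map.

#include <cstdint>

static bool LooksLikeInitialMap(uintptr_t tagged_value, int instance_type) {
  const uintptr_t kSmiTagMask = 1;  // heap object pointers have the low bit set
  const int kMapType = 0xa9;        // stand-in for MAP_TYPE
  if ((tagged_value & kSmiTagMask) == 0) return false;  // smi: Assert(ne) fires
  return instance_type == kMapType;  // CompareObjectType / Assert(eq)
}
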
124
125
126 // static
127 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
128 // ----------- S t a t e -------------
129 // -- x0 : number of arguments
130 // -- x1 : function
131 // -- cp : context
132 // -- lr : return address
133 // -- sp[(argc - n - 1) * 8] : arg[n] (zero-based)
134 // -- sp[argc * 8] : receiver
135 // -----------------------------------
136 ASM_LOCATION("Builtins::Generate_MathMaxMin");
137
138 Heap::RootListIndex const root_index =
139 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
140 : Heap::kMinusInfinityValueRootIndex;
141
142 // Load the accumulator with the default return value (either -Infinity or
143 // +Infinity), with the tagged value in x5 and the double value in d5.
144 __ LoadRoot(x5, root_index);
145 __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset));
146
147 Label done_loop, loop;
148 __ mov(x4, x0);
149 __ Bind(&loop);
150 {
151 // Check whether all parameters have been processed.
152 __ Subs(x4, x4, 1);
153 __ B(lt, &done_loop);
154
155 // Load the next parameter tagged value into x2.
156 __ Peek(x2, Operand(x4, LSL, kPointerSizeLog2));
157
158 // Load the double value of the parameter into d2, converting the
159 // parameter to a number first with the ToNumber builtin if necessary.
160 Label convert_smi, convert_number, done_convert;
161 __ JumpIfSmi(x2, &convert_smi);
162 __ JumpIfHeapNumber(x2, &convert_number);
163 {
164 // Parameter is not a Number, use the ToNumber builtin to convert it.
165 FrameScope scope(masm, StackFrame::MANUAL);
166 __ SmiTag(x0);
167 __ SmiTag(x4);
168 __ EnterBuiltinFrame(cp, x1, x0);
169 __ Push(x5, x4);
170 __ Mov(x0, x2);
171 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
172 __ Mov(x2, x0);
173 __ Pop(x4, x5);
174 __ LeaveBuiltinFrame(cp, x1, x0);
175 __ SmiUntag(x4);
176 __ SmiUntag(x0);
177 {
178 // Restore the double accumulator value (d5).
179 Label done_restore;
180 __ SmiUntagToDouble(d5, x5, kSpeculativeUntag);
181 __ JumpIfSmi(x5, &done_restore);
182 __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset));
183 __ Bind(&done_restore);
184 }
185 }
186 __ AssertNumber(x2);
187 __ JumpIfSmi(x2, &convert_smi);
188
189 __ Bind(&convert_number);
190 __ Ldr(d2, FieldMemOperand(x2, HeapNumber::kValueOffset));
191 __ B(&done_convert);
192
193 __ Bind(&convert_smi);
194 __ SmiUntagToDouble(d2, x2);
195 __ Bind(&done_convert);
196
197 // We can use a single fmin/fmax for the operation itself, but we then need
198 // to work out which HeapNumber (or smi) the result came from.
199 __ Fmov(x11, d5);
200 if (kind == MathMaxMinKind::kMin) {
201 __ Fmin(d5, d5, d2);
202 } else {
203 DCHECK(kind == MathMaxMinKind::kMax);
204 __ Fmax(d5, d5, d2);
205 }
206 __ Fmov(x10, d5);
207 __ Cmp(x10, x11);
208 __ Csel(x5, x5, x2, eq);
209 __ B(&loop);
210 }
211
212 __ Bind(&done_loop);
213 // Drop all slots, including the receiver.
214 __ Add(x0, x0, 1);
215 __ Drop(x0);
216 __ Mov(x0, x5);
217 __ Ret();
218 }
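
The fmin/fmax plus bit-compare sequence above deserves a gloss: a plain compare-and-branch would mishandle -0.0 and NaN, so the code lets the FPU compute the result and then checks whether the accumulator's raw bits survived to decide which tagged value to keep. A standalone C++ model (names ours; ARM's fmin semantics are approximated by hand because std::fmin does not propagate NaN):

#include <cmath>
#include <cstdint>
#include <cstring>
#include <limits>

static double ArmFmin(double a, double b) {
  if (std::isnan(a) || std::isnan(b))
    return std::numeric_limits<double>::quiet_NaN();  // NaN propagates
  if (a == 0.0 && b == 0.0) return std::signbit(a) ? a : b;  // -0.0 < +0.0
  return a < b ? a : b;
}

static uint64_t Bits(double v) {
  uint64_t b;
  std::memcpy(&b, &v, sizeof(b));
  return b;
}

// True when the accumulator's tagged value should be kept (the Csel "eq" case).
static bool KeepAccumulator(double acc, double param) {
  return Bits(ArmFmin(acc, param)) == Bits(acc);  // Fmov/Fmov/Cmp above
}
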
219
220 // static
221 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
222 // ----------- S t a t e -------------
223 // -- x0 : number of arguments
224 // -- x1 : constructor function
225 // -- cp : context
226 // -- lr : return address
227 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
228 // -- sp[argc * 8] : receiver
229 // -----------------------------------
230 ASM_LOCATION("Builtins::Generate_NumberConstructor");
231
232 // 1. Load the first argument into x0.
233 Label no_arguments;
234 {
235 __ Cbz(x0, &no_arguments);
236 __ Mov(x2, x0); // Store argc in x2.
237 __ Sub(x0, x0, 1);
238 __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
239 }
240
241 // 2a. Convert first argument to number.
242 {
243 FrameScope scope(masm, StackFrame::MANUAL);
244 __ SmiTag(x2);
245 __ EnterBuiltinFrame(cp, x1, x2);
246 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
247 __ LeaveBuiltinFrame(cp, x1, x2);
248 __ SmiUntag(x2);
249 }
250
251 {
252 // Drop all arguments.
253 __ Drop(x2);
254 }
255
256 // 2b. No arguments, return +0 (already in x0).
257 __ Bind(&no_arguments);
258 __ Drop(1);
259 __ Ret();
260 }
261
262
263 // static
264 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
265 // ----------- S t a t e -------------
266 // -- x0 : number of arguments
267 // -- x1 : constructor function
268 // -- x3 : new target
269 // -- cp : context
270 // -- lr : return address
271 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
272 // -- sp[argc * 8] : receiver
273 // -----------------------------------
274 ASM_LOCATION("Builtins::Generate_NumberConstructor_ConstructStub");
275
276 // 1. Make sure we operate in the context of the called function.
277 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
278
279 // 2. Load the first argument into x2.
280 {
281 Label no_arguments, done;
282 __ Move(x6, x0); // Store argc in x6.
283 __ Cbz(x0, &no_arguments);
284 __ Sub(x0, x0, 1);
285 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
286 __ B(&done);
287 __ Bind(&no_arguments);
288 __ Mov(x2, Smi::FromInt(0));
289 __ Bind(&done);
290 }
291
292 // 3. Make sure x2 is a number.
293 {
294 Label done_convert;
295 __ JumpIfSmi(x2, &done_convert);
296 __ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq);
297 {
298 FrameScope scope(masm, StackFrame::MANUAL);
299 __ SmiTag(x6);
300 __ EnterBuiltinFrame(cp, x1, x6);
301 __ Push(x3);
302 __ Move(x0, x2);
303 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
304 __ Move(x2, x0);
305 __ Pop(x3);
306 __ LeaveBuiltinFrame(cp, x1, x6);
307 __ SmiUntag(x6);
308 }
309 __ Bind(&done_convert);
310 }
311
312 // 4. Check if new target and constructor differ.
313 Label drop_frame_and_ret, new_object;
314 __ Cmp(x1, x3);
315 __ B(ne, &new_object);
316
317 // 5. Allocate a JSValue wrapper for the number.
318 __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
319 __ B(&drop_frame_and_ret);
320
321 // 6. Fall back to the runtime to create the new object.
322 __ bind(&new_object);
323 {
324 FrameScope scope(masm, StackFrame::MANUAL);
325 FastNewObjectStub stub(masm->isolate());
326 __ SmiTag(x6);
327 __ EnterBuiltinFrame(cp, x1, x6);
328 __ Push(x2); // first argument
329 __ CallStub(&stub);
330 __ Pop(x2);
331 __ LeaveBuiltinFrame(cp, x1, x6);
332 __ SmiUntag(x6);
333 }
334 __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
335
336 __ bind(&drop_frame_and_ret);
337 {
338 __ Drop(x6);
339 __ Drop(1);
340 __ Ret();
341 }
342 }
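
In JS terms, the happy path above implements new Number(value) (ES6 20.1.1.1): allocate a wrapper whose value slot holds ToNumber(value). A standalone model of what AllocateJSValue plus the final Str produce (struct and names ours):

struct JSValueModel {
  const void* map;  // initial map derived from the constructor
  double value;     // the ToNumber()'d first argument ([[NumberData]])
};

static JSValueModel MakeNumberWrapper(const void* initial_map, double number) {
  return {initial_map, number};
}
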
343
344
345 // static
346 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
347 // ----------- S t a t e -------------
348 // -- x0 : number of arguments
349 // -- x1 : constructor function
350 // -- cp : context
351 // -- lr : return address
352 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
353 // -- sp[argc * 8] : receiver
354 // -----------------------------------
355 ASM_LOCATION("Builtins::Generate_StringConstructor");
356
357 // 1. Load the first argument into x0.
358 Label no_arguments;
359 {
360 __ Cbz(x0, &no_arguments);
361 __ Mov(x2, x0); // Store argc in x2.
362 __ Sub(x0, x0, 1);
363 __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
364 }
365
366 // 2a. At least one argument, return x0 if it's a string, otherwise
367 // dispatch to appropriate conversion.
368 Label drop_frame_and_ret, to_string, symbol_descriptive_string;
369 {
370 __ JumpIfSmi(x0, &to_string);
371 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
372 __ CompareObjectType(x0, x3, x3, FIRST_NONSTRING_TYPE);
373 __ B(hi, &to_string);
374 __ B(eq, &symbol_descriptive_string);
375 __ b(&drop_frame_and_ret);
376 }
377
378 // 2b. No arguments, return the empty string (and pop the receiver).
379 __ Bind(&no_arguments);
380 {
381 __ LoadRoot(x0, Heap::kempty_stringRootIndex);
382 __ Drop(1);
383 __ Ret();
384 }
385
386 // 3a. Convert x0 to a string.
387 __ Bind(&to_string);
388 {
389 FrameScope scope(masm, StackFrame::MANUAL);
390 ToStringStub stub(masm->isolate());
391 __ SmiTag(x2);
392 __ EnterBuiltinFrame(cp, x1, x2);
393 __ CallStub(&stub);
394 __ LeaveBuiltinFrame(cp, x1, x2);
395 __ SmiUntag(x2);
396 }
397 __ b(&drop_frame_and_ret);
398
399 // 3b. Convert symbol in x0 to a string.
400 __ Bind(&symbol_descriptive_string);
401 {
402 __ Drop(x2);
403 __ Drop(1);
404 __ Push(x0);
405 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
406 }
407
408 __ bind(&drop_frame_and_ret);
409 {
410 __ Drop(x2);
411 __ Drop(1);
412 __ Ret();
413 }
414 }
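
The STATIC_ASSERT in step 2a is what makes a single CompareObjectType enough: instance types are ordered so that everything below FIRST_NONSTRING_TYPE is a string and FIRST_NONSTRING_TYPE itself is SYMBOL_TYPE. A minimal C++ model of the three-way dispatch (enum and helper ours):

enum class Path { kReturnAsIs, kSymbolDescriptiveString, kToString };

static Path StringConstructorPath(bool is_smi, int instance_type,
                                  int first_nonstring_type) {
  if (is_smi) return Path::kToString;
  if (instance_type < first_nonstring_type) return Path::kReturnAsIs;  // lo
  if (instance_type == first_nonstring_type)                           // eq
    return Path::kSymbolDescriptiveString;
  return Path::kToString;                                              // hi
}
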
415
416
417 // static
418 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
419 // ----------- S t a t e -------------
420 // -- x0 : number of arguments
421 // -- x1 : constructor function
422 // -- x3 : new target
423 // -- cp : context
424 // -- lr : return address
425 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
426 // -- sp[argc * 8] : receiver
427 // -----------------------------------
428 ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");
429
430 // 1. Make sure we operate in the context of the called function.
431 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
432
433 // 2. Load the first argument into x2.
434 {
435 Label no_arguments, done;
436 __ mov(x6, x0); // Store argc in x6.
437 __ Cbz(x0, &no_arguments);
438 __ Sub(x0, x0, 1);
439 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
440 __ B(&done);
441 __ Bind(&no_arguments);
442 __ LoadRoot(x2, Heap::kempty_stringRootIndex);
443 __ Bind(&done);
444 }
445
446 // 3. Make sure x2 is a string.
447 {
448 Label convert, done_convert;
449 __ JumpIfSmi(x2, &convert);
450 __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
451 __ Bind(&convert);
452 {
453 FrameScope scope(masm, StackFrame::MANUAL);
454 ToStringStub stub(masm->isolate());
455 __ SmiTag(x6);
456 __ EnterBuiltinFrame(cp, x1, x6);
457 __ Push(x3);
458 __ Move(x0, x2);
459 __ CallStub(&stub);
460 __ Move(x2, x0);
461 __ Pop(x3);
462 __ LeaveBuiltinFrame(cp, x1, x6);
463 __ SmiUntag(x6);
464 }
465 __ Bind(&done_convert);
466 }
467
468 // 4. Check if new target and constructor differ.
469 Label drop_frame_and_ret, new_object;
470 __ Cmp(x1, x3);
471 __ B(ne, &new_object);
472
473 // 5. Allocate a JSValue wrapper for the string.
474 __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
475 __ B(&drop_frame_and_ret);
476
477 // 6. Fall back to the runtime to create the new object.
478 __ bind(&new_object);
479 {
480 FrameScope scope(masm, StackFrame::MANUAL);
481 FastNewObjectStub stub(masm->isolate());
482 __ SmiTag(x6);
483 __ EnterBuiltinFrame(cp, x1, x6);
484 __ Push(x2); // first argument
485 __ CallStub(&stub);
486 __ Pop(x2);
487 __ LeaveBuiltinFrame(cp, x1, x6);
488 __ SmiUntag(x6);
489 }
490 __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
491
492 __ bind(&drop_frame_and_ret);
493 {
494 __ Drop(x6);
495 __ Drop(1);
496 __ Ret();
497 }
498 }
499
500 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
501 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
502 __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
503 __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
504 __ Br(x2);
505 }
506
507 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
508 Runtime::FunctionId function_id) {
509 // ----------- S t a t e -------------
510 // -- x0 : argument count (preserved for callee)
511 // -- x1 : target function (preserved for callee)
512 // -- x3 : new target (preserved for callee)
513 // -----------------------------------
514 {
515 FrameScope scope(masm, StackFrame::INTERNAL);
516 // Push a copy of the target function and the new target.
517 // Push another copy as a parameter to the runtime call.
518 __ SmiTag(x0);
519 __ Push(x0, x1, x3, x1);
520
521 __ CallRuntime(function_id, 1);
522 __ Move(x2, x0);
523
524 // Restore target function and new target.
525 __ Pop(x3, x1, x0);
526 __ SmiUntag(x0);
527 }
528
529 __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
530 __ Br(x2);
531 }
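
Both tail-call helpers above use the same entry-point arithmetic: a tagged Code pointer carries kHeapObjectTag, so the first instruction sits Code::kHeaderSize - kHeapObjectTag bytes past it. Sketch (the header size is a stand-in; only the shape of the sum matters):

#include <cstdint>

static uintptr_t CodeEntry(uintptr_t tagged_code) {
  const uintptr_t kHeapObjectTag = 1;
  const uintptr_t kCodeHeaderSize = 0x80;  // stand-in for Code::kHeaderSize
  return tagged_code + kCodeHeaderSize - kHeapObjectTag;
}
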
532
533
534 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
535 // Checking whether the queued function is ready for install is optional,
536 // since we come across interrupts and stack checks elsewhere. However, not
537 // checking may delay installing ready functions, and always checking would be
538 // quite expensive. A good compromise is to first check against the stack
539 // limit as a cue for an interrupt signal.
540 Label ok;
541 __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
542 __ B(hs, &ok);
543
544 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
545
546 __ Bind(&ok);
547 GenerateTailCallToSharedCode(masm);
548 }
549
550
551 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
552 bool is_api_function,
553 bool create_implicit_receiver,
554 bool check_derived_construct) {
555 // ----------- S t a t e -------------
556 // -- x0 : number of arguments
557 // -- x1 : constructor function
558 // -- x2 : allocation site or undefined
559 // -- x3 : new target
560 // -- lr : return address
561 // -- cp : context pointer
562 // -- sp[...]: constructor arguments
563 // -----------------------------------
564
565 ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
566
567 Isolate* isolate = masm->isolate();
568
569 // Enter a construct frame.
570 {
571 FrameScope scope(masm, StackFrame::CONSTRUCT);
572
573 // Preserve the four incoming parameters on the stack.
574 Register argc = x0;
575 Register constructor = x1;
576 Register allocation_site = x2;
577 Register new_target = x3;
578
579 // Preserve the incoming parameters on the stack.
580 __ AssertUndefinedOrAllocationSite(allocation_site, x10);
581 __ Push(cp);
582 __ SmiTag(argc);
583 __ Push(allocation_site, argc);
584
585 if (create_implicit_receiver) {
586 // Allocate the new receiver object.
587 __ Push(constructor, new_target);
588 FastNewObjectStub stub(masm->isolate());
589 __ CallStub(&stub);
590 __ Mov(x4, x0);
591 __ Pop(new_target, constructor);
592
593 // ----------- S t a t e -------------
594 // -- x1: constructor function
595 // -- x3: new target
596 // -- x4: newly allocated object
597 // -----------------------------------
598
599 // Reload the number of arguments from the stack.
600 // Set it up in x0 for the function call below.
601 // jssp[0]: number of arguments (smi-tagged)
602 __ Peek(argc, 0); // Load number of arguments.
603 }
604
605 __ SmiUntag(argc);
606
607 if (create_implicit_receiver) {
608 // Push the allocated receiver to the stack. We need two copies
609 // because we may have to return the original one and the calling
610 // conventions dictate that the called function pops the receiver.
611 __ Push(x4, x4);
612 } else {
613 __ PushRoot(Heap::kTheHoleValueRootIndex);
614 }
615
616 // Set up pointer to last argument.
617 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);
618
619 // Copy arguments and receiver to the expression stack.
620 // Copy 2 values every loop to use ldp/stp.
621 // x0: number of arguments
622 // x1: constructor function
623 // x2: address of last argument (caller sp)
624 // x3: new target
625 // jssp[0]: receiver
626 // jssp[1]: receiver
627 // jssp[2]: number of arguments (smi-tagged)
628 // Compute the start address of the copy in x4.
629 __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2));
630 Label loop, entry, done_copying_arguments;
631 __ B(&entry);
632 __ Bind(&loop);
633 __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
634 __ Push(x11, x10);
635 __ Bind(&entry);
636 __ Cmp(x4, x2);
637 __ B(gt, &loop);
638 // Because we copied values 2 by 2 we may have copied one extra value.
639 // Drop it if that is the case.
640 __ B(eq, &done_copying_arguments);
641 __ Drop(1);
642 __ Bind(&done_copying_arguments);
643
644 // Call the function.
645 // x0: number of arguments
646 // x1: constructor function
647 // x3: new target
648 ParameterCount actual(argc);
649 __ InvokeFunction(constructor, new_target, actual, CALL_FUNCTION,
650 CheckDebugStepCallWrapper());
651
652 // Store offset of return address for deoptimizer.
653 if (create_implicit_receiver && !is_api_function) {
654 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
655 }
656
657 // Restore the context from the frame.
658 // x0: result
659 // jssp[0]: receiver
660 // jssp[1]: number of arguments (smi-tagged)
661 __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
662
663 if (create_implicit_receiver) {
664 // If the result is an object (in the ECMA sense), we should get rid
665 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
666 // on page 74.
667 Label use_receiver, exit;
668
669 // If the result is a smi, it is *not* an object in the ECMA sense.
670 // x0: result
671 // jssp[0]: receiver (newly allocated object)
672 // jssp[1]: number of arguments (smi-tagged)
673 __ JumpIfSmi(x0, &use_receiver);
674
675 // If the type of the result (stored in its map) is less than
676 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
677 __ JumpIfObjectType(x0, x1, x3, FIRST_JS_RECEIVER_TYPE, &exit, ge);
678
679 // Throw away the result of the constructor invocation and use the
680 // on-stack receiver as the result.
681 __ Bind(&use_receiver);
682 __ Peek(x0, 0);
683
684 // Remove the receiver from the stack, remove caller arguments, and
685 // return.
686 __ Bind(&exit);
687 // x0: result
688 // jssp[0]: receiver (newly allocated object)
689 // jssp[1]: number of arguments (smi-tagged)
690 __ Peek(x1, 1 * kXRegSize);
691 } else {
692 __ Peek(x1, 0);
693 }
694
695 // Leave construct frame.
696 }
697
698 // ES6 9.2.2. Step 13+
699 // If the result is a Smi, the constructor of the derived class returned
700 // neither undefined nor an Object, so throw a TypeError.
701 if (check_derived_construct) {
702 Label dont_throw;
703 __ JumpIfNotSmi(x0, &dont_throw);
704 {
705 FrameScope scope(masm, StackFrame::INTERNAL);
706 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
707 }
708 __ Bind(&dont_throw);
709 }
710
711 __ DropBySMI(x1);
712 __ Drop(1);
713 if (create_implicit_receiver) {
714 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
715 }
716 __ Ret();
717 }
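
The use_receiver/exit dance above is the ES6 9.2.2 result selection: the constructor's return value only replaces the implicit receiver when it is an object in the spec sense. A minimal model (template ours):

template <typename T>
static T* SelectConstructResult(T* returned, T* receiver, bool is_js_receiver) {
  // Smis and other primitives fall back to the freshly allocated receiver.
  return is_js_receiver ? returned : receiver;
}
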
718
719
720 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
721 Generate_JSConstructStubHelper(masm, false, true, false);
722 }
723
724
725 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
726 Generate_JSConstructStubHelper(masm, true, false, false);
727 }
728
729
730 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
731 Generate_JSConstructStubHelper(masm, false, false, false);
732 }
733
734
735 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
736 MacroAssembler* masm) {
737 Generate_JSConstructStubHelper(masm, false, false, true);
738 }
739
740
741 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
742 FrameScope scope(masm, StackFrame::INTERNAL);
743 __ Push(x1);
744 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
745 }
746
747 // static
748 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
749 // ----------- S t a t e -------------
750 // -- x0 : the value to pass to the generator
751 // -- x1 : the JSGeneratorObject to resume
752 // -- x2 : the resume mode (tagged)
753 // -- lr : return address
754 // -----------------------------------
755 __ AssertGeneratorObject(x1);
756
757 // Store input value into generator object.
758 __ Str(x0, FieldMemOperand(x1, JSGeneratorObject::kInputOrDebugPosOffset));
759 __ RecordWriteField(x1, JSGeneratorObject::kInputOrDebugPosOffset, x0, x3,
760 kLRHasNotBeenSaved, kDontSaveFPRegs);
761
762 // Store resume mode into generator object.
763 __ Str(x2, FieldMemOperand(x1, JSGeneratorObject::kResumeModeOffset));
764
765 // Load suspended function and context.
766 __ Ldr(cp, FieldMemOperand(x1, JSGeneratorObject::kContextOffset));
767 __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
768
769 // Flood function if we are stepping.
770 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
771 Label stepping_prepared;
772 ExternalReference last_step_action =
773 ExternalReference::debug_last_step_action_address(masm->isolate());
774 STATIC_ASSERT(StepFrame > StepIn);
775 __ Mov(x10, Operand(last_step_action));
776 __ Ldrsb(x10, MemOperand(x10));
777 __ CompareAndBranch(x10, Operand(StepIn), ge, &prepare_step_in_if_stepping);
778
779 // Flood function if we need to continue stepping in the suspended generator.
780 ExternalReference debug_suspended_generator =
781 ExternalReference::debug_suspended_generator_address(masm->isolate());
782 __ Mov(x10, Operand(debug_suspended_generator));
783 __ Ldr(x10, MemOperand(x10));
784 __ CompareAndBranch(x10, Operand(x1), eq,
785 &prepare_step_in_suspended_generator);
786 __ Bind(&stepping_prepared);
787
788 // Push receiver.
789 __ Ldr(x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset));
790 __ Push(x5);
791
792 // ----------- S t a t e -------------
793 // -- x1 : the JSGeneratorObject to resume
794 // -- x2 : the resume mode (tagged)
795 // -- x4 : generator function
796 // -- cp : generator context
797 // -- lr : return address
798 // -- jssp[0] : generator receiver
799 // -----------------------------------
800
801 // Push holes for arguments to generator function. Since the parser forced
802 // context allocation for any variables in generators, the actual argument
803 // values have already been copied into the context and these dummy values
804 // will never be used.
805 __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
806 __ Ldr(w10,
807 FieldMemOperand(x10, SharedFunctionInfo::kFormalParameterCountOffset));
808 __ LoadRoot(x11, Heap::kTheHoleValueRootIndex);
809 __ PushMultipleTimes(x11, w10);
810
811 // Dispatch on the kind of generator object.
812 Label old_generator;
813 __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
814 __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
815 __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE);
816 __ B(ne, &old_generator);
817
818 // New-style (ignition/turbofan) generator object
819 {
820 __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
821 __ Ldr(w0,
822 FieldMemOperand(x0, SharedFunctionInfo::kFormalParameterCountOffset));
823 // We abuse new.target both to indicate that this is a resume call and to
824 // pass in the generator object. In ordinary calls, new.target is always
825 // undefined because generator functions are non-constructable.
826 __ Move(x3, x1);
827 __ Move(x1, x4);
828 __ Ldr(x5, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
829 __ Jump(x5);
830 }
831
832 // Old-style (full-codegen) generator object
833 __ bind(&old_generator);
834 {
835 // Enter a new JavaScript frame, and initialize its slots as they were when
836 // the generator was suspended.
837 FrameScope scope(masm, StackFrame::MANUAL);
838 __ Push(lr, fp);
839 __ Move(fp, jssp);
840 __ Push(cp, x4);
841
842 // Restore the operand stack.
843 __ Ldr(x0, FieldMemOperand(x1, JSGeneratorObject::kOperandStackOffset));
844 __ Ldr(w3, UntagSmiFieldMemOperand(x0, FixedArray::kLengthOffset));
845 __ Add(x0, x0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
846 __ Add(x3, x0, Operand(x3, LSL, kPointerSizeLog2));
847 {
848 Label done_loop, loop;
849 __ Bind(&loop);
850 __ Cmp(x0, x3);
851 __ B(eq, &done_loop);
852 __ Ldr(x10, MemOperand(x0, kPointerSize, PostIndex));
853 __ Push(x10);
854 __ B(&loop);
855 __ Bind(&done_loop);
856 }
857
858 // Reset operand stack so we don't leak.
859 __ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex);
860 __ Str(x10, FieldMemOperand(x1, JSGeneratorObject::kOperandStackOffset));
861
862 // Resume the generator function at the continuation.
863 __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
864 __ Ldr(x10, FieldMemOperand(x10, SharedFunctionInfo::kCodeOffset));
865 __ Add(x10, x10, Code::kHeaderSize - kHeapObjectTag);
866 __ Ldrsw(x11,
867 UntagSmiFieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
868 __ Add(x10, x10, x11);
869 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
870 __ Str(x12, FieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
871 __ Move(x0, x1); // Continuation expects generator object in x0.
872 __ Br(x10);
873 }
874
875 __ Bind(&prepare_step_in_if_stepping);
876 {
877 FrameScope scope(masm, StackFrame::INTERNAL);
878 __ Push(x1, x2, x4);
879 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
880 __ Pop(x2, x1);
881 __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
882 }
883 __ B(&stepping_prepared);
884
885 __ Bind(&prepare_step_in_suspended_generator);
886 {
887 FrameScope scope(masm, StackFrame::INTERNAL);
888 __ Push(x1, x2);
889 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
890 __ Pop(x2, x1);
891 __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
892 }
893 __ B(&stepping_prepared);
894 }
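
One sketch for the hole-padding earlier in this trampoline (helper ours): resumed generators never read their formal parameters from the stack, because the parser context-allocated them before suspension, so the frame only needs placeholder values of the right count.

#include <vector>

template <typename Object>
static void PushHoles(std::vector<Object>* stack, int formal_parameter_count,
                      Object the_hole) {
  stack->insert(stack->end(), formal_parameter_count, the_hole);
}
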
895
896 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
897
898
899 // Clobbers x10, x15; preserves all other registers.
900 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
901 IsTagged argc_is_tagged) {
902 // Check the stack for overflow.
903 // We are not trying to catch interruptions (e.g. debug break and
904 // preemption) here, so the "real stack limit" is checked.
905 Label enough_stack_space;
906 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
907 // Make x10 the amount of space we have left. The stack might already have
908 // overflowed here, which will cause x10 to become negative.
909 // TODO(jbramley): Check that the stack usage here is safe.
910 __ Sub(x10, jssp, x10);
911 // Check if the arguments will overflow the stack.
912 if (argc_is_tagged == kArgcIsSmiTagged) {
913 __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
914 } else {
915 DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
916 __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
917 }
918 __ B(gt, &enough_stack_space);
919 __ CallRuntime(Runtime::kThrowStackOverflow);
920 // We should never return from the ThrowStackOverflow runtime call.
921 if (__ emit_debug_code()) {
922 __ Unreachable();
923 }
924
925 __ Bind(&enough_stack_space);
926 }
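
A C-level model of the check above (names ours). The subtraction can go negative when jssp is already below the limit; the signed "gt" branch then fails and we throw, which is why a signed comparison is essential here:

#include <cstdint>

static bool EnoughStackSpace(intptr_t sp, intptr_t real_limit, intptr_t argc) {
  const int kPointerSizeLog2 = 3;                  // 8-byte slots on arm64
  intptr_t space_left = sp - real_limit;           // Sub x10, jssp, x10
  return space_left > (argc << kPointerSizeLog2);  // B(gt, &enough_stack_space)
}
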
927
928
929 // Input:
930 // x0: new.target.
931 // x1: function.
932 // x2: receiver.
933 // x3: argc.
934 // x4: argv.
935 // Output:
936 // x0: result.
937 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
938 bool is_construct) {
939 // Called from JSEntryStub::GenerateBody().
940 Register new_target = x0;
941 Register function = x1;
942 Register receiver = x2;
943 Register argc = x3;
944 Register argv = x4;
945 Register scratch = x10;
946
947 ProfileEntryHookStub::MaybeCallEntryHook(masm);
948
949 {
950 // Enter an internal frame.
951 FrameScope scope(masm, StackFrame::INTERNAL);
952
953 // Set up the context (we need to use the caller context from the isolate).
954 __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
955 masm->isolate())));
956 __ Ldr(cp, MemOperand(scratch));
957
958 __ InitializeRootRegister();
959
960 // Push the function and the receiver onto the stack.
961 __ Push(function, receiver);
962
963 // Check if we have enough stack space to push all arguments.
964 // Expects the argument count in argc (x3). Clobbers x10 and x15.
965 Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);
966
967 // Copy arguments to the stack in a loop, in reverse order.
968 // x3: argc.
969 // x4: argv.
970 Label loop, entry;
971 // Compute the copy end address.
972 __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));
973
974 __ B(&entry);
975 __ Bind(&loop);
976 __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
977 __ Ldr(x12, MemOperand(x11)); // Dereference the handle.
978 __ Push(x12); // Push the argument.
979 __ Bind(&entry);
980 __ Cmp(scratch, argv);
981 __ B(ne, &loop);
982
983 __ Mov(scratch, argc);
984 __ Mov(argc, new_target);
985 __ Mov(new_target, scratch);
986 // x0: argc.
987 // x3: new.target.
988
989 // Initialize all JavaScript callee-saved registers, since they will be seen
990 // by the garbage collector as part of handlers.
991 // The original values have been saved in JSEntryStub::GenerateBody().
992 __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
993 __ Mov(x20, x19);
994 __ Mov(x21, x19);
995 __ Mov(x22, x19);
996 __ Mov(x23, x19);
997 __ Mov(x24, x19);
998 __ Mov(x25, x19);
999 // Don't initialize the reserved registers.
1000 // x26 : root register (root).
1001 // x27 : context pointer (cp).
1002 // x28 : JS stack pointer (jssp).
1003 // x29 : frame pointer (fp).
1004
1005 Handle<Code> builtin = is_construct
1006 ? masm->isolate()->builtins()->Construct()
1007 : masm->isolate()->builtins()->Call();
1008 __ Call(builtin, RelocInfo::CODE_TARGET);
1009
1010 // Exit the JS internal frame and remove the parameters (except function),
1011 // and return.
1012 }
1013
1014 // Result is in x0. Return.
1015 __ Ret();
1016 }
1017
1018
1019 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1020 Generate_JSEntryTrampolineHelper(masm, false);
1021 }
1022
1023
1024 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1025 Generate_JSEntryTrampolineHelper(masm, true);
1026 }
1027
1028 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
1029 Register args_count = scratch;
1030
1031 // Get the arguments + receiver count.
1032 __ ldr(args_count,
1033 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1034 __ Ldr(args_count.W(),
1035 FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
1036
1037 // Leave the frame (also dropping the register file).
1038 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1039
1040 // Drop receiver + arguments.
1041 __ Drop(args_count, 1);
1042 }
1043
1044 // Generate code for entering a JS function with the interpreter.
1045 // On entry to the function the receiver and arguments have been pushed on the
1046 // stack left to right. The actual argument count matches the formal parameter
1047 // count expected by the function.
1048 //
1049 // The live registers are:
1050 // - x1: the JS function object being called.
1051 // - x3: the new target
1052 // - cp: our context.
1053 // - fp: our caller's frame pointer.
1054 // - jssp: stack pointer.
1055 // - lr: return address.
1056 //
1057 // The function builds an interpreter frame. See InterpreterFrameConstants in
1058 // frames.h for its layout.
1059 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
1060 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1061
1062 // Open a frame scope to indicate that there is a frame on the stack. The
1063 // MANUAL indicates that the scope shouldn't actually generate code to set up
1064 // the frame (that is done below).
1065 FrameScope frame_scope(masm, StackFrame::MANUAL);
1066 __ Push(lr, fp, cp, x1);
1067 __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
1068
1069 // Get the bytecode array from the function object (or from the DebugInfo if
1070 // it is present) and load it into kInterpreterBytecodeArrayRegister.
1071 __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1072 Register debug_info = kInterpreterBytecodeArrayRegister;
1073 Label load_debug_bytecode_array, bytecode_array_loaded;
1074 DCHECK(!debug_info.is(x0));
1075 __ Ldr(debug_info, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset));
1076 __ Cmp(debug_info, Operand(DebugInfo::uninitialized()));
1077 __ B(ne, &load_debug_bytecode_array);
1078 __ Ldr(kInterpreterBytecodeArrayRegister,
1079 FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
1080 __ Bind(&bytecode_array_loaded);
1081
1082 // Check function data field is actually a BytecodeArray object.
1083 Label bytecode_array_not_present;
1084 __ CompareRoot(kInterpreterBytecodeArrayRegister,
1085 Heap::kUndefinedValueRootIndex);
1086 __ B(eq, &bytecode_array_not_present);
1087 if (FLAG_debug_code) {
1088 __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
1089 kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1090 __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
1091 BYTECODE_ARRAY_TYPE);
1092 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1093 }
1094
1095 // Load the initial bytecode offset.
1096 __ Mov(kInterpreterBytecodeOffsetRegister,
1097 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1098
1099 // Push new.target, bytecode array and Smi tagged bytecode array offset.
1100 __ SmiTag(x0, kInterpreterBytecodeOffsetRegister);
1101 __ Push(x3, kInterpreterBytecodeArrayRegister, x0);
1102
1103 // Allocate the local and temporary register file on the stack.
1104 {
1105 // Load frame size from the BytecodeArray object.
1106 __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1107 BytecodeArray::kFrameSizeOffset));
1108
1109 // Do a stack check to ensure we don't go over the limit.
1110 Label ok;
1111 DCHECK(jssp.Is(__ StackPointer()));
1112 __ Sub(x10, jssp, Operand(x11));
1113 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
1114 __ B(hs, &ok);
1115 __ CallRuntime(Runtime::kThrowStackOverflow);
1116 __ Bind(&ok);
1117
1118 // If ok, push undefined as the initial value for all register file entries.
1119 // Note: there should always be at least one stack slot for the return
1120 // register in the register file.
1121 Label loop_header;
1122 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
1123 // TODO(rmcilroy): Ensure we always have an even number of registers to
1124 // allow the stack to be 16-byte aligned (and remove the need for jssp).
1125 __ Lsr(x11, x11, kPointerSizeLog2);
1126 __ PushMultipleTimes(x10, x11);
1127 __ Bind(&loop_header);
1128 }
1129
1130 // Load accumulator and dispatch table into registers.
1131 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
1132 __ Mov(kInterpreterDispatchTableRegister,
1133 Operand(ExternalReference::interpreter_dispatch_table_address(
1134 masm->isolate())));
1135
1136 // Dispatch to the first bytecode handler for the function.
1137 __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
1138 kInterpreterBytecodeOffsetRegister));
1139 __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
1140 __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
1141 __ Call(ip0);
1142 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
1143
1144 // The return value is in x0.
1145 LeaveInterpreterFrame(masm, x2);
1146 __ Ret();
1147
1148 // Load debug copy of the bytecode array.
1149 __ Bind(&load_debug_bytecode_array);
1150 __ Ldr(kInterpreterBytecodeArrayRegister,
1151 FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
1152 __ B(&bytecode_array_loaded);
1153
1154 // If the bytecode array is no longer present, then the underlying function
1155 // has been switched to a different kind of code and we heal the closure by
1156 // switching the code entry field over to the new code object as well.
1157 __ Bind(&bytecode_array_not_present);
1158 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1159 __ Ldr(x7, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1160 __ Ldr(x7, FieldMemOperand(x7, SharedFunctionInfo::kCodeOffset));
1161 __ Add(x7, x7, Operand(Code::kHeaderSize - kHeapObjectTag));
1162 __ Str(x7, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
1163 __ RecordWriteCodeEntryField(x1, x7, x5);
1164 __ Jump(x7);
1165 }
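
The dispatch step above, modelled as plain C++ (typedef and names ours): the byte at the current bytecode offset indexes a table of handler entry points, one pointer per opcode, and control transfers there.

#include <cstdint>

typedef void (*BytecodeHandler)();

static BytecodeHandler NextHandler(const uint8_t* bytecodes, intptr_t offset,
                                   BytecodeHandler const* dispatch_table) {
  uint8_t bytecode = bytecodes[offset];  // Ldrb x1, [bytecodes, offset]
  return dispatch_table[bytecode];       // Ldr ip0, [table, x1, LSL #3]
}
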
1166
1167 void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
1168 // Save the function and context for call to CompileBaseline.
1169 __ ldr(x1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1170 __ ldr(kContextRegister,
1171 MemOperand(fp, StandardFrameConstants::kContextOffset));
1172
1173 // Leave the frame before recompiling for baseline so that we don't count as
1174 // an activation on the stack.
1175 LeaveInterpreterFrame(masm, x2);
1176
1177 {
1178 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1179 // Push return value.
1180 __ push(x0);
1181
1182 // Push function as argument and compile for baseline.
1183 __ push(x1);
1184 __ CallRuntime(Runtime::kCompileBaseline);
1185
1186 // Restore return value.
1187 __ pop(x0);
1188 }
1189 __ Ret();
1190 }
1191
1192 // static
1193 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
1194 MacroAssembler* masm, TailCallMode tail_call_mode,
1195 CallableType function_type) {
1196 // ----------- S t a t e -------------
1197 // -- x0 : the number of arguments (not including the receiver)
1198 // -- x2 : the address of the first argument to be pushed. Subsequent
1199 // arguments should be consecutive above this, in the same order as
1200 // they are to be pushed onto the stack.
1201 // -- x1 : the target to call (can be any Object).
1202 // -----------------------------------
1203
1204 // Find the address of the last argument.
1205 __ add(x3, x0, Operand(1)); // Add one for receiver.
1206 __ lsl(x3, x3, kPointerSizeLog2);
1207 __ sub(x4, x2, x3);
1208
1209 // Push the arguments.
1210 Label loop_header, loop_check;
1211 __ Mov(x5, jssp);
1212 __ Claim(x3, 1);
1213 __ B(&loop_check);
1214 __ Bind(&loop_header);
1215 // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
1216 __ Ldr(x3, MemOperand(x2, -kPointerSize, PostIndex));
1217 __ Str(x3, MemOperand(x5, -kPointerSize, PreIndex));
1218 __ Bind(&loop_check);
1219 __ Cmp(x2, x4);
1220 __ B(gt, &loop_header);
1221
1222 // Call the target.
1223 if (function_type == CallableType::kJSFunction) {
1224 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
1225 tail_call_mode),
1226 RelocInfo::CODE_TARGET);
1227 } else {
1228 DCHECK_EQ(function_type, CallableType::kAny);
1229 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
1230 tail_call_mode),
1231 RelocInfo::CODE_TARGET);
1232 }
1233 }
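
The address arithmetic feeding the loop above, as a sketch (helper ours): x2 points at the first argument and arguments sit at descending addresses, so the loop runs until it reaches x4 = x2 - (argc + 1) * 8, the extra slot covering the receiver.

#include <cstdint>

static uintptr_t CopyEndAddress(uintptr_t first_arg, uint64_t argc) {
  const int kPointerSizeLog2 = 3;
  return first_arg - ((argc + 1) << kPointerSizeLog2);
}
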
1234
1235 // static
1236 void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
1237 // ----------- S t a t e -------------
1238 // -- x0 : argument count (not including receiver)
1239 // -- x3 : new target
1240 // -- x1 : constructor to call
1241 // -- x2 : address of the first argument
1242 // -----------------------------------
1243
1244 // Find the address of the last argument.
1245 __ add(x5, x0, Operand(1)); // Add one for receiver (to be constructed).
1246 __ lsl(x5, x5, kPointerSizeLog2);
1247
1248 // Set stack pointer and where to stop.
1249 __ Mov(x6, jssp);
1250 __ Claim(x5, 1);
1251 __ sub(x4, x6, x5);
1252
1253 // Push a slot for the receiver.
1254 __ Str(xzr, MemOperand(x6, -kPointerSize, PreIndex));
1255
1256 Label loop_header, loop_check;
1257 // Push the arguments.
1258 __ B(&loop_check);
1259 __ Bind(&loop_header);
1260 // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
1261 __ Ldr(x5, MemOperand(x2, -kPointerSize, PostIndex));
1262 __ Str(x5, MemOperand(x6, -kPointerSize, PreIndex));
1263 __ Bind(&loop_check);
1264 __ Cmp(x6, x4);
1265 __ B(gt, &loop_header);
1266
1267 // Call the constructor with x0, x1, and x3 unmodified.
1268 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1269 }
1270
1271 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1272 // Set the return address to the correct point in the interpreter entry
1273 // trampoline.
1274 Smi* interpreter_entry_return_pc_offset(
1275 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1276 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
1277 __ LoadObject(x1, masm->isolate()->builtins()->InterpreterEntryTrampoline());
1278 __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value() +
1279 Code::kHeaderSize - kHeapObjectTag));
1280
1281 // Initialize the dispatch table register.
1282 __ Mov(kInterpreterDispatchTableRegister,
1283 Operand(ExternalReference::interpreter_dispatch_table_address(
1284 masm->isolate())));
1285
1286 // Get the bytecode array pointer from the frame.
1287 __ Ldr(kInterpreterBytecodeArrayRegister,
1288 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1289
1290 if (FLAG_debug_code) {
1291 // Check function data field is actually a BytecodeArray object.
1292 __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
1293 kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1294 __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
1295 BYTECODE_ARRAY_TYPE);
1296 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1297 }
1298
1299 // Get the target bytecode offset from the frame.
1300 __ Ldr(kInterpreterBytecodeOffsetRegister,
1301 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1302 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1303
1304 // Dispatch to the target bytecode.
1305 __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
1306 kInterpreterBytecodeOffsetRegister));
1307 __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
1308 __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
1309 __ Jump(ip0);
1310 }
1311
1312 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1313 // ----------- S t a t e -------------
1314 // -- x0 : argument count (preserved for callee)
1315 // -- x3 : new target (preserved for callee)
1316 // -- x1 : target function (preserved for callee)
1317 // -----------------------------------
1318 // First, look up code; maybe we don't need to compile!
1319 Label gotta_call_runtime;
1320 Label maybe_call_runtime;
1321 Label try_shared;
1322 Label loop_top, loop_bottom;
1323
1324 Register closure = x1;
1325 Register map = x13;
1326 Register index = x2;
1327 __ Ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1328 __ Ldr(map,
1329 FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1330 __ Ldrsw(index, UntagSmiFieldMemOperand(map, FixedArray::kLengthOffset));
1331 __ Cmp(index, Operand(2));
1332 __ B(lt, &gotta_call_runtime);
1333
1334 // Find literals.
1335 // x4 : native context
1336 // x2 : length / index
1337 // x13 : optimized code map
1338 // x3 : new target
1339 // x1 : closure
1340 Register native_context = x4;
1341 __ Ldr(native_context, NativeContextMemOperand());
1342
1343 __ Bind(&loop_top);
1344 Register temp = x5;
1345 Register array_pointer = x6;
1346
1347 // Does the native context match?
1348 __ Add(array_pointer, map, Operand(index, LSL, kPointerSizeLog2));
1349 __ Ldr(temp, FieldMemOperand(array_pointer,
1350 SharedFunctionInfo::kOffsetToPreviousContext));
1351 __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1352 __ Cmp(temp, native_context);
1353 __ B(ne, &loop_bottom);
1354 // OSR id set to none?
1355 __ Ldr(temp, FieldMemOperand(array_pointer,
1356 SharedFunctionInfo::kOffsetToPreviousOsrAstId));
1357 const int bailout_id = BailoutId::None().ToInt();
1358 __ Cmp(temp, Operand(Smi::FromInt(bailout_id)));
1359 __ B(ne, &loop_bottom);
1360
1361 // Literals available?
1362 Label got_literals, maybe_cleared_weakcell;
1363 Register temp2 = x7;
1364 __ Ldr(temp, FieldMemOperand(array_pointer,
1365 SharedFunctionInfo::kOffsetToPreviousLiterals));
1366 // temp contains either a WeakCell pointing to the literals array or the
1367 // literals array directly.
1368 STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
1369 __ Ldr(temp2, FieldMemOperand(temp, WeakCell::kValueOffset));
1370 __ JumpIfSmi(temp2, &maybe_cleared_weakcell);
1371 // temp2 is a pointer, therefore temp is a WeakCell pointing to a literals
1372 // array.
1373 __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1374 __ jmp(&got_literals);
1375
1376 // temp2 is a smi. If it's 0, then we are looking at a cleared WeakCell
1377 // around the literals array, and we should call into the runtime. If it's > 0,
1378 // then temp already contains the literals array.
1379 __ bind(&maybe_cleared_weakcell);
1380 __ Cmp(temp2, Operand(Smi::FromInt(0)));
1381 __ B(eq, &gotta_call_runtime);
1382
1383 // Save the literals in the closure.
1384 __ bind(&got_literals);
1385 __ Str(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
1386 __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, x7,
1387 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1388 OMIT_SMI_CHECK);
1389
1390 // Code available?
1391 Register entry = x7;
1392 __ Ldr(entry,
1393 FieldMemOperand(array_pointer,
1394 SharedFunctionInfo::kOffsetToPreviousCachedCode));
1395 __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1396 __ JumpIfSmi(entry, &maybe_call_runtime);
1397
1398 // Found literals and code. Get them into the closure and return.
1399 __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1400
1401 Label install_optimized_code_and_tailcall;
1402 __ Bind(&install_optimized_code_and_tailcall);
1403 __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1404 __ RecordWriteCodeEntryField(closure, entry, x5);
1405
1406 // Link the closure into the optimized function list.
1407 // x7 : code entry
1408 // x4 : native context
1409 // x1 : closure
1410 __ Ldr(x8,
1411 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1412 __ Str(x8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
1413 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, x8, x13,
1414 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1415 OMIT_SMI_CHECK);
1416 const int function_list_offset =
1417 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1418 __ Str(closure,
1419 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1420 __ Mov(x5, closure);
1421 __ RecordWriteContextSlot(native_context, function_list_offset, x5, x13,
1422 kLRHasNotBeenSaved, kDontSaveFPRegs);
1423 __ Jump(entry);
1424
1425 __ Bind(&loop_bottom);
1426 __ Sub(index, index, Operand(SharedFunctionInfo::kEntryLength));
1427 __ Cmp(index, Operand(1));
1428 __ B(gt, &loop_top);
1429
1430 // We found neither literals nor code.
1431 __ B(&gotta_call_runtime);
1432
1433 __ Bind(&maybe_call_runtime);
1434
1435 // Last possibility: check the context-free optimized code map entry.
1436 __ Ldr(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
1437 SharedFunctionInfo::kSharedCodeIndex));
1438 __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1439 __ JumpIfSmi(entry, &try_shared);
1440
1441 // Store code entry in the closure.
1442 __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1443 __ B(&install_optimized_code_and_tailcall);
1444
1445 __ Bind(&try_shared);
1446 // Is the full code valid?
1447 __ Ldr(entry,
1448 FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1449 __ Ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1450 __ Ldr(x5, FieldMemOperand(entry, Code::kFlagsOffset));
1451 __ and_(x5, x5, Operand(Code::KindField::kMask));
1452 __ Mov(x5, Operand(x5, LSR, Code::KindField::kShift));
1453 __ Cmp(x5, Operand(Code::BUILTIN));
1454 __ B(eq, &gotta_call_runtime);
1455 // Yes, install the full code.
1456 __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1457 __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1458 __ RecordWriteCodeEntryField(closure, entry, x5);
1459 __ Jump(entry);
1460
1461 __ Bind(&gotta_call_runtime);
1462 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1463 }
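
The probe loop above, reduced to its shape (struct, accessors and layout ours, not V8's actual FixedArray encoding): the optimized code map is scanned from the end in kEntryLength strides, and an entry hits when its context WeakCell holds the current native context and its OSR id is BailoutId::None().

struct CodeMapEntry {
  const void* context;   // value of the entry's context WeakCell
  int osr_ast_id;
  const void* literals;  // literals WeakCell
  const void* code;      // cached-code WeakCell
};

static const CodeMapEntry* ProbeCodeMap(const CodeMapEntry* entries, int count,
                                        const void* native_context,
                                        int bailout_id_none) {
  for (int i = count - 1; i >= 0; --i) {  // loop_top / loop_bottom
    if (entries[i].context != native_context) continue;
    if (entries[i].osr_ast_id != bailout_id_none) continue;
    return &entries[i];
  }
  return nullptr;  // fall through to gotta_call_runtime
}
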
1464
1465 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1466 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1467 }
1468
1469 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1470 GenerateTailCallToReturnedCode(masm,
1471 Runtime::kCompileOptimized_NotConcurrent);
1472 }
1473
1474
1475 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1476 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1477 }
1478
1479 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1480 // ----------- S t a t e -------------
1481 // -- x0 : argument count (preserved for callee)
1482 // -- x1 : target function (preserved for callee)
1483 // -- x3 : new target (preserved for callee)
1484 // -----------------------------------
1485 Label failed;
1486 {
1487 FrameScope scope(masm, StackFrame::INTERNAL);
1488 // Push a copy of the target function and the new target.
1489 __ SmiTag(x0);
1490 // Push another copy as a parameter to the runtime call.
1491 __ Push(x0, x1, x3, x1);
1492
1493 // Copy arguments from caller (stdlib, foreign, heap).
1494 for (int i = 2; i >= 0; --i) {
1495 __ ldr(x4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1496 i * kPointerSize));
1497 __ push(x4);
1498 }
1499 // Call the runtime; on success, unwind this frame and the parent frame.
1500 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1501 // A smi 0 is returned on failure, an object on success.
1502 __ JumpIfSmi(x0, &failed);
1503 scope.GenerateLeaveFrame();
1504 __ Drop(4);
1505 __ Ret();
1506
1507 __ bind(&failed);
1508 // Restore target function and new target.
1509 __ Pop(x3, x1, x0);
1510 __ SmiUntag(x0);
1511 }
1512 // On failure, tail call back to regular js.
1513 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1514 }
1515
1516 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1517 // For now, we are relying on the fact that make_code_young doesn't do any
1518 // garbage collection which allows us to save/restore the registers without
1519 // worrying about which of them contain pointers. We also don't build an
1520 // internal frame to make the code fast, since we shouldn't have to do stack
1521 // crawls in MakeCodeYoung. This seems a bit fragile.
1522
1523 // The following caller-saved registers must be saved and restored when
1524 // calling through to the runtime:
1525 // x0 - The address from which to resume execution.
1526 // x1 - isolate
1527 // x3 - new target
1528 // lr - The return address for the JSFunction itself. It has not yet been
1529 // preserved on the stack because the frame setup code was replaced
1530 // with a call to this stub, to handle code ageing.
1531 {
1532 FrameScope scope(masm, StackFrame::MANUAL);
1533 __ Push(x0, x1, x3, fp, lr);
1534 __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
1535 __ CallCFunction(
1536 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1537 __ Pop(lr, fp, x3, x1, x0);
1538 }
1539
1540 // The calling function has been made young again, so return to execute the
1541 // real frame set-up code.
1542 __ Br(x0);
1543 }
1544
1545 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1546 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1547 MacroAssembler* masm) { \
1548 GenerateMakeCodeYoungAgainCommon(masm); \
1549 } \
1550 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1551 MacroAssembler* masm) { \
1552 GenerateMakeCodeYoungAgainCommon(masm); \
1553 }
1554 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1555 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1556
1557
1558 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1559 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1560 // that make_code_young doesn't do any garbage collection which allows us to
1561 // save/restore the registers without worrying about which of them contain
1562 // pointers.
1563
1564 // The following caller-saved registers must be saved and restored when
1565 // calling through to the runtime:
1566 // x0 - The address from which to resume execution.
1567 // x1 - isolate
1568 // x3 - new target
1569 // lr - The return address for the JSFunction itself. It has not yet been
1570 // preserved on the stack because the frame setup code was replaced
1571 // with a call to this stub, to handle code ageing.
1572 {
1573 FrameScope scope(masm, StackFrame::MANUAL);
1574 __ Push(x0, x1, x3, fp, lr);
1575 __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
1576 __ CallCFunction(
1577 ExternalReference::get_mark_code_as_executed_function(
1578 masm->isolate()), 2);
1579 __ Pop(lr, fp, x3, x1, x0);
1580
1581 // Perform prologue operations usually performed by the young code stub.
1582 __ EmitFrameSetupForCodeAgePatching(masm);
1583 }
1584
1585 // Jump to point after the code-age stub.
1586 __ Add(x0, x0, kNoCodeAgeSequenceLength);
1587 __ Br(x0);
1588 }
1589
1590
1591 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1592 GenerateMakeCodeYoungAgainCommon(masm);
1593 }
1594
1595
1596 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1597 Generate_MarkCodeAsExecutedOnce(masm);
1598 }
1599
1600
1601 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1602 SaveFPRegsMode save_doubles) {
1603 {
1604 FrameScope scope(masm, StackFrame::INTERNAL);
1605
1606 // Preserve registers across notification, this is important for compiled
1607 // stubs that tail call the runtime on deopts passing their parameters in
1608 // registers.
1609 // TODO(jbramley): Is it correct (and appropriate) to use safepoint
1610 // registers here? According to the comment above, we should only need to
1611 // preserve the registers with parameters.
1612 __ PushXRegList(kSafepointSavedRegisters);
1613 // Pass the function and deoptimization type to the runtime system.
1614 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1615 __ PopXRegList(kSafepointSavedRegisters);
1616 }
1617
1618 // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
1619 __ Drop(1);
1620
1621 // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
1622 // into lr before it jumps here.
1623 __ Br(lr);
1624 }
1625
1626
1627 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1628 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1629 }
1630
1631
1632 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1633 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1634 }
1635
1636
1637 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1638 Deoptimizer::BailoutType type) {
1639 {
1640 FrameScope scope(masm, StackFrame::INTERNAL);
1641 // Pass the deoptimization type to the runtime system.
1642 __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
1643 __ Push(x0);
1644 __ CallRuntime(Runtime::kNotifyDeoptimized);
1645 }
1646
1647 // Get the full codegen state from the stack and untag it.
1648 Register state = x6;
1649 __ Peek(state, 0);
1650 __ SmiUntag(state);
1651
1652 // Switch on the state.
1653 Label with_tos_register, unknown_state;
1654 __ CompareAndBranch(state,
1655 static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS),
1656 ne, &with_tos_register);
1657 __ Drop(1); // Remove state.
1658 __ Ret();
1659
1660 __ Bind(&with_tos_register);
1661 // Reload TOS register.
1662 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), x0.code());
1663 __ Peek(x0, kPointerSize);
1664 __ CompareAndBranch(state,
1665 static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER),
1666 ne, &unknown_state);
1667 __ Drop(2); // Remove state and TOS.
1668 __ Ret();
1669
1670 __ Bind(&unknown_state);
1671 __ Abort(kInvalidFullCodegenState);
1672 }
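
// The bailout-state dispatch above, rendered as plain C++ over the frame
// slots; UntagSmi() and Abort() are hypothetical helpers and the block is
// not compiled:
#if 0
intptr_t* HandleBailoutState(intptr_t* sp, intptr_t* accumulator) {
  int state = UntagSmi(sp[0]);
  if (state == static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)) {
    return sp + 1;  // Drop the state slot and return.
  }
  if (state == static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)) {
    *accumulator = sp[1];  // Reload the top-of-stack value into x0.
    return sp + 2;         // Drop the state and TOS slots.
  }
  Abort();  // kInvalidFullCodegenState
}
#endif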
1673
1674
1675 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1676 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1677 }
1678
1679
1680 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1681 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1682 }
1683
1684
1685 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1686 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1687 }
1688
1689
1690 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1691 Register function_template_info,
1692 Register scratch0, Register scratch1,
1693 Register scratch2,
1694 Label* receiver_check_failed) {
1695 Register signature = scratch0;
1696 Register map = scratch1;
1697 Register constructor = scratch2;
1698
1699 // If there is no signature, return the holder.
1700 __ Ldr(signature, FieldMemOperand(function_template_info,
1701 FunctionTemplateInfo::kSignatureOffset));
1702 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
1703 Label receiver_check_passed;
1704 __ B(eq, &receiver_check_passed);
1705
1706 // Walk the prototype chain.
1707 __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1708 Label prototype_loop_start;
1709 __ Bind(&prototype_loop_start);
1710
1711 // Get the constructor, if any.
1712 __ GetMapConstructor(constructor, map, x16, x16);
1713 __ Cmp(x16, Operand(JS_FUNCTION_TYPE));
1714 Label next_prototype;
1715 __ B(ne, &next_prototype);
1716 Register type = constructor;
1717 __ Ldr(type,
1718 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1719 __ Ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1720
1721 // Loop through the chain of inheriting function templates.
1722 Label function_template_loop;
1723 __ Bind(&function_template_loop);
1724
1725 // If the signatures match, we have a compatible receiver.
1726 __ Cmp(signature, type);
1727 __ B(eq, &receiver_check_passed);
1728
1729 // If the current type is not a FunctionTemplateInfo, load the next prototype
1730 // in the chain.
1731 __ JumpIfSmi(type, &next_prototype);
1732 __ CompareObjectType(type, x16, x17, FUNCTION_TEMPLATE_INFO_TYPE);
1733 __ B(ne, &next_prototype);
1734
1735 // Otherwise load the parent function template and iterate.
1736 __ Ldr(type,
1737 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1738 __ B(&function_template_loop);
1739
1740 // Load the next prototype.
1741 __ Bind(&next_prototype);
1742 __ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset));
1743 __ Tst(x16, Operand(Map::HasHiddenPrototype::kMask));
1744 __ B(eq, receiver_check_failed);
1745 __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1746 __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1747 // Iterate.
1748 __ B(&prototype_loop_start);
1749
1750 __ Bind(&receiver_check_passed);
1751 }
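
// The walk above as a C++ sketch (accessors are hypothetical; not compiled):
// walk the receiver's prototype chain, and at each step walk the chain of
// inheriting function templates looking for the required signature.
#if 0
bool IsCompatibleReceiver(Object* receiver, FunctionTemplateInfo* info) {
  Object* signature = info->signature();
  if (signature->IsUndefined()) return true;  // No signature always passes.
  for (Map* map = receiver->map();;) {
    // Function data of the map's constructor, if it is a JSFunction.
    Object* type = MapConstructorFunctionData(map);
    for (;;) {
      if (type == signature) return true;  // Signatures match.
      if (!type->IsFunctionTemplateInfo()) break;
      type = FunctionTemplateInfo::cast(type)->parent_template();
    }
    if (!map->has_hidden_prototype()) return false;  // Chain exhausted.
    receiver = map->prototype();
    map = receiver->map();
  }
}
#endif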
1752
1753
1754 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1755 // ----------- S t a t e -------------
1756 // -- x0 : number of arguments excluding receiver
1757 // -- x1 : callee
1758 // -- lr : return address
1759 // -- sp[0] : last argument
1760 // -- ...
1761 // -- sp[8 * (argc - 1)] : first argument
1762 // -- sp[8 * argc] : receiver
1763 // -----------------------------------
1764
1765 // Load the FunctionTemplateInfo.
1766 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1767 __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
1768
1769 // Do the compatible receiver check.
1770 Label receiver_check_failed;
1771 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
1772 CompatibleReceiverCheck(masm, x2, x3, x4, x5, x6, &receiver_check_failed);
1773
1774 // Get the callback offset from the FunctionTemplateInfo, and jump to the
1775 // beginning of the code.
1776 __ Ldr(x4, FieldMemOperand(x3, FunctionTemplateInfo::kCallCodeOffset));
1777 __ Ldr(x4, FieldMemOperand(x4, CallHandlerInfo::kFastHandlerOffset));
1778 __ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag));
1779 __ Jump(x4);
1780
1781 // Compatible receiver check failed: throw an Illegal Invocation exception.
1782 __ Bind(&receiver_check_failed);
1783 // Drop the arguments (including the receiver).
1784 __ Add(x0, x0, Operand(1));
1785 __ Drop(x0);
1786 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1787 }
1788
1789
1790 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1791 // Lookup the function in the JavaScript frame.
1792 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1793 {
1794 FrameScope scope(masm, StackFrame::INTERNAL);
1795 // Pass function as argument.
1796 __ Push(x0);
1797 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1798 }
1799
1800 // If the code object is null, just return to the unoptimized code.
1801 Label skip;
1802 __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
1803 __ Ret();
1804
1805 __ Bind(&skip);
1806
1807 // Load deoptimization data from the code object.
1808 // <deopt_data> = <code>[#deoptimization_data_offset]
1809 __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1810
1811 // Load the OSR entrypoint offset from the deoptimization data.
1812 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1813 __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
1814 DeoptimizationInputData::kOsrPcOffsetIndex)));
1815
1816 // Compute the target address = code_obj + header_size + osr_offset
1817 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1818 __ Add(x0, x0, x1);
1819 __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);
1820
1821 // And "return" to the OSR entry point of the function.
1822 __ Ret();
1823 }
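
// The target address computed above is, in effect (sketch, not compiled):
#if 0
Address entry = code_obj + Code::kHeaderSize - kHeapObjectTag + osr_pc_offset;
#endif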
1824
1825
1826 // static
1827 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
1828 int field_index) {
1829 // ----------- S t a t e -------------
1830 // -- x0 : number of arguments
1831 // -- x1 : function
1832 // -- cp : context
1833 // -- lr : return address
1834 // -- jssp[0] : receiver
1835 // -----------------------------------
1836 ASM_LOCATION("Builtins::Generate_DatePrototype_GetField");
1837
1838 // 1. Pop receiver into x0 and check that it's actually a JSDate object.
1839 Label receiver_not_date;
1840 {
1841 __ Pop(x0);
1842 __ JumpIfSmi(x0, &receiver_not_date);
1843 __ JumpIfNotObjectType(x0, x2, x3, JS_DATE_TYPE, &receiver_not_date);
1844 }
1845
1846 // 2. Load the specified date field, falling back to the runtime as necessary.
1847 if (field_index == JSDate::kDateValue) {
1848 __ Ldr(x0, FieldMemOperand(x0, JSDate::kValueOffset));
1849 } else {
1850 if (field_index < JSDate::kFirstUncachedField) {
1851 Label stamp_mismatch;
1852 __ Mov(x1, ExternalReference::date_cache_stamp(masm->isolate()));
1853 __ Ldr(x1, MemOperand(x1));
1854 __ Ldr(x2, FieldMemOperand(x0, JSDate::kCacheStampOffset));
1855 __ Cmp(x1, x2);
1856 __ B(ne, &stamp_mismatch);
1857 __ Ldr(x0, FieldMemOperand(
1858 x0, JSDate::kValueOffset + field_index * kPointerSize));
1859 __ Ret();
1860 __ Bind(&stamp_mismatch);
1861 }
1862 FrameScope scope(masm, StackFrame::INTERNAL);
1863 __ Mov(x1, Smi::FromInt(field_index));
1864 __ CallCFunction(
1865 ExternalReference::get_date_field_function(masm->isolate()), 2);
1866 }
1867 __ Ret();
1868
1869 // 3. Raise a TypeError if the receiver is not a date.
1870 __ Bind(&receiver_not_date);
1871 {
1872 FrameScope scope(masm, StackFrame::MANUAL);
1873 __ Push(x0);
1874 __ Mov(x0, Smi::FromInt(0));
1875 __ EnterBuiltinFrame(cp, x1, x0);
1876 __ CallRuntime(Runtime::kThrowNotDateError);
1877 }
1878 }
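
// The date-field fast path above as a C++ sketch (accessors hypothetical,
// not compiled): cached fields are valid only while the isolate-wide date
// cache stamp still matches the stamp recorded on the JSDate.
#if 0
Object* GetDateField(JSDate* date, int field_index, Isolate* isolate) {
  if (field_index == JSDate::kDateValue) return date->value();
  if (field_index < JSDate::kFirstUncachedField &&
      date->cache_stamp() == isolate->date_cache_stamp()) {
    return date->cached_field(field_index);  // Fast path: cache still valid.
  }
  // Slow path: recompute through the same C function the stub calls.
  return CallGetDateField(date, field_index);
}
#endif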
1879
1880 // static
1881 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1882 // ----------- S t a t e -------------
1883 // -- x0 : argc
1884 // -- jssp[0] : argArray (if argc == 2)
1885 // -- jssp[8] : thisArg (if argc >= 1)
1886 // -- jssp[16] : receiver
1887 // -----------------------------------
1888 ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");
1889
1890 Register argc = x0;
1891 Register arg_array = x0;
1892 Register receiver = x1;
1893 Register this_arg = x2;
1894 Register undefined_value = x3;
1895 Register null_value = x4;
1896
1897 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
1898 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1899
1900 // 1. Load receiver into x1, argArray into x0 (if present), remove all
1901 // arguments from the stack (including the receiver), and push thisArg (if
1902 // present) instead.
1903 {
1904 // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
1905 // consistent state for a simple pop operation.
1906 __ Claim(2);
1907 __ Drop(argc);
1908
1909 // ----------- S t a t e -------------
1910 // -- x0 : argc
1911 // -- jssp[0] : argArray (dummy value if argc <= 1)
1912 // -- jssp[8] : thisArg (dummy value if argc == 0)
1913 // -- jssp[16] : receiver
1914 // -----------------------------------
1915 __ Cmp(argc, 1);
1916 __ Pop(arg_array, this_arg); // Overwrites argc.
1917 __ CmovX(this_arg, undefined_value, lo); // undefined if argc == 0.
1918 __ CmovX(arg_array, undefined_value, ls); // undefined if argc <= 1.
1919
1920 __ Peek(receiver, 0);
1921 __ Poke(this_arg, 0);
1922 }
1923
1924 // ----------- S t a t e -------------
1925 // -- x0 : argArray
1926 // -- x1 : receiver
1927 // -- x3 : undefined root value
1928 // -- jssp[0] : thisArg
1929 // -----------------------------------
1930
1931 // 2. Make sure the receiver is actually callable.
1932 Label receiver_not_callable;
1933 __ JumpIfSmi(receiver, &receiver_not_callable);
1934 __ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset));
1935 __ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset));
1936 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable,
1937 &receiver_not_callable);
1938
1939 // 3. Tail call with no arguments if argArray is null or undefined.
1940 Label no_arguments;
1941 __ Cmp(arg_array, null_value);
1942 __ Ccmp(arg_array, undefined_value, ZFlag, ne);
1943 __ B(eq, &no_arguments);
1944
1945 // 4a. Apply the receiver to the given argArray (passing undefined for
1946 // new.target in x3).
1947 DCHECK(undefined_value.Is(x3));
1948 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1949
1950 // 4b. The argArray is either null or undefined, so we tail call without any
1951 // arguments to the receiver.
1952 __ Bind(&no_arguments);
1953 {
1954 __ Mov(x0, 0);
1955 DCHECK(receiver.Is(x1));
1956 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1957 }
1958
1959 // 4c. The receiver is not callable, throw an appropriate TypeError.
1960 __ Bind(&receiver_not_callable);
1961 {
1962 __ Poke(receiver, 0);
1963 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1964 }
1965 }
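
// What the shuffle above implements, as a C++ sketch of the spec steps for
// Function.prototype.apply (helpers hypothetical, not compiled):
#if 0
Object* FunctionPrototypeApply(Object* receiver, Object* this_arg,
                               Object* arg_array) {
  if (!IsCallable(receiver)) return ThrowApplyNonFunction(receiver);
  if (IsNullOrUndefined(arg_array)) {
    return Call(receiver, this_arg);  // No arguments to spread.
  }
  return Apply(receiver, this_arg, arg_array);  // Builtins::Apply path.
}
#endif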
1966
1967
1968 // static
1969 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1970 Register argc = x0;
1971 Register function = x1;
1972 Register scratch1 = x10;
1973 Register scratch2 = x11;
1974
1975 ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");
1976
1977 // 1. Make sure we have at least one argument.
1978 {
1979 Label done;
1980 __ Cbnz(argc, &done);
1981 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
1982 __ Push(scratch1);
1983 __ Mov(argc, 1);
1984 __ Bind(&done);
1985 }
1986
1987 // 2. Get the callable to call (passed as receiver) from the stack.
1988 __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
1989
1990 // 3. Shift arguments and return address one slot down on the stack
1991 // (overwriting the original receiver). Adjust argument count to make
1992 // the original first argument the new receiver.
1993 {
1994 Label loop;
1995 // Calculate the copy start address (destination). Copy end address is jssp.
1996 __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
1997 __ Sub(scratch1, scratch2, kPointerSize);
1998
1999 __ Bind(&loop);
2000 __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
2001 __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
2002 __ Cmp(scratch1, jssp);
2003 __ B(ge, &loop);
2004 // Adjust the actual number of arguments and remove the top element
2005 // (which is a copy of the last argument).
2006 __ Sub(argc, argc, 1);
2007 __ Drop(1);
2008 }
2009
2010 // 4. Call the callable.
2011 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2012 }
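
// The stack rewrite above as a C++ sketch (helpers hypothetical, not
// compiled): the first argument, if present, becomes the receiver and the
// remaining arguments shift down one slot.
#if 0
Object* FunctionPrototypeCall(Object* function, std::vector<Object*> args) {
  Object* this_arg = args.empty() ? Undefined() : args.front();
  if (!args.empty()) args.erase(args.begin());  // Shift arguments down.
  return Call(function, this_arg, args);
}
#endif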
2013
2014
2015 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2016 // ----------- S t a t e -------------
2017 // -- x0 : argc
2018 // -- jssp[0] : argumentsList (if argc == 3)
2019 // -- jssp[8] : thisArgument (if argc >= 2)
2020 // -- jssp[16] : target (if argc >= 1)
2021 // -- jssp[24] : receiver
2022 // -----------------------------------
2023 ASM_LOCATION("Builtins::Generate_ReflectApply");
2024
2025 Register argc = x0;
2026 Register arguments_list = x0;
2027 Register target = x1;
2028 Register this_argument = x2;
2029 Register undefined_value = x3;
2030
2031 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
2032
2033 // 1. Load target into x1 (if present), argumentsList into x0 (if present),
2034 // remove all arguments from the stack (including the receiver), and push
2035 // thisArgument (if present) instead.
2036 {
2037 // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
2038 // consistent state for a simple pop operation.
2039 __ Claim(3);
2040 __ Drop(argc);
2041
2042 // ----------- S t a t e -------------
2043 // -- x0 : argc
2044 // -- jssp[0] : argumentsList (dummy value if argc <= 2)
2045 // -- jssp[8] : thisArgument (dummy value if argc <= 1)
2046 // -- jssp[16] : target (dummy value if argc == 0)
2047 // -- jssp[24] : receiver
2048 // -----------------------------------
2049 __ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero.
2050 __ Pop(arguments_list, this_argument, target); // Overwrites argc.
2051 __ CmovX(target, undefined_value, eq); // undefined if argc == 0.
2052 __ Cmp(x10, 2);
2053 __ CmovX(this_argument, undefined_value, lo); // undefined if argc <= 1.
2054 __ CmovX(arguments_list, undefined_value, ls); // undefined if argc <= 2.
2055
2056 __ Poke(this_argument, 0); // Overwrite receiver.
2057 }
2058
2059 // ----------- S t a t e -------------
2060 // -- x0 : argumentsList
2061 // -- x1 : target
2062 // -- jssp[0] : thisArgument
2063 // -----------------------------------
2064
2065 // 2. Make sure the target is actually callable.
2066 Label target_not_callable;
2067 __ JumpIfSmi(target, &target_not_callable);
2068 __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
2069 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
2070 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable);
2071
2072 // 3a. Apply the target to the given argumentsList (passing undefined for
2073 // new.target in x3).
2074 DCHECK(undefined_value.Is(x3));
2075 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2076
2077 // 3b. The target is not callable, throw an appropriate TypeError.
2078 __ Bind(&target_not_callable);
2079 {
2080 __ Poke(target, 0);
2081 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2082 }
2083 }
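
// Reflect.apply as a C++ sketch of the checks above (helpers hypothetical,
// not compiled); unlike Function.prototype.apply there is no null/undefined
// shortcut for the arguments list:
#if 0
Object* ReflectApply(Object* target, Object* this_argument,
                     Object* arguments_list) {
  if (!IsCallable(target)) return ThrowApplyNonFunction(target);
  return Apply(target, this_argument, arguments_list);
}
#endif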
2084
2085
2086 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2087 // ----------- S t a t e -------------
2088 // -- x0 : argc
2089 // -- jssp[0] : new.target (optional)
2090 // -- jssp[8] : argumentsList
2091 // -- jssp[16] : target
2092 // -- jssp[24] : receiver
2093 // -----------------------------------
2094 ASM_LOCATION("Builtins::Generate_ReflectConstruct");
2095
2096 Register argc = x0;
2097 Register arguments_list = x0;
2098 Register target = x1;
2099 Register new_target = x3;
2100 Register undefined_value = x4;
2101
2102 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
2103
2104 // 1. Load target into x1 (if present), argumentsList into x0 (if present),
2105 // new.target into x3 (if present, otherwise use target), remove all
2106 // arguments from the stack (including the receiver), and push thisArgument
2107 // (if present) instead.
2108 {
2109 // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
2110 // consistent state for a simple pop operation.
2111 __ Claim(3);
2112 __ Drop(argc);
2113
2114 // ----------- S t a t e -------------
2115 // -- x0 : argc
2116 // -- jssp[0] : new.target (dummy value if argc <= 2)
2117 // -- jssp[8] : argumentsList (dummy value if argc <= 1)
2118 // -- jssp[16] : target (dummy value if argc == 0)
2119 // -- jssp[24] : receiver
2120 // -----------------------------------
2121 __ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero.
2122 __ Pop(new_target, arguments_list, target); // Overwrites argc.
2123 __ CmovX(target, undefined_value, eq); // undefined if argc == 0.
2124 __ Cmp(x10, 2);
2125 __ CmovX(arguments_list, undefined_value, lo); // undefined if argc <= 1.
2126 __ CmovX(new_target, target, ls); // target if argc <= 2.
2127
2128 __ Poke(undefined_value, 0); // Overwrite receiver.
2129 }
2130
2131 // ----------- S t a t e -------------
2132 // -- x0 : argumentsList
2133 // -- x1 : target
2134 // -- x3 : new.target
2135 // -- jssp[0] : receiver (undefined)
2136 // -----------------------------------
2137
2138 // 2. Make sure the target is actually a constructor.
2139 Label target_not_constructor;
2140 __ JumpIfSmi(target, &target_not_constructor);
2141 __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
2142 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
2143 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
2144 &target_not_constructor);
2145
2146 // 3. Make sure the new.target is actually a constructor.
2147 Label new_target_not_constructor;
2148 __ JumpIfSmi(new_target, &new_target_not_constructor);
2149 __ Ldr(x10, FieldMemOperand(new_target, HeapObject::kMapOffset));
2150 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
2151 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
2152 &new_target_not_constructor);
2153
2154 // 4a. Construct the target with the given new.target and argumentsList.
2155 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2156
2157 // 4b. The target is not a constructor, throw an appropriate TypeError.
2158 __ Bind(&target_not_constructor);
2159 {
2160 __ Poke(target, 0);
2161 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2162 }
2163
2164 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
2165 __ Bind(&new_target_not_constructor);
2166 {
2167 __ Poke(new_target, 0);
2168 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2169 }
2170 }
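
// Reflect.construct as a C++ sketch of the checks above (helpers
// hypothetical, not compiled); the register shuffle defaults new.target to
// target when fewer than three arguments are passed:
#if 0
Object* ReflectConstruct(Object* target, Object* arguments_list,
                         Object* new_target) {
  if (!IsConstructor(target)) return ThrowCalledNonCallable(target);
  if (!IsConstructor(new_target)) return ThrowCalledNonCallable(new_target);
  return Construct(target, arguments_list, new_target);
}
#endif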
2171
2172
2173 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
2174 Label* stack_overflow) {
2175 // ----------- S t a t e -------------
2176 // -- x0 : actual number of arguments
2177 // -- x1 : function (passed through to callee)
2178 // -- x2 : expected number of arguments
2179 // -- x3 : new target (passed through to callee)
2180 // -----------------------------------
2181 // Check the stack for overflow.
2182 // We are not trying to catch interruptions (e.g. debug break and
2183 // preemption) here, so the "real stack limit" is checked.
2184 Label enough_stack_space;
2185 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
2186 // Make x10 the space we have left. The stack might already be overflowed
2187 // here which will cause x10 to become negative.
2188 __ Sub(x10, jssp, x10);
2189 // Check if the arguments will overflow the stack.
2190 __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
2191 __ B(le, stack_overflow);
2192 }
2193
2194
2195 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2196 __ SmiTag(x10, x0);
2197 __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2198 __ Push(lr, fp);
2199 __ Push(x11, x1, x10);
2200 __ Add(fp, jssp,
2201 StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
2202 }
2203
2204
2205 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2206 // ----------- S t a t e -------------
2207 // -- x0 : result being passed through
2208 // -----------------------------------
2209 // Get the number of arguments passed (as a smi), tear down the frame and
2210 // then drop the parameters and the receiver.
2211 __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2212 kPointerSize)));
2213 __ Mov(jssp, fp);
2214 __ Pop(fp, lr);
2215 __ DropBySMI(x10, kXRegSize);
2216 __ Drop(1);
2217 }
2218
2219
2220 // static
2221 void Builtins::Generate_Apply(MacroAssembler* masm) {
2222 // ----------- S t a t e -------------
2223 // -- x0 : argumentsList
2224 // -- x1 : target
2225 // -- x3 : new.target (checked to be constructor or undefined)
2226 // -- jssp[0] : thisArgument
2227 // -----------------------------------
2228
2229 Register arguments_list = x0;
2230 Register target = x1;
2231 Register new_target = x3;
2232
2233 Register args = x0;
2234 Register len = x2;
2235
2236 // Create the list of arguments from the array-like argumentsList.
2237 {
2238 Label create_arguments, create_array, create_runtime, done_create;
2239 __ JumpIfSmi(arguments_list, &create_runtime);
2240
2241 // Load native context.
2242 Register native_context = x4;
2243 __ Ldr(native_context, NativeContextMemOperand());
2244
2245 // Load the map of argumentsList.
2246 Register arguments_list_map = x2;
2247 __ Ldr(arguments_list_map,
2248 FieldMemOperand(arguments_list, HeapObject::kMapOffset));
2249
2250 // Check if argumentsList is an (unmodified) arguments object.
2251 __ Ldr(x10, ContextMemOperand(native_context,
2252 Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2253 __ Ldr(x11, ContextMemOperand(native_context,
2254 Context::STRICT_ARGUMENTS_MAP_INDEX));
2255 __ Cmp(arguments_list_map, x10);
2256 __ Ccmp(arguments_list_map, x11, ZFlag, ne);
2257 __ B(eq, &create_arguments);
2258
2259 // Check if argumentsList is a fast JSArray.
2260 __ CompareInstanceType(arguments_list_map, native_context, JS_ARRAY_TYPE);
2261 __ B(eq, &create_array);
2262
2263 // Ask the runtime to create the list (actually a FixedArray).
2264 __ Bind(&create_runtime);
2265 {
2266 FrameScope scope(masm, StackFrame::INTERNAL);
2267 __ Push(target, new_target, arguments_list);
2268 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2269 __ Pop(new_target, target);
2270 __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
2271 FixedArray::kLengthOffset));
2272 }
2273 __ B(&done_create);
2274
2275 // Try to create the list from an arguments object.
2276 __ Bind(&create_arguments);
2277 __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
2278 JSArgumentsObject::kLengthOffset));
2279 __ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
2280 __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
2281 __ CompareAndBranch(len, x11, ne, &create_runtime);
2282 __ Mov(args, x10);
2283 __ B(&done_create);
2284
2285 // Try to create the list from a JSArray object.
2286 __ Bind(&create_array);
2287 __ Ldr(x10, FieldMemOperand(arguments_list_map, Map::kBitField2Offset));
2288 __ DecodeField<Map::ElementsKindBits>(x10);
2289 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2290 STATIC_ASSERT(FAST_ELEMENTS == 2);
2291 // Branch for anything that's not FAST_{SMI_}ELEMENTS.
2292 __ TestAndBranchIfAnySet(x10, ~FAST_ELEMENTS, &create_runtime);
2293 __ Ldrsw(len,
2294 UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
2295 __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset));
2296
2297 __ Bind(&done_create);
2298 }
2299
2300 // Check for stack overflow.
2301 {
2302 // Check the stack for overflow. We are not trying to catch interruptions
2303 // (i.e. debug break and preemption) here, so check the "real stack limit".
2304 Label done;
2305 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
2306 // Make x10 the space we have left. The stack might already be overflowed
2307 // here which will cause x10 to become negative.
2308 __ Sub(x10, masm->StackPointer(), x10);
2309 // Check if the arguments will overflow the stack.
2310 __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2));
2311 __ B(gt, &done); // Signed comparison.
2312 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2313 __ Bind(&done);
2314 }
2315
2316 // ----------- S t a t e -------------
2317 // -- x0 : args (a FixedArray built from argumentsList)
2318 // -- x1 : target
2319 // -- x2 : len (number of elements to push from args)
2320 // -- x3 : new.target (checked to be constructor or undefined)
2321 // -- jssp[0] : thisArgument
2322 // -----------------------------------
2323
2324 // Push arguments onto the stack (thisArgument is already on the stack).
2325 {
2326 Label done, loop;
2327 Register src = x4;
2328
2329 __ Add(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
2330 __ Mov(x0, len); // The 'len' argument for Call() or Construct().
2331 __ Cbz(len, &done);
2332 __ Claim(len);
2333 __ Bind(&loop);
2334 __ Sub(len, len, 1);
2335 __ Ldr(x10, MemOperand(src, kPointerSize, PostIndex));
2336 __ Poke(x10, Operand(len, LSL, kPointerSizeLog2));
2337 __ Cbnz(len, &loop);
2338 __ Bind(&done);
2339 }
2340
2341 // ----------- S t a t e -------------
2342 // -- x0 : argument count (len)
2343 // -- x1 : target
2344 // -- x3 : new.target (checked to be constructor or undefined)
2345 // -- jssp[0] : args[len-1]
2346 // -- jssp[8] : args[len-2]
2347 // ... : ...
2348 // -- jssp[8*(len-2)] : args[1]
2349 // -- jssp[8*(len-1)] : args[0]
2350 // -----------------------------------
2351
2352 // Dispatch to Call or Construct depending on whether new.target is undefined.
2353 {
2354 __ CompareRoot(new_target, Heap::kUndefinedValueRootIndex);
2355 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
2356 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2357 }
2358 }
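
// The overall shape of Builtins::Generate_Apply above, as a C++ sketch
// (helpers hypothetical, not compiled):
#if 0
Object* ApplyBuiltin(Object* target, Object* new_target,
                     Object* arguments_list) {
  // Unmodified arguments object and fast JSArray take fast paths; everything
  // else goes through Runtime::kCreateListFromArrayLike.
  FixedArray* args = CreateArgumentsList(arguments_list);
  CheckStackSpaceOrThrow(args->length());
  PushArguments(args);  // thisArgument is already on the stack.
  if (new_target->IsUndefined()) return Call(target);
  return Construct(target, new_target);
}
#endif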
2359
2360 namespace {
2361
2362 // Drops top JavaScript frame and an arguments adaptor frame below it (if
2363 // present) preserving all the arguments prepared for current call.
2364 // Does nothing if debugger is currently active.
2365 // ES6 14.6.3. PrepareForTailCall
2366 //
2367 // Stack structure for the function g() tail calling f():
2368 //
2369 // ------- Caller frame: -------
2370 // | ...
2371 // | g()'s arg M
2372 // | ...
2373 // | g()'s arg 1
2374 // | g()'s receiver arg
2375 // | g()'s caller pc
2376 // ------- g()'s frame: -------
2377 // | g()'s caller fp <- fp
2378 // | g()'s context
2379 // | function pointer: g
2380 // | -------------------------
2381 // | ...
2382 // | ...
2383 // | f()'s arg N
2384 // | ...
2385 // | f()'s arg 1
2386 // | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
2387 // ----------------------
2388 //
2389 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2390 Register scratch1, Register scratch2,
2391 Register scratch3) {
2392 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2393 Comment cmnt(masm, "[ PrepareForTailCall");
2394
2395 // Prepare for tail call only if ES2015 tail call elimination is enabled.
2396 Label done;
2397 ExternalReference is_tail_call_elimination_enabled =
2398 ExternalReference::is_tail_call_elimination_enabled_address(
2399 masm->isolate());
2400 __ Mov(scratch1, Operand(is_tail_call_elimination_enabled));
2401 __ Ldrb(scratch1, MemOperand(scratch1));
2402 __ Cmp(scratch1, Operand(0));
2403 __ B(eq, &done);
2404
2405 // Drop possible interpreter handler/stub frame.
2406 {
2407 Label no_interpreter_frame;
2408 __ Ldr(scratch3,
2409 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
2410 __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
2411 __ B(ne, &no_interpreter_frame);
2412 __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2413 __ Bind(&no_interpreter_frame);
2414 }
2415
2416 // Check if next frame is an arguments adaptor frame.
2417 Register caller_args_count_reg = scratch1;
2418 Label no_arguments_adaptor, formal_parameter_count_loaded;
2419 __ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2420 __ Ldr(scratch3,
2421 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
2422 __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2423 __ B(ne, &no_arguments_adaptor);
2424
2425 // Drop current frame and load arguments count from arguments adaptor frame.
2426 __ Mov(fp, scratch2);
2427 __ Ldr(caller_args_count_reg,
2428 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2429 __ SmiUntag(caller_args_count_reg);
2430 __ B(&formal_parameter_count_loaded);
2431
2432 __ Bind(&no_arguments_adaptor);
2433 // Load caller's formal parameter count
2434 __ Ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2435 __ Ldr(scratch1,
2436 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2437 __ Ldrsw(caller_args_count_reg,
2438 FieldMemOperand(scratch1,
2439 SharedFunctionInfo::kFormalParameterCountOffset));
2440 __ Bind(&formal_parameter_count_loaded);
2441
2442 ParameterCount callee_args_count(args_reg);
2443 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2444 scratch3);
2445 __ Bind(&done);
2446 }
2447 } // namespace
2448
2449 // static
2450 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2451 ConvertReceiverMode mode,
2452 TailCallMode tail_call_mode) {
2453 ASM_LOCATION("Builtins::Generate_CallFunction");
2454 // ----------- S t a t e -------------
2455 // -- x0 : the number of arguments (not including the receiver)
2456 // -- x1 : the function to call (checked to be a JSFunction)
2457 // -----------------------------------
2458 __ AssertFunction(x1);
2459
2460 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2461 // Check that function is not a "classConstructor".
2462 Label class_constructor;
2463 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
2464 __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
2465 __ TestAndBranchIfAnySet(
2466 w3, (1 << SharedFunctionInfo::kIsDefaultConstructor) |
2467 (1 << SharedFunctionInfo::kIsSubclassConstructor) |
2468 (1 << SharedFunctionInfo::kIsBaseConstructor),
2469 &class_constructor);
2470
2471 // Enter the context of the function; ToObject has to run in the function
2472 // context, and we also need to take the global proxy from the function
2473 // context in case of conversion.
2474 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
2475 // We need to convert the receiver for non-native sloppy mode functions.
2476 Label done_convert;
2477 __ TestAndBranchIfAnySet(w3,
2478 (1 << SharedFunctionInfo::kNative) |
2479 (1 << SharedFunctionInfo::kStrictModeFunction),
2480 &done_convert);
2481 {
2482 // ----------- S t a t e -------------
2483 // -- x0 : the number of arguments (not including the receiver)
2484 // -- x1 : the function to call (checked to be a JSFunction)
2485 // -- x2 : the shared function info.
2486 // -- cp : the function context.
2487 // -----------------------------------
2488
2489 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2490 // Patch receiver to global proxy.
2491 __ LoadGlobalProxy(x3);
2492 } else {
2493 Label convert_to_object, convert_receiver;
2494 __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
2495 __ JumpIfSmi(x3, &convert_to_object);
2496 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2497 __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
2498 __ B(hs, &done_convert);
2499 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2500 Label convert_global_proxy;
2501 __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
2502 &convert_global_proxy);
2503 __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
2504 __ Bind(&convert_global_proxy);
2505 {
2506 // Patch receiver to global proxy.
2507 __ LoadGlobalProxy(x3);
2508 }
2509 __ B(&convert_receiver);
2510 }
2511 __ Bind(&convert_to_object);
2512 {
2513 // Convert receiver using ToObject.
2514 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2515 // in the fast case? (fall back to AllocateInNewSpace?)
2516 FrameScope scope(masm, StackFrame::INTERNAL);
2517 __ SmiTag(x0);
2518 __ Push(x0, x1);
2519 __ Mov(x0, x3);
2520 ToObjectStub stub(masm->isolate());
2521 __ CallStub(&stub);
2522 __ Mov(x3, x0);
2523 __ Pop(x1, x0);
2524 __ SmiUntag(x0);
2525 }
2526 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
2527 __ Bind(&convert_receiver);
2528 }
2529 __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
2530 }
2531 __ Bind(&done_convert);
2532
2533 // ----------- S t a t e -------------
2534 // -- x0 : the number of arguments (not including the receiver)
2535 // -- x1 : the function to call (checked to be a JSFunction)
2536 // -- x2 : the shared function info.
2537 // -- cp : the function context.
2538 // -----------------------------------
2539
2540 if (tail_call_mode == TailCallMode::kAllow) {
2541 PrepareForTailCall(masm, x0, x3, x4, x5);
2542 }
2543
2544 __ Ldrsw(
2545 x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
2546 ParameterCount actual(x0);
2547 ParameterCount expected(x2);
2548 __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION,
2549 CheckDebugStepCallWrapper());
2550
2551 // The function is a "classConstructor", need to raise an exception.
2552 __ Bind(&class_constructor);
2553 {
2554 FrameScope frame(masm, StackFrame::INTERNAL);
2555 __ Push(x1);
2556 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2557 }
2558 }
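
// The receiver conversion above as a C++ sketch (helpers hypothetical, not
// compiled): only non-native sloppy-mode functions convert their receiver;
// null/undefined become the global proxy, other primitives go through
// ToObject.
#if 0
Object* ConvertReceiver(JSFunction* function, Object* receiver,
                        Isolate* isolate) {
  SharedFunctionInfo* shared = function->shared();
  if (shared->native() || shared->is_strict()) return receiver;  // As-is.
  if (receiver->IsJSReceiver()) return receiver;  // Already an object.
  if (receiver->IsNullOrUndefined()) return isolate->global_proxy();
  return ToObject(isolate, receiver);
}
#endif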
2559
2560
2561 namespace {
2562
2563 void Generate_PushBoundArguments(MacroAssembler* masm) {
2564 // ----------- S t a t e -------------
2565 // -- x0 : the number of arguments (not including the receiver)
2566 // -- x1 : target (checked to be a JSBoundFunction)
2567 // -- x3 : new.target (only in case of [[Construct]])
2568 // -----------------------------------
2569
2570 // Load [[BoundArguments]] into x2 and length of that into x4.
2571 Label no_bound_arguments;
2572 __ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
2573 __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
2574 __ Cmp(x4, 0);
2575 __ B(eq, &no_bound_arguments);
2576 {
2577 // ----------- S t a t e -------------
2578 // -- x0 : the number of arguments (not including the receiver)
2579 // -- x1 : target (checked to be a JSBoundFunction)
2580 // -- x2 : the [[BoundArguments]] (implemented as FixedArray)
2581 // -- x3 : new.target (only in case of [[Construct]])
2582 // -- x4 : the number of [[BoundArguments]]
2583 // -----------------------------------
2584
2585 // Reserve stack space for the [[BoundArguments]].
2586 {
2587 Label done;
2588 __ Claim(x4);
2589 // Check the stack for overflow. We are not trying to catch interruptions
2590 // (i.e. debug break and preemption) here, so check the "real stack
2591 // limit".
2592 __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
2593 __ B(gt, &done); // Signed comparison.
2594 // Restore the stack pointer.
2595 __ Drop(x4);
2596 {
2597 FrameScope scope(masm, StackFrame::MANUAL);
2598 __ EnterFrame(StackFrame::INTERNAL);
2599 __ CallRuntime(Runtime::kThrowStackOverflow);
2600 }
2601 __ Bind(&done);
2602 }
2603
2604 // Relocate arguments down the stack.
2605 {
2606 Label loop, done_loop;
2607 __ Mov(x5, 0);
2608 __ Bind(&loop);
2609 __ Cmp(x5, x0);
2610 __ B(gt, &done_loop);
2611 __ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
2612 __ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
2613 __ Add(x4, x4, 1);
2614 __ Add(x5, x5, 1);
2615 __ B(&loop);
2616 __ Bind(&done_loop);
2617 }
2618
2619 // Copy [[BoundArguments]] to the stack (below the arguments).
2620 {
2621 Label loop;
2622 __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
2623 __ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag);
2624 __ Bind(&loop);
2625 __ Sub(x4, x4, 1);
2626 __ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2));
2627 __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
2628 __ Add(x0, x0, 1);
2629 __ Cmp(x4, 0);
2630 __ B(gt, &loop);
2631 }
2632 }
2633 __ Bind(&no_bound_arguments);
2634 }
2635
2636 } // namespace
2637
2638
2639 // static
2640 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2641 TailCallMode tail_call_mode) {
2642 // ----------- S t a t e -------------
2643 // -- x0 : the number of arguments (not including the receiver)
2644 // -- x1 : the function to call (checked to be a JSBoundFunction)
2645 // -----------------------------------
2646 __ AssertBoundFunction(x1);
2647
2648 if (tail_call_mode == TailCallMode::kAllow) {
2649 PrepareForTailCall(masm, x0, x3, x4, x5);
2650 }
2651
2652 // Patch the receiver to [[BoundThis]].
2653 __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
2654 __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
2655
2656 // Push the [[BoundArguments]] onto the stack.
2657 Generate_PushBoundArguments(masm);
2658
2659 // Call the [[BoundTargetFunction]] via the Call builtin.
2660 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
2661 __ Mov(x10,
2662 ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
2663 __ Ldr(x11, MemOperand(x10));
2664 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
2665 __ Br(x12);
2666 }
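
// Calling a bound function, as a C++ sketch of the steps above (helpers
// hypothetical, not compiled): patch the receiver to [[BoundThis]], splice
// [[BoundArguments]] in below the call arguments, then call the bound target.
#if 0
Object* CallBoundFunction(JSBoundFunction* function,
                          std::vector<Object*> args) {
  std::vector<Object*> all = ToVector(function->bound_arguments());
  all.insert(all.end(), args.begin(), args.end());
  return Call(function->bound_target_function(), function->bound_this(), all);
}
#endif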
2667
2668
2669 // static
2670 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2671 TailCallMode tail_call_mode) {
2672 // ----------- S t a t e -------------
2673 // -- x0 : the number of arguments (not including the receiver)
2674 // -- x1 : the target to call (can be any Object).
2675 // -----------------------------------
2676
2677 Label non_callable, non_function, non_smi;
2678 __ JumpIfSmi(x1, &non_callable);
2679 __ Bind(&non_smi);
2680 __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
2681 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2682 RelocInfo::CODE_TARGET, eq);
2683 __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
2684 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2685 RelocInfo::CODE_TARGET, eq);
2686
2687 // Check if target has a [[Call]] internal method.
2688 __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
2689 __ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);
2690
2691 __ Cmp(x5, JS_PROXY_TYPE);
2692 __ B(ne, &non_function);
2693
2694 // 0. Prepare for tail call if necessary.
2695 if (tail_call_mode == TailCallMode::kAllow) {
2696 PrepareForTailCall(masm, x0, x3, x4, x5);
2697 }
2698
2699 // 1. Runtime fallback for Proxy [[Call]].
2700 __ Push(x1);
2701 // Increase the arguments size to include the pushed function and the
2702 // existing receiver on the stack.
2703 __ Add(x0, x0, Operand(2));
2704 // Tail-call to the runtime.
2705 __ JumpToExternalReference(
2706 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2707
2708 // 2. Call to something else, which might have a [[Call]] internal method (if
2709 // not we raise an exception).
2710 __ Bind(&non_function);
2711 // Overwrite the original receiver with the (original) target.
2712 __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
2713 // Let the "call_as_function_delegate" take care of the rest.
2714 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
2715 __ Jump(masm->isolate()->builtins()->CallFunction(
2716 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2717 RelocInfo::CODE_TARGET);
2718
2719 // 3. Call to something that is not callable.
2720 __ Bind(&non_callable);
2721 {
2722 FrameScope scope(masm, StackFrame::INTERNAL);
2723 __ Push(x1);
2724 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2725 }
2726 }
2727
2728
2729 // static
2730 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2731 // ----------- S t a t e -------------
2732 // -- x0 : the number of arguments (not including the receiver)
2733 // -- x1 : the constructor to call (checked to be a JSFunction)
2734 // -- x3 : the new target (checked to be a constructor)
2735 // -----------------------------------
2736 __ AssertFunction(x1);
2737
2738 // Calling convention for function specific ConstructStubs require
2739 // x2 to contain either an AllocationSite or undefined.
2740 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
2741
2742 // Tail call to the function-specific construct stub (still in the caller
2743 // context at this point).
2744 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
2745 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
2746 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
2747 __ Br(x4);
2748 }
2749
2750
2751 // static
2752 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2753 // ----------- S t a t e -------------
2754 // -- x0 : the number of arguments (not including the receiver)
2755 // -- x1 : the function to call (checked to be a JSBoundFunction)
2756 // -- x3 : the new target (checked to be a constructor)
2757 // -----------------------------------
2758 __ AssertBoundFunction(x1);
2759
2760 // Push the [[BoundArguments]] onto the stack.
2761 Generate_PushBoundArguments(masm);
2762
2763 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2764 {
2765 Label done;
2766 __ Cmp(x1, x3);
2767 __ B(ne, &done);
2768 __ Ldr(x3,
2769 FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
2770 __ Bind(&done);
2771 }
2772
2773 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2774 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
2775 __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate()));
2776 __ Ldr(x11, MemOperand(x10));
2777 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
2778 __ Br(x12);
2779 }
2780
2781
2782 // static
2783 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2784 // ----------- S t a t e -------------
2785 // -- x0 : the number of arguments (not including the receiver)
2786 // -- x1 : the constructor to call (checked to be a JSProxy)
2787 // -- x3 : the new target (either the same as the constructor or
2788 // the JSFunction on which new was invoked initially)
2789 // -----------------------------------
2790
2791 // Call into the Runtime for Proxy [[Construct]].
2792 __ Push(x1);
2793 __ Push(x3);
2794 // Include the pushed new_target, constructor and the receiver.
2795 __ Add(x0, x0, 3);
2796 // Tail-call to the runtime.
2797 __ JumpToExternalReference(
2798 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2799 }
2800
2801
2802 // static
2803 void Builtins::Generate_Construct(MacroAssembler* masm) {
2804 // ----------- S t a t e -------------
2805 // -- x0 : the number of arguments (not including the receiver)
2806 // -- x1 : the constructor to call (can be any Object)
2807 // -- x3 : the new target (either the same as the constructor or
2808 // the JSFunction on which new was invoked initially)
2809 // -----------------------------------
2810
2811 // Check if target is a Smi.
2812 Label non_constructor;
2813 __ JumpIfSmi(x1, &non_constructor);
2814
2815 // Dispatch based on instance type.
2816 __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
2817 __ Jump(masm->isolate()->builtins()->ConstructFunction(),
2818 RelocInfo::CODE_TARGET, eq);
2819
2820 // Check if target has a [[Construct]] internal method.
2821 __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
2822 __ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor);
2823
2824 // Only dispatch to bound functions after checking whether they are
2825 // constructors.
2826 __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
2827 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
2828 RelocInfo::CODE_TARGET, eq);
2829
2830 // Only dispatch to proxies after checking whether they are constructors.
2831 __ Cmp(x5, JS_PROXY_TYPE);
2832 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
2833 eq);
2834
2835 // Called Construct on an exotic Object with a [[Construct]] internal method.
2836 {
2837 // Overwrite the original receiver with the (original) target.
2838 __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
2839 // Let the "call_as_constructor_delegate" take care of the rest.
2840 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
2841 __ Jump(masm->isolate()->builtins()->CallFunction(),
2842 RelocInfo::CODE_TARGET);
2843 }
2844
2845 // Called Construct on an Object that doesn't have a [[Construct]] internal
2846 // method.
2847 __ Bind(&non_constructor);
2848 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2849 RelocInfo::CODE_TARGET);
2850 }
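
// The dispatch above as a C++ sketch (helpers hypothetical, not compiled):
#if 0
Object* ConstructDispatch(Object* target, Object* new_target) {
  if (target->IsSmi()) return ConstructedNonConstructable(target);
  if (target->IsJSFunction()) return ConstructFunction(target, new_target);
  if (!target->map()->is_constructor())
    return ConstructedNonConstructable(target);
  if (target->IsJSBoundFunction())
    return ConstructBoundFunction(target, new_target);
  if (target->IsJSProxy()) return ConstructProxy(target, new_target);
  // Exotic object with a [[Construct]] internal method: use the delegate.
  return CallAsConstructorDelegate(target);
}
#endif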
2851
2852 // static
2853 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2854 ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
2855 // ----------- S t a t e -------------
2856 // -- x1 : requested object size (untagged)
2857 // -- lr : return address
2858 // -----------------------------------
2859 __ SmiTag(x1);
2860 __ Push(x1);
2861 __ Move(cp, Smi::FromInt(0));
2862 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2863 }
2864
2865 // static
2866 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2867 ASM_LOCATION("Builtins::Generate_AllocateInOldSpace");
2868 // ----------- S t a t e -------------
2869 // -- x1 : requested object size (untagged)
2870 // -- lr : return address
2871 // -----------------------------------
2872 __ SmiTag(x1);
2873 __ Move(x2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2874 __ Push(x1, x2);
2875 __ Move(cp, Smi::FromInt(0));
2876 __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2877 }
2878
2879 // static
2880 void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
2881 // The StringToNumber stub takes one argument in x0.
2882 __ AssertString(x0);
2883
2884 // Check if string has a cached array index.
2885 Label runtime;
2886 __ Ldr(x2, FieldMemOperand(x0, String::kHashFieldOffset));
2887 __ Tst(x2, Operand(String::kContainsCachedArrayIndexMask));
2888 __ B(ne, &runtime);
2889 __ IndexFromHash(x2, x0);
2890 __ Ret();
2891
2892 __ Bind(&runtime);
2893 {
2894 FrameScope frame(masm, StackFrame::INTERNAL);
2895 // Push argument.
2896 __ Push(x0);
2897 // We cannot use a tail call here because this builtin can also be called
2898 // from wasm.
2899 __ CallRuntime(Runtime::kStringToNumber);
2900 }
2901 __ Ret();
2902 }
2903
2904 // static
2905 void Builtins::Generate_ToNumber(MacroAssembler* masm) {
2906 // The ToNumber stub takes one argument in x0.
2907 Label not_smi;
2908 __ JumpIfNotSmi(x0, &not_smi);
2909 __ Ret();
2910 __ Bind(&not_smi);
2911
2912 Label not_heap_number;
2913 __ CompareObjectType(x0, x1, x1, HEAP_NUMBER_TYPE);
2914 // x0: receiver
2915 // x1: receiver instance type
2916 __ B(ne, &not_heap_number);
2917 __ Ret();
2918 __ Bind(&not_heap_number);
2919
2920 __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
2921 RelocInfo::CODE_TARGET);
2922 }
2923
2924 // static
2925 void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
2926 // The NonNumberToNumber stub takes one argument in x0.
2927 __ AssertNotNumber(x0);
2928
2929 Label not_string;
2930 __ CompareObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE);
2931 // x0: receiver
2932 // x1: receiver instance type
2933 __ B(hs, &not_string);
2934 __ Jump(masm->isolate()->builtins()->StringToNumber(),
2935 RelocInfo::CODE_TARGET);
2936 __ Bind(&not_string);
2937
2938 Label not_oddball;
2939 __ Cmp(x1, ODDBALL_TYPE);
2940 __ B(ne, &not_oddball);
2941 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset));
2942 __ Ret();
2943 __ Bind(&not_oddball);
2944 {
2945 FrameScope frame(masm, StackFrame::INTERNAL);
2946 // Push argument.
2947 __ Push(x0);
2948 // We cannot use a tail call here because this builtin can also be called
2949 // from wasm.
2950 __ CallRuntime(Runtime::kToNumber);
2951 }
2952 __ Ret();
2953 }
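
// The ToNumber dispatch implemented by the three stubs above, as a C++
// sketch (helpers hypothetical, not compiled):
#if 0
Object* ToNumberStub(Object* x, Isolate* isolate) {
  if (x->IsSmi() || x->IsHeapNumber()) return x;  // Generate_ToNumber
  if (x->IsString()) {                            // Generate_StringToNumber
    if (HasCachedArrayIndex(x)) return IndexFromHash(x);
    return CallRuntimeStringToNumber(x);  // Not a tail call: wasm callers.
  }
  if (x->IsOddball()) return Oddball::cast(x)->to_number();
  return CallRuntimeToNumber(x);  // Generate_NonNumberToNumber slow path.
}
#endif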
2954
2955 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2956 ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
2957 // ----------- S t a t e -------------
2958 // -- x0 : actual number of arguments
2959 // -- x1 : function (passed through to callee)
2960 // -- x2 : expected number of arguments
2961 // -- x3 : new target (passed through to callee)
2962 // -----------------------------------
2963
2964 Register argc_actual = x0; // Excluding the receiver.
2965 Register argc_expected = x2; // Excluding the receiver.
2966 Register function = x1;
2967 Register code_entry = x10;
2968
2969 Label invoke, dont_adapt_arguments, stack_overflow;
2970
2971 Label enough, too_few;
2972 __ Cmp(argc_actual, argc_expected);
2973 __ B(lt, &too_few);
2974 __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
2975 __ B(eq, &dont_adapt_arguments);
2976
2977 { // Enough parameters: actual >= expected
2978 EnterArgumentsAdaptorFrame(masm);
2979 ArgumentAdaptorStackCheck(masm, &stack_overflow);
2980
2981 Register copy_start = x10;
2982 Register copy_end = x11;
2983 Register copy_to = x12;
2984 Register scratch1 = x13, scratch2 = x14;
2985
2986 __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
2987
2988 // Adjust for fp, lr, and the receiver.
2989 __ Add(copy_start, fp, 3 * kPointerSize);
2990 __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
2991 __ Sub(copy_end, copy_start, scratch2);
2992 __ Sub(copy_end, copy_end, kPointerSize);
2993 __ Mov(copy_to, jssp);
2994
2995 // Claim space for the arguments, the receiver, and one extra slot.
2996 // The extra slot ensures we do not write under jssp. It will be popped
2997 // later.
2998 __ Add(scratch1, scratch2, 2 * kPointerSize);
2999 __ Claim(scratch1, 1);
3000
3001 // Copy the arguments (including the receiver) to the new stack frame.
3002 Label copy_2_by_2;
3003 __ Bind(&copy_2_by_2);
3004 __ Ldp(scratch1, scratch2,
3005 MemOperand(copy_start, -2 * kPointerSize, PreIndex));
3006 __ Stp(scratch1, scratch2,
3007 MemOperand(copy_to, -2 * kPointerSize, PreIndex));
3008 __ Cmp(copy_start, copy_end);
3009 __ B(hi, &copy_2_by_2);
3010
3011 // Correct the space allocated for the extra slot.
3012 __ Drop(1);
3013
3014 __ B(&invoke);
3015 }
3016
3017 { // Too few parameters: actual < expected
3018 __ Bind(&too_few);
3019
3020 Register copy_from = x10;
3021 Register copy_end = x11;
3022 Register copy_to = x12;
3023 Register scratch1 = x13, scratch2 = x14;
3024
3025 EnterArgumentsAdaptorFrame(masm);
3026 ArgumentAdaptorStackCheck(masm, &stack_overflow);
3027
3028 __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
3029 __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);
3030
3031 // Adjust for fp, lr, and the receiver.
3032 __ Add(copy_from, fp, 3 * kPointerSize);
3033 __ Add(copy_from, copy_from, argc_actual);
3034 __ Mov(copy_to, jssp);
3035 __ Sub(copy_end, copy_to, 1 * kPointerSize); // Adjust for the receiver.
3036 __ Sub(copy_end, copy_end, argc_actual);
3037
3038 // Claim space for the arguments, the receiver, and one extra slot.
3039 // The extra slot ensures we do not write under jssp. It will be popped
3040 // later.
3041 __ Add(scratch1, scratch2, 2 * kPointerSize);
3042 __ Claim(scratch1, 1);
3043
3044 // Copy the arguments (including the receiver) to the new stack frame.
3045 Label copy_2_by_2;
3046 __ Bind(&copy_2_by_2);
3047 __ Ldp(scratch1, scratch2,
3048 MemOperand(copy_from, -2 * kPointerSize, PreIndex));
3049 __ Stp(scratch1, scratch2,
3050 MemOperand(copy_to, -2 * kPointerSize, PreIndex));
3051 __ Cmp(copy_to, copy_end);
3052 __ B(hi, &copy_2_by_2);
3053
3054 __ Mov(copy_to, copy_end);
3055
3056 // Fill the remaining expected arguments with undefined.
3057 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
3058 __ Add(copy_end, jssp, kPointerSize);
3059
3060 Label fill;
3061 __ Bind(&fill);
3062 __ Stp(scratch1, scratch1,
3063 MemOperand(copy_to, -2 * kPointerSize, PreIndex));
3064 __ Cmp(copy_to, copy_end);
3065 __ B(hi, &fill);
3066
3067 // Correct the space allocated for the extra slot.
3068 __ Drop(1);
3069 }
3070
3071 // Arguments have been adapted. Now call the entry point.
3072 __ Bind(&invoke);
3073 __ Mov(argc_actual, argc_expected);
3074 // x0 : expected number of arguments
3075 // x1 : function (passed through to callee)
3076 // x3 : new target (passed through to callee)
3077 __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
3078 __ Call(code_entry);
3079
3080 // Store offset of return address for deoptimizer.
3081 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
3082
3083 // Exit frame and return.
3084 LeaveArgumentsAdaptorFrame(masm);
3085 __ Ret();
3086
3087 // Call the entry point without adapting the arguments.
3088 __ Bind(&dont_adapt_arguments);
3089 __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
3090 __ Jump(code_entry);
3091
3092 __ Bind(&stack_overflow);
3093 {
3094 FrameScope frame(masm, StackFrame::MANUAL);
3095 __ CallRuntime(Runtime::kThrowStackOverflow);
3096 __ Unreachable();
3097 }
3098 }
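
// A worked example of the adaptor (illustrative): for actual argc == 1 and
// expected argc == 3, the "too few" path copies [receiver, a0] below the new
// adaptor frame and then fills with undefined, so the callee sees
// [receiver, a0, undefined, undefined] with x0 == 3; the recorded return
// address offset lets the deoptimizer find this frame again.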
3099
3100
3101 #undef __
3102
3103 } // namespace internal
3104 } // namespace v8
3105
3106 #endif // V8_TARGET_ARCH_ARM64