Chromium Code Reviews

Diff: src/builtins/arm64/builtins-arm64.cc

Issue 2145023002: [builtins] move builtin files to src/builtins/. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebase (created 4 years, 5 months ago)
// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/arm64/frames-arm64.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

-
#define __ ACCESS_MASM(masm)

-
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

-
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
(...skipping 20 matching lines...)

  // Insert extra arguments.
  __ SmiTag(x0);
  __ Push(x0, x1, x3);
  __ SmiUntag(x0);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}

-
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : number of arguments
  //  -- lr : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

-
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : number of arguments
  //  -- lr : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  __ Mov(x3, x1);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

-
// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- x0 : number of arguments
  //  -- x1 : function
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero-based)
  //  -- sp[argc * 8] : receiver
  // -----------------------------------
(...skipping 116 matching lines...)
    // Drop all arguments.
    __ Drop(x2);
  }

  // 2b. No arguments, return +0 (already in x0).
  __ Bind(&no_arguments);
  __ Drop(1);
  __ Ret();
}

-
// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : number of arguments
  //  -- x1 : constructor function
  //  -- x3 : new target
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8] : receiver
  // -----------------------------------
(...skipping 61 matching lines...)
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(x6);
    __ Drop(1);
    __ Ret();
  }
}

-
// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : number of arguments
  //  -- x1 : constructor function
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8] : receiver
  // -----------------------------------
(...skipping 51 matching lines...)
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(x2);
    __ Drop(1);
    __ Ret();
  }
}

-
// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : number of arguments
  //  -- x1 : constructor function
  //  -- x3 : new target
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8] : receiver
  // -----------------------------------
(...skipping 96 matching lines...)

    // Restore target function and new target.
    __ Pop(x3, x1, x0);
    __ SmiUntag(x0);
  }

  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}

-
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would be
  // quite expensive. A good compromise is to first check against stack limit as
  // a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

-
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- x0 : number of arguments
  //  -- x1 : constructor function
  //  -- x2 : allocation site or undefined
  //  -- x3 : new target
  //  -- lr : return address
(...skipping 148 matching lines...)
  }

  __ DropBySMI(x1);
  __ Drop(1);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  }
  __ Ret();
}

-
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

-
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

-
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

-
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

-
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(x1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the value to pass to the generator
(...skipping 60 matching lines...)
  // Dispatch on the kind of generator object.
  Label old_generator;
  __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE);
  __ B(ne, &old_generator);

  // New-style (ignition/turbofan) generator object
  {
    __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
-    __ Ldr(w0,
-        FieldMemOperand(x0, SharedFunctionInfo::kFormalParameterCountOffset));
+    __ Ldr(w0, FieldMemOperand(
+                   x0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(x3, x1);
    __ Move(x1, x4);
    __ Ldr(x5, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
    __ Jump(x5);
  }

  // Old-style (full-codegen) generator object
(...skipping 23 matching lines...)
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex);
    __ Str(x10, FieldMemOperand(x1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(x10, FieldMemOperand(x10, SharedFunctionInfo::kCodeOffset));
    __ Add(x10, x10, Code::kHeaderSize - kHeapObjectTag);
-    __ Ldrsw(x11,
-        UntagSmiFieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
+    __ Ldrsw(x11, UntagSmiFieldMemOperand(
+                      x1, JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
    __ Move(x0, x1);  // Continuation expects generator object in x0.
    __ Br(x10);
  }

  __ Bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
(...skipping 10 matching lines...)
    __ Push(x1, x2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(x2, x1);
    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
  }
  __ B(&stepping_prepared);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

-
// Clobbers x10, x15; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  // TODO(jbramley): Check that the stack usage here is safe.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ B(gt, &enough_stack_space);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // We should never return from the APPLY_OVERFLOW builtin.
  if (__ emit_debug_code()) {
    __ Unreachable();
  }

  __ Bind(&enough_stack_space);
}

-
// Input:
//   x0: new.target.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
(...skipping 29 matching lines...)
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);                  // Push the argument.
    __ Bind(&entry);
    __ Cmp(scratch, argv);
    __ B(ne, &loop);

    __ Mov(scratch, argc);
    __ Mov(argc, new_target);
    __ Mov(new_target, scratch);
    // x0: argc.
    // x3: new.target.

(...skipping 19 matching lines...)
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}

-
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

-
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ldr(args_count,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
(...skipping 430 matching lines...)

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

-
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x1 : new target (preserved for callee)
  //  -- x3 : target function (preserved for callee)
  // -----------------------------------
(...skipping 50 matching lines...)
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

-
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
-        ExternalReference::get_mark_code_as_executed_function(
-            masm->isolate()), 2);
+        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
+        2);
    __ Pop(lr, fp, x3, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}

-
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

-
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

-
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}

-
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

-
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

-
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

(...skipping 17 matching lines...)
  __ CompareAndBranch(state,
                      static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER),
                      ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}

-
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

-
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

-
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

-
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
(...skipping 43 matching lines...)
  __ Tst(x16, Operand(Map::HasHiddenPrototype::kMask));
  __ B(eq, receiver_check_failed);
  __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ B(&prototype_loop_start);

  __ Bind(&receiver_check_passed);
}

-
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : number of arguments excluding receiver
  //  -- x1 : callee
  //  -- lr : return address
  //  -- sp[0] : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc] : receiver
  // -----------------------------------
(...skipping 15 matching lines...)
  __ Jump(x4);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ Bind(&receiver_check_failed);
  // Drop the arguments (including the receiver)
  __ add(x0, x0, Operand(1));
  __ Drop(x0);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}

-
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
-  __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
-      DeoptimizationInputData::kOsrPcOffsetIndex)));
+  __ Ldrsw(w1, UntagSmiFieldMemOperand(
+                   x1, FixedArray::OffsetOfElementAt(
+                           DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}

-
// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- x0 : number of arguments
  //  -- x1 : function
  //  -- cp : context
  //  -- lr : return address
  //  -- jssp[0] : receiver
  // -----------------------------------
(...skipping 121 matching lines...)
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ Bind(&receiver_not_callable);
  {
    __ Poke(receiver, 0);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

-
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  Register argc = x0;
  Register function = x1;
  Register scratch1 = x10;
  Register scratch2 = x11;

  ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");

  // 1. Make sure we have at least one argument.
(...skipping 26 matching lines...)
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

-
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argc
  //  -- jssp[0] : argumentsList (if argc == 3)
  //  -- jssp[8] : thisArgument (if argc >= 2)
  //  -- jssp[16] : target (if argc >= 1)
  //  -- jssp[24] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ReflectApply");

(...skipping 50 matching lines...)
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ Bind(&target_not_callable);
  {
    __ Poke(target, 0);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

-
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argc
  //  -- jssp[0] : new.target (optional)
  //  -- jssp[8] : argumentsList
  //  -- jssp[16] : target
  //  -- jssp[24] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ReflectConstruct");

(...skipping 66 matching lines...)
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ Bind(&new_target_not_constructor);
  {
    __ Poke(new_target, 0);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

-
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  //  -- x3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
  __ B(le, stack_overflow);
}

-
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}

-
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
}

-
// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argumentsList
  //  -- x1 : target
  //  -- x3 : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  Register arguments_list = x0;
(...skipping 320 matching lines...)

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

-
namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : target (checked to be a JSBoundFunction)
  //  -- x3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into x2 and length of that into x4.
(...skipping 57 matching lines...)
2628 __ Add(x0, x0, 1); 2586 __ Add(x0, x0, 1);
2629 __ Cmp(x4, 0); 2587 __ Cmp(x4, 0);
2630 __ B(gt, &loop); 2588 __ B(gt, &loop);
2631 } 2589 }
2632 } 2590 }
2633 __ Bind(&no_bound_arguments); 2591 __ Bind(&no_bound_arguments);
2634 } 2592 }
2635 2593
2636 } // namespace 2594 } // namespace
2637 2595
2638
2639 // static 2596 // static
2640 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, 2597 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2641 TailCallMode tail_call_mode) { 2598 TailCallMode tail_call_mode) {
2642 // ----------- S t a t e ------------- 2599 // ----------- S t a t e -------------
2643 // -- x0 : the number of arguments (not including the receiver) 2600 // -- x0 : the number of arguments (not including the receiver)
2644 // -- x1 : the function to call (checked to be a JSBoundFunction) 2601 // -- x1 : the function to call (checked to be a JSBoundFunction)
2645 // ----------------------------------- 2602 // -----------------------------------
2646 __ AssertBoundFunction(x1); 2603 __ AssertBoundFunction(x1);
2647 2604
2648 if (tail_call_mode == TailCallMode::kAllow) { 2605 if (tail_call_mode == TailCallMode::kAllow) {
2649 PrepareForTailCall(masm, x0, x3, x4, x5); 2606 PrepareForTailCall(masm, x0, x3, x4, x5);
2650 } 2607 }
2651 2608
2652 // Patch the receiver to [[BoundThis]]. 2609 // Patch the receiver to [[BoundThis]].
2653 __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset)); 2610 __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
2654 __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2)); 2611 __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
2655 2612
2656 // Push the [[BoundArguments]] onto the stack. 2613 // Push the [[BoundArguments]] onto the stack.
2657 Generate_PushBoundArguments(masm); 2614 Generate_PushBoundArguments(masm);
2658 2615
2659 // Call the [[BoundTargetFunction]] via the Call builtin. 2616 // Call the [[BoundTargetFunction]] via the Call builtin.
2660 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); 2617 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
2661 __ Mov(x10, 2618 __ Mov(x10,
2662 ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate())); 2619 ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
2663 __ Ldr(x11, MemOperand(x10)); 2620 __ Ldr(x11, MemOperand(x10));
2664 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag); 2621 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
2665 __ Br(x12); 2622 __ Br(x12);
2666 } 2623 }
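The Mov/Ldr/Add/Br sequence above loads the Call builtin's Code object through an ExternalReference and branches to its first instruction, skipping the Code header and compensating for the heap-object tag. A rough sketch of that address arithmetic, with illustrative constants rather than the real V8 values:

#include <cstdint>

// Illustrative constants only; the real values come from V8's Code layout
// and pointer-tagging scheme.
constexpr std::uintptr_t kHeapObjectTagModel = 1;
constexpr std::uintptr_t kCodeHeaderSizeModel = 96;

// Model of the Add(x12, x11, Code::kHeaderSize - kHeapObjectTag) step:
// strip the tag and skip the Code header to reach the first instruction.
std::uintptr_t CodeEntryAddress(std::uintptr_t tagged_code_pointer) {
  return tagged_code_pointer + kCodeHeaderSizeModel - kHeapObjectTagModel;
}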
2667 2624
2668
2669 // static 2625 // static
2670 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, 2626 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2671 TailCallMode tail_call_mode) { 2627 TailCallMode tail_call_mode) {
2672 // ----------- S t a t e ------------- 2628 // ----------- S t a t e -------------
2673 // -- x0 : the number of arguments (not including the receiver) 2629 // -- x0 : the number of arguments (not including the receiver)
2674 // -- x1 : the target to call (can be any Object). 2630 // -- x1 : the target to call (can be any Object).
2675 // ----------------------------------- 2631 // -----------------------------------
2676 2632
2677 Label non_callable, non_function, non_smi; 2633 Label non_callable, non_function, non_smi;
2678 __ JumpIfSmi(x1, &non_callable); 2634 __ JumpIfSmi(x1, &non_callable);
(...skipping 39 matching lines...)
2718 2674
2719 // 3. Call to something that is not callable. 2675 // 3. Call to something that is not callable.
2720 __ bind(&non_callable); 2676 __ bind(&non_callable);
2721 { 2677 {
2722 FrameScope scope(masm, StackFrame::INTERNAL); 2678 FrameScope scope(masm, StackFrame::INTERNAL);
2723 __ Push(x1); 2679 __ Push(x1);
2724 __ CallRuntime(Runtime::kThrowCalledNonCallable); 2680 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2725 } 2681 }
2726 } 2682 }
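The actual type dispatch sits in the lines elided above; only the Smi check and the non-callable fallback are visible here. As a hedged semantic model (not the emitted code), the call target roughly selects one of these paths:

// Hedged semantic model of the dispatch; the other-callable path is an
// assumption about the elided code, not something visible in this hunk.
enum class TargetKind { kSmi, kJSFunction, kJSBoundFunction, kOtherCallable, kNotCallable };

const char* CallDispatchModel(TargetKind kind) {
  switch (kind) {
    case TargetKind::kJSFunction:      return "CallFunction builtin";
    case TargetKind::kJSBoundFunction: return "CallBoundFunction builtin";
    case TargetKind::kOtherCallable:   return "call-as-function delegate (assumption)";
    case TargetKind::kSmi:
    case TargetKind::kNotCallable:     return "Runtime::kThrowCalledNonCallable";
  }
  return "";  // unreachable
}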
2727 2683
2728
2729 // static 2684 // static
2730 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { 2685 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2731 // ----------- S t a t e ------------- 2686 // ----------- S t a t e -------------
2732 // -- x0 : the number of arguments (not including the receiver) 2687 // -- x0 : the number of arguments (not including the receiver)
2733 // -- x1 : the constructor to call (checked to be a JSFunction) 2688 // -- x1 : the constructor to call (checked to be a JSFunction)
2734 // -- x3 : the new target (checked to be a constructor) 2689 // -- x3 : the new target (checked to be a constructor)
2735 // ----------------------------------- 2690 // -----------------------------------
2736 __ AssertFunction(x1); 2691 __ AssertFunction(x1);
2737 2692
2738 // The calling convention for function-specific ConstructStubs requires 2693 // The calling convention for function-specific ConstructStubs requires
2739 // x2 to contain either an AllocationSite or undefined. 2694 // x2 to contain either an AllocationSite or undefined.
2740 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); 2695 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
2741 2696
2742 // Tail call to the function-specific construct stub (still in the caller 2697 // Tail call to the function-specific construct stub (still in the caller
2743 // context at this point). 2698 // context at this point).
2744 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); 2699 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
2745 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); 2700 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
2746 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); 2701 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
2747 __ Br(x4); 2702 __ Br(x4);
2748 } 2703 }
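Generate_ConstructFunction reaches the function-specific construct stub by chasing two fields and then jumping past the Code header. A tiny sketch of that field chain with hypothetical struct layouts (the real V8 objects are tagged and accessed via FieldMemOperand):

// Hypothetical flat structs standing in for the real tagged V8 objects.
struct CodeModel { const unsigned char* instructions; };
struct SharedFunctionInfoModel { const CodeModel* construct_stub; };
struct JSFunctionModel { const SharedFunctionInfoModel* shared; };

// Mirrors the two Ldr field loads plus the final Br(x4).
const unsigned char* ConstructStubEntry(const JSFunctionModel& function) {
  return function.shared->construct_stub->instructions;
}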
2749 2704
2750
2751 // static 2705 // static
2752 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { 2706 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2753 // ----------- S t a t e ------------- 2707 // ----------- S t a t e -------------
2754 // -- x0 : the number of arguments (not including the receiver) 2708 // -- x0 : the number of arguments (not including the receiver)
2755 // -- x1 : the function to call (checked to be a JSBoundFunction) 2709 // -- x1 : the function to call (checked to be a JSBoundFunction)
2756 // -- x3 : the new target (checked to be a constructor) 2710 // -- x3 : the new target (checked to be a constructor)
2757 // ----------------------------------- 2711 // -----------------------------------
2758 __ AssertBoundFunction(x1); 2712 __ AssertBoundFunction(x1);
2759 2713
2760 // Push the [[BoundArguments]] onto the stack. 2714 // Push the [[BoundArguments]] onto the stack.
(...skipping 10 matching lines...)
2771 } 2725 }
2772 2726
2773 // Construct the [[BoundTargetFunction]] via the Construct builtin. 2727 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2774 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); 2728 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
2775 __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate())); 2729 __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate()));
2776 __ Ldr(x11, MemOperand(x10)); 2730 __ Ldr(x11, MemOperand(x10));
2777 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag); 2731 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
2778 __ Br(x12); 2732 __ Br(x12);
2779 } 2733 }
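Per the spec, [[Construct]] on a bound function also replaces new.target with the bound target when new.target is the bound function itself; that fix-up lives in the lines elided above. A one-line semantic sketch (illustrative, not the emitted code):

struct JSObjectModel {};  // stand-in for a heap object

// If new.target is the bound function itself, construction proceeds with the
// bound target as new.target instead.
const JSObjectModel* FixupNewTargetModel(const JSObjectModel* bound_function,
                                         const JSObjectModel* bound_target,
                                         const JSObjectModel* new_target) {
  return new_target == bound_function ? bound_target : new_target;
}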
2780 2734
2781
2782 // static 2735 // static
2783 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { 2736 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2784 // ----------- S t a t e ------------- 2737 // ----------- S t a t e -------------
2785 // -- x0 : the number of arguments (not including the receiver) 2738 // -- x0 : the number of arguments (not including the receiver)
2786 // -- x1 : the constructor to call (checked to be a JSProxy) 2739 // -- x1 : the constructor to call (checked to be a JSProxy)
2787 // -- x3 : the new target (either the same as the constructor or 2740 // -- x3 : the new target (either the same as the constructor or
2788 // the JSFunction on which new was invoked initially) 2741 // the JSFunction on which new was invoked initially)
2789 // ----------------------------------- 2742 // -----------------------------------
2790 2743
2791 // Call into the Runtime for Proxy [[Construct]]. 2744 // Call into the Runtime for Proxy [[Construct]].
2792 __ Push(x1); 2745 __ Push(x1);
2793 __ Push(x3); 2746 __ Push(x3);
2794 // Include the pushed new_target, constructor and the receiver. 2747 // Include the pushed new_target, constructor and the receiver.
2795 __ Add(x0, x0, 3); 2748 __ Add(x0, x0, 3);
2796 // Tail-call to the runtime. 2749 // Tail-call to the runtime.
2797 __ JumpToExternalReference( 2750 __ JumpToExternalReference(
2798 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); 2751 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2799 } 2752 }
2800 2753
2801
2802 // static 2754 // static
2803 void Builtins::Generate_Construct(MacroAssembler* masm) { 2755 void Builtins::Generate_Construct(MacroAssembler* masm) {
2804 // ----------- S t a t e ------------- 2756 // ----------- S t a t e -------------
2805 // -- x0 : the number of arguments (not including the receiver) 2757 // -- x0 : the number of arguments (not including the receiver)
2806 // -- x1 : the constructor to call (can be any Object) 2758 // -- x1 : the constructor to call (can be any Object)
2807 // -- x3 : the new target (either the same as the constructor or 2759 // -- x3 : the new target (either the same as the constructor or
2808 // the JSFunction on which new was invoked initially) 2760 // the JSFunction on which new was invoked initially)
2809 // ----------------------------------- 2761 // -----------------------------------
2810 2762
2811 // Check if target is a Smi. 2763 // Check if target is a Smi.
(...skipping 142 matching lines...)
2954 2906
2955 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 2907 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2956 ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline"); 2908 ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
2957 // ----------- S t a t e ------------- 2909 // ----------- S t a t e -------------
2958 // -- x0 : actual number of arguments 2910 // -- x0 : actual number of arguments
2959 // -- x1 : function (passed through to callee) 2911 // -- x1 : function (passed through to callee)
2960 // -- x2 : expected number of arguments 2912 // -- x2 : expected number of arguments
2961 // -- x3 : new target (passed through to callee) 2913 // -- x3 : new target (passed through to callee)
2962 // ----------------------------------- 2914 // -----------------------------------
2963 2915
2964 Register argc_actual = x0; // Excluding the receiver. 2916 Register argc_actual = x0; // Excluding the receiver.
2965 Register argc_expected = x2; // Excluding the receiver. 2917 Register argc_expected = x2; // Excluding the receiver.
2966 Register function = x1; 2918 Register function = x1;
2967 Register code_entry = x10; 2919 Register code_entry = x10;
2968 2920
2969 Label invoke, dont_adapt_arguments, stack_overflow; 2921 Label invoke, dont_adapt_arguments, stack_overflow;
2970 2922
2971 Label enough, too_few; 2923 Label enough, too_few;
2972 __ Cmp(argc_actual, argc_expected); 2924 __ Cmp(argc_actual, argc_expected);
2973 __ B(lt, &too_few); 2925 __ B(lt, &too_few);
2974 __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel); 2926 __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
(...skipping 20 matching lines...)
2995 // Claim space for the arguments, the receiver, and one extra slot. 2947 // Claim space for the arguments, the receiver, and one extra slot.
2996 // The extra slot ensures we do not write under jssp. It will be popped 2948 // The extra slot ensures we do not write under jssp. It will be popped
2997 // later. 2949 // later.
2998 __ Add(scratch1, scratch2, 2 * kPointerSize); 2950 __ Add(scratch1, scratch2, 2 * kPointerSize);
2999 __ Claim(scratch1, 1); 2951 __ Claim(scratch1, 1);
3000 2952
3001 // Copy the arguments (including the receiver) to the new stack frame. 2953 // Copy the arguments (including the receiver) to the new stack frame.
3002 Label copy_2_by_2; 2954 Label copy_2_by_2;
3003 __ Bind(&copy_2_by_2); 2955 __ Bind(&copy_2_by_2);
3004 __ Ldp(scratch1, scratch2, 2956 __ Ldp(scratch1, scratch2,
3005 MemOperand(copy_start, - 2 * kPointerSize, PreIndex)); 2957 MemOperand(copy_start, -2 * kPointerSize, PreIndex));
3006 __ Stp(scratch1, scratch2, 2958 __ Stp(scratch1, scratch2,
3007 MemOperand(copy_to, - 2 * kPointerSize, PreIndex)); 2959 MemOperand(copy_to, -2 * kPointerSize, PreIndex));
3008 __ Cmp(copy_start, copy_end); 2960 __ Cmp(copy_start, copy_end);
3009 __ B(hi, &copy_2_by_2); 2961 __ B(hi, &copy_2_by_2);
3010 2962
3011 // Correct the space allocated for the extra slot. 2963 // Correct the space allocated for the extra slot.
3012 __ Drop(1); 2964 __ Drop(1);
3013 2965
3014 __ B(&invoke); 2966 __ B(&invoke);
3015 } 2967 }
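// Why the extra slot: the copy loop above moves two pointers per iteration
// (Ldp/Stp), so for an odd number of slots the final pair store would touch
// one word below the area that is actually needed. Claiming one spare slot
// and dropping it afterwards keeps that store inside claimed stack space.
// A plain-C++ model of the same idea (illustrative only, not V8 code):
#include <cstddef>
#include <cstdint>
#include <vector>

void CopyPairwiseModel(const std::vector<std::uint64_t>& src,
                       std::vector<std::uint64_t>& dst) {
  dst.assign(src.size() + 1, 0);  // one spare slot, like Claim(scratch1, 1) above
  for (std::size_t i = 0; i < src.size(); i += 2) {
    dst[i] = src[i];
    dst[i + 1] = (i + 1 < src.size()) ? src[i + 1] : 0;  // spare slot absorbs the odd word
  }
  dst.pop_back();  // corresponds to the Drop(1) that returns the extra slot
}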
3016 2968
3017 { // Too few parameters: Actual < expected 2969 { // Too few parameters: Actual < expected
3018 __ Bind(&too_few); 2970 __ Bind(&too_few);
3019 2971
3020 Register copy_from = x10; 2972 Register copy_from = x10;
3021 Register copy_end = x11; 2973 Register copy_end = x11;
3022 Register copy_to = x12; 2974 Register copy_to = x12;
3023 Register scratch1 = x13, scratch2 = x14; 2975 Register scratch1 = x13, scratch2 = x14;
3024 2976
3025 EnterArgumentsAdaptorFrame(masm); 2977 EnterArgumentsAdaptorFrame(masm);
3026 ArgumentAdaptorStackCheck(masm, &stack_overflow); 2978 ArgumentAdaptorStackCheck(masm, &stack_overflow);
3027 2979
3028 __ Lsl(scratch2, argc_expected, kPointerSizeLog2); 2980 __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
3029 __ Lsl(argc_actual, argc_actual, kPointerSizeLog2); 2981 __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);
3030 2982
3031 // Adjust for fp, lr, and the receiver. 2983 // Adjust for fp, lr, and the receiver.
3032 __ Add(copy_from, fp, 3 * kPointerSize); 2984 __ Add(copy_from, fp, 3 * kPointerSize);
3033 __ Add(copy_from, copy_from, argc_actual); 2985 __ Add(copy_from, copy_from, argc_actual);
3034 __ Mov(copy_to, jssp); 2986 __ Mov(copy_to, jssp);
3035 __ Sub(copy_end, copy_to, 1 * kPointerSize); // Adjust for the receiver. 2987 __ Sub(copy_end, copy_to, 1 * kPointerSize); // Adjust for the receiver.
3036 __ Sub(copy_end, copy_end, argc_actual); 2988 __ Sub(copy_end, copy_end, argc_actual);
3037 2989
3038 // Claim space for the arguments, the receiver, and one extra slot. 2990 // Claim space for the arguments, the receiver, and one extra slot.
3039 // The extra slot ensures we do not write under jssp. It will be popped 2991 // The extra slot ensures we do not write under jssp. It will be popped
3040 // later. 2992 // later.
3041 __ Add(scratch1, scratch2, 2 * kPointerSize); 2993 __ Add(scratch1, scratch2, 2 * kPointerSize);
3042 __ Claim(scratch1, 1); 2994 __ Claim(scratch1, 1);
3043 2995
3044 // Copy the arguments (including the receiver) to the new stack frame. 2996 // Copy the arguments (including the receiver) to the new stack frame.
3045 Label copy_2_by_2; 2997 Label copy_2_by_2;
3046 __ Bind(&copy_2_by_2); 2998 __ Bind(&copy_2_by_2);
3047 __ Ldp(scratch1, scratch2, 2999 __ Ldp(scratch1, scratch2,
3048 MemOperand(copy_from, - 2 * kPointerSize, PreIndex)); 3000 MemOperand(copy_from, -2 * kPointerSize, PreIndex));
3049 __ Stp(scratch1, scratch2, 3001 __ Stp(scratch1, scratch2,
3050 MemOperand(copy_to, - 2 * kPointerSize, PreIndex)); 3002 MemOperand(copy_to, -2 * kPointerSize, PreIndex));
3051 __ Cmp(copy_to, copy_end); 3003 __ Cmp(copy_to, copy_end);
3052 __ B(hi, &copy_2_by_2); 3004 __ B(hi, &copy_2_by_2);
3053 3005
3054 __ Mov(copy_to, copy_end); 3006 __ Mov(copy_to, copy_end);
3055 3007
3056 // Fill the remaining expected arguments with undefined. 3008 // Fill the remaining expected arguments with undefined.
3057 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex); 3009 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
3058 __ Add(copy_end, jssp, kPointerSize); 3010 __ Add(copy_end, jssp, kPointerSize);
3059 3011
3060 Label fill; 3012 Label fill;
3061 __ Bind(&fill); 3013 __ Bind(&fill);
3062 __ Stp(scratch1, scratch1, 3014 __ Stp(scratch1, scratch1,
3063 MemOperand(copy_to, - 2 * kPointerSize, PreIndex)); 3015 MemOperand(copy_to, -2 * kPointerSize, PreIndex));
3064 __ Cmp(copy_to, copy_end); 3016 __ Cmp(copy_to, copy_end);
3065 __ B(hi, &fill); 3017 __ B(hi, &fill);
3066 3018
3067 // Correct the space allocated for the extra slot. 3019 // Correct the space allocated for the extra slot.
3068 __ Drop(1); 3020 __ Drop(1);
3069 } 3021 }
3070 3022
3071 // Arguments have been adapted. Now call the entry point. 3023 // Arguments have been adapted. Now call the entry point.
3072 __ Bind(&invoke); 3024 __ Bind(&invoke);
3073 __ Mov(argc_actual, argc_expected); 3025 __ Mov(argc_actual, argc_expected);
(...skipping 16 matching lines...)
3090 __ Jump(code_entry); 3042 __ Jump(code_entry);
3091 3043
3092 __ Bind(&stack_overflow); 3044 __ Bind(&stack_overflow);
3093 { 3045 {
3094 FrameScope frame(masm, StackFrame::MANUAL); 3046 FrameScope frame(masm, StackFrame::MANUAL);
3095 __ CallRuntime(Runtime::kThrowStackOverflow); 3047 __ CallRuntime(Runtime::kThrowStackOverflow);
3096 __ Unreachable(); 3048 __ Unreachable();
3097 } 3049 }
3098 } 3050 }
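At the JS level the adaptation boils down to this: the callee's declared parameters always receive exactly `expected` values, taken from the actual arguments where available and filled with undefined otherwise. A hedged plain-C++ sketch of that behaviour (hypothetical names, not V8 API):

#include <algorithm>
#include <cstddef>
#include <string>
#include <vector>

// Model of the parameter slots the callee ends up seeing after adaptation.
std::vector<std::string> AdaptArgumentsModel(const std::vector<std::string>& actual,
                                             std::size_t expected) {
  std::size_t kept = std::min(actual.size(), expected);
  std::vector<std::string> adapted(actual.begin(),
                                   actual.begin() + static_cast<std::ptrdiff_t>(kept));
  while (adapted.size() < expected) {
    adapted.push_back("undefined");  // the Heap::kUndefinedValueRootIndex fill above
  }
  return adapted;
}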
3099 3051
3100
3101 #undef __ 3052 #undef __
3102 3053
3103 } // namespace internal 3054 } // namespace internal
3104 } // namespace v8 3055 } // namespace v8
3105 3056
3106 #endif // V8_TARGET_ARCH_ARM64 3057 #endif // V8_TARGET_ARCH_ARM64