Chromium Code Reviews

Unified Diff: src/builtins/arm/builtins-arm.cc

Issue 2145023002: [builtins] move builtin files to src/builtins/. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebase Created 4 years, 5 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

-
#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments excluding receiver
  // -- r1 : target
  // -- r3 : new.target
  // -- sp[0] : last argument
  // -- ...
(...skipping 15 matching lines...)

  // Insert extra arguments.
  __ SmiTag(r0);
  __ Push(r0, r1, r3);
  __ SmiUntag(r0);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}

-
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

-
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

-
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments
  // -- lr : return address
  // -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

-
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments
  // -- lr : return address
  // -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mov(r3, r1);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

-
// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments
  // -- r1 : function
  // -- cp : context
  // -- lr : return address
  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  // -- sp[argc * 4] : receiver
  // -----------------------------------
(...skipping 128 matching lines...)
    __ Drop(r2);
    __ Ret(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(r0, Smi::FromInt(0));
  __ Ret(1);
}

-
// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments
  // -- r1 : constructor function
  // -- r3 : new target
  // -- cp : context
  // -- lr : return address
  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  // -- sp[argc * 4] : receiver
(...skipping 60 matching lines...)
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r6);
    __ Ret(1);
  }
}

-
// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments
  // -- r1 : constructor function
  // -- cp : context
  // -- lr : return address
  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  // -- sp[argc * 4] : receiver
  // -----------------------------------
(...skipping 48 matching lines...)
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r2);
    __ Ret(1);
  }
}

-
// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments
  // -- r1 : constructor function
  // -- r3 : new target
  // -- cp : context
  // -- lr : return address
  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  // -- sp[argc * 4] : receiver
(...skipping 62 matching lines...)
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r6);
    __ Ret(1);
  }
}

-
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
(...skipping 18 matching lines...)
    // Restore target function and new target.
    __ pop(r3);
    __ pop(r1);
    __ pop(r0);
    __ SmiUntag(r0, r0);
  }
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

-
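Both tail-call helpers above enter the target code at Code::kHeaderSize - kHeapObjectTag past the tagged Code pointer. As a rough illustration (not part of the patch), assuming only the usual V8 conventions that heap pointers carry a small tag and that a Code object's instructions begin kHeaderSize bytes after the object start, the address arithmetic works out as:

#include <cstdint>

// Illustrative stand-ins; the real values come from the V8 headers.
constexpr uintptr_t kHeapObjectTag = 1;    // tag bit carried by heap pointers
constexpr uintptr_t kCodeHeaderSize = 64;  // placeholder for Code::kHeaderSize

// Address of the first instruction of a Code object, mirroring
//   __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
uintptr_t CodeEntryAddress(uintptr_t tagged_code_pointer) {
  uintptr_t object_start = tagged_code_pointer - kHeapObjectTag;  // untag
  return object_start + kCodeHeaderSize;  // skip past the Code header
}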
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

-
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments
  // -- r1 : constructor function
  // -- r2 : allocation site or undefined
  // -- r3 : new target
  // -- cp : context
(...skipping 134 matching lines...)
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  }
  __ Jump(lr);
}

-
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

-
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

-
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

-
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : the value to pass to the generator
  // -- r1 : the JSGeneratorObject to resume
(...skipping 67 matching lines...)
  // Dispatch on the kind of generator object.
  Label old_generator;
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
  __ b(ne, &old_generator);

  // New-style (ignition/turbofan) generator object
  {
    __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
-    __ ldr(r0,
-        FieldMemOperand(r0, SharedFunctionInfo::kFormalParameterCountOffset));
+    __ ldr(r0, FieldMemOperand(
+                   r0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(r0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(r3, r1);
    __ Move(r1, r4);
    __ ldr(r5, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
    __ Jump(r5);
  }

(...skipping 60 matching lines...)
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(r1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

-
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

-
// Clobbers r2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

-
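A plain-C++ restatement of the check above (illustrative only, not part of the patch): r2 becomes the space left between sp and the real stack limit, which goes negative if the stack is already overflowed, and the arguments fit only if that space strictly exceeds argc words.

#include <cstdint>

constexpr uintptr_t kPointerSize = 4;  // 32-bit ARM word size

// Mirrors Generate_CheckStackOverflow: passes only when there is strictly
// more room than argc * kPointerSize bytes (signed comparison, as in
// __ b(gt, &okay)).
bool ArgumentsFitOnStack(uintptr_t sp, uintptr_t real_stack_limit, int argc) {
  intptr_t space_left =
      static_cast<intptr_t>(sp) - static_cast<intptr_t>(real_stack_limit);
  return space_left > static_cast<intptr_t>(argc * kPointerSize);
}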
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: new.target
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);
(...skipping 23 matching lines...)
    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));                           // dereference handle
    __ push(r0);                                          // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Setup new.target and argc.
    __ mov(r0, Operand(r3));
    __ mov(r3, Operand(r5));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
(...skipping 15 matching lines...)

    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}

-
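The copy loop above walks argv with a post-indexed load, dereferences each handle, and pushes the result. A hypothetical C++ rendering of the same idea (the Object type and the stack container are stand-ins, not V8 types):

#include <vector>

using Object = void;  // stand-in for a tagged V8 object pointer

// argv is an array of handles, i.e. each slot points at a location that
// holds the actual object pointer.
void PushArguments(Object** const* argv, int argc,
                   std::vector<Object*>* js_stack) {
  for (int i = 0; i < argc; ++i) {
    Object** handle = argv[i];   // read next parameter
    Object* value = *handle;     // dereference handle
    js_stack->push_back(value);  // push parameter
  }
}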
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

-
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ldr(args_count,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
(...skipping 436 matching lines...)

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

-
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : argument count (preserved for callee)
  // -- r1 : new target (preserved for callee)
  // -- r3 : target function (preserved for callee)
  // -----------------------------------
(...skipping 49 matching lines...)
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ mov(pc, r0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

-
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  // r0 - contains return address (beginning of patch sequence)
  // r1 - isolate
  // r3 - new target
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
-  __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
-      masm->isolate()), 2);
+  __ CallCFunction(
+      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
+      2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r1);

  // Jump to point after the code-age stub.
  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
  __ mov(pc, r0);
}

-
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

-
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

-
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
  }

  __ add(sp, sp, Operand(kPointerSize));  // Ignore state
  __ mov(pc, lr);                         // Jump to miss handler
}

-
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

-
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

-
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

(...skipping 14 matching lines...)
  __ cmp(r6,
         Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}

-
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

-
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

-
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

-
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
(...skipping 43 matching lines...)
    __ tst(ip, Operand(Map::HasHiddenPrototype::kMask));
    __ b(eq, receiver_check_failed);
    __ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
    __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
    // Iterate.
    __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}

-
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments excluding receiver
  // -- r1 : callee
  // -- lr : return address
  // -- sp[0] : last argument
  // -- ...
  // -- sp[4 * (argc - 1)] : first argument
  // -- sp[4 * argc] : receiver
  // -----------------------------------
(...skipping 15 matching lines...)
  __ Jump(r4);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver)
  __ add(r0, r0, Operand(1));
  __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}

-
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(0)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));

-  { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
+  {
+    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
-    __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
-                   DeoptimizationInputData::kOsrPcOffsetIndex)));
+    __ ldr(r1, FieldMemOperand(
+                   r1, FixedArray::OffsetOfElementAt(
+                           DeoptimizationInputData::kOsrPcOffsetIndex)));

    // Compute the target address = code start + osr_offset
    __ add(lr, r0, Operand::SmiUntag(r1));

    // And "return" to the OSR entry point of the function.
    __ Ret();
  }
}

-
// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  // -- r0 : number of arguments
  // -- r1 : function
  // -- cp : context
  // -- lr : return address
  // -- sp[0] : receiver
  // -----------------------------------
(...skipping 100 matching lines...)
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

-
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  {
    Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ add(r0, r0, Operand(1));
(...skipping 23 matching lines...)
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

-
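The numbered comments sketch the Function.prototype.call shuffle: guarantee at least one argument, shift everything down a slot so the callable itself (the original receiver) is squeezed out, then call. The shifting loop itself sits in the elided chunk, so the following vector-based reconstruction is an assumption-laden sketch, not the builtin:

#include <vector>

// Stack modeled with the top at the back; "Value" stands in for a tagged
// object pointer. Layout on entry (top to bottom): arg[argc-1] ... arg[0],
// then the receiver that is being called.
using Value = void*;

Value ShuffleForFunctionPrototypeCall(std::vector<Value>* stack, int* argc,
                                      Value undefined) {
  // 1. Make sure we have at least one argument (it becomes the new receiver).
  if (*argc == 0) {
    stack->push_back(undefined);
    ++*argc;
  }
  // 2. The callable is the original receiver, one slot below the arguments.
  Value callable = (*stack)[stack->size() - 1 - *argc];
  // 3. Shift the arguments down one slot, overwriting the original receiver,
  //    then drop the now-duplicated top element and adjust argc.
  for (size_t i = stack->size() - *argc; i < stack->size(); ++i) {
    (*stack)[i - 1] = (*stack)[i];
  }
  stack->pop_back();
  --*argc;
  return callable;  // 4. Call this with the shuffled arguments.
}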
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : argc
  // -- sp[0] : argumentsList
  // -- sp[4] : thisArgument
  // -- sp[8] : target
  // -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
(...skipping 34 matching lines...)
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

-
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : argc
  // -- sp[0] : new.target (optional)
  // -- sp[4] : argumentsList
  // -- sp[8] : target
  // -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
(...skipping 49 matching lines...)
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ str(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

-
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  // -- r0 : actual number of arguments
  // -- r1 : function (passed through to callee)
  // -- r2 : expected number of arguments
  // -- r3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}

-
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
                       (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                       fp.bit() | lr.bit());
  __ add(fp, sp,
         Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}

-
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                              kPointerSize)));

  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}

-
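Operand::PointerOffsetFromSmiKey(r1) turns the still smi-tagged argument count into a byte offset in a single shift. A sketch of that arithmetic, assuming the conventional 32-bit V8 encoding (a smi stores its value shifted left by one bit) rather than anything stated in this patch:

#include <cstdint>

constexpr int kSmiTagSize = 1;       // 32-bit smis: value << 1, tag bit 0
constexpr int kPointerSizeLog2 = 2;  // 4-byte pointers

// bytes = (smi >> kSmiTagSize) << kPointerSizeLog2, folded into one shift.
uintptr_t StackBytesForSmiTaggedArgc(uintptr_t smi_tagged_argc) {
  return smi_tagged_argc << (kPointerSizeLog2 - kSmiTagSize);
}
// LeaveArgumentsAdaptorFrame then adds one extra kPointerSize on top of this
// to drop the receiver as well.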
// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : argumentsList
  // -- r1 : target
  // -- r3 : new.target (checked to be constructor or undefined)
  // -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
(...skipping 303 matching lines...)

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(r1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

-
namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : the number of arguments (not including the receiver)
  // -- r1 : target (checked to be a JSBoundFunction)
  // -- r3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r2 and length of that into r4.
(...skipping 58 matching lines...)
      __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ add(r0, r0, Operand(1));
      __ b(gt, &loop);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace

-
2559 // static 2517 // static
2560 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, 2518 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2561 TailCallMode tail_call_mode) { 2519 TailCallMode tail_call_mode) {
2562 // ----------- S t a t e ------------- 2520 // ----------- S t a t e -------------
2563 // -- r0 : the number of arguments (not including the receiver) 2521 // -- r0 : the number of arguments (not including the receiver)
2564 // -- r1 : the function to call (checked to be a JSBoundFunction) 2522 // -- r1 : the function to call (checked to be a JSBoundFunction)
2565 // ----------------------------------- 2523 // -----------------------------------
2566 __ AssertBoundFunction(r1); 2524 __ AssertBoundFunction(r1);
2567 2525
2568 if (tail_call_mode == TailCallMode::kAllow) { 2526 if (tail_call_mode == TailCallMode::kAllow) {
2569 PrepareForTailCall(masm, r0, r3, r4, r5); 2527 PrepareForTailCall(masm, r0, r3, r4, r5);
2570 } 2528 }
2571 2529
2572 // Patch the receiver to [[BoundThis]]. 2530 // Patch the receiver to [[BoundThis]].
2573 __ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset)); 2531 __ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
2574 __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2)); 2532 __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
2575 2533
2576 // Push the [[BoundArguments]] onto the stack. 2534 // Push the [[BoundArguments]] onto the stack.
2577 Generate_PushBoundArguments(masm); 2535 Generate_PushBoundArguments(masm);
2578 2536
2579 // Call the [[BoundTargetFunction]] via the Call builtin. 2537 // Call the [[BoundTargetFunction]] via the Call builtin.
2580 __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset)); 2538 __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
2581 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, 2539 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2582 masm->isolate()))); 2540 masm->isolate())));
2583 __ ldr(ip, MemOperand(ip)); 2541 __ ldr(ip, MemOperand(ip));
2584 __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); 2542 __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
2585 } 2543 }
2586 2544
2587
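At the JS level, calling a bound function means: replace the receiver with [[BoundThis]], put [[BoundArguments]] in front of the call-site arguments, then call [[BoundTargetFunction]] through the generic Call builtin, which is exactly the sequence above. A rough TypeScript model, where the __bound* properties are hypothetical stand-ins for the internal slots:

    function callBound(bound: any, callArgs: any[]): any {
      const target = bound.__boundTarget;   // stand-in for [[BoundTargetFunction]]
      const thisArg = bound.__boundThis;    // stand-in for [[BoundThis]]
      const boundArgs = bound.__boundArgs;  // stand-in for [[BoundArguments]]
      // Bound arguments come first, matching the stack layout that
      // Generate_PushBoundArguments builds.
      return Reflect.apply(target, thisArg, [...boundArgs, ...callArgs]);
    }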
2588 // static 2545 // static
2589 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, 2546 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2590 TailCallMode tail_call_mode) { 2547 TailCallMode tail_call_mode) {
2591 // ----------- S t a t e ------------- 2548 // ----------- S t a t e -------------
2592 // -- r0 : the number of arguments (not including the receiver) 2549 // -- r0 : the number of arguments (not including the receiver)
2593 // -- r1 : the target to call (can be any Object). 2550 // -- r1 : the target to call (can be any Object).
2594 // ----------------------------------- 2551 // -----------------------------------
2595 2552
2596 Label non_callable, non_function, non_smi; 2553 Label non_callable, non_function, non_smi;
2597 __ JumpIfSmi(r1, &non_callable); 2554 __ JumpIfSmi(r1, &non_callable);
(...skipping 40 matching lines...)
2638 2595
2639 // 3. Call to something that is not callable. 2596 // 3. Call to something that is not callable.
2640 __ bind(&non_callable); 2597 __ bind(&non_callable);
2641 { 2598 {
2642 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 2599 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2643 __ Push(r1); 2600 __ Push(r1);
2644 __ CallRuntime(Runtime::kThrowCalledNonCallable); 2601 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2645 } 2602 }
2646 } 2603 }
2647 2604
2648
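The non_callable label is reached only after the Smi check and every callable map check above it have failed; the observable behaviour is the ordinary TypeError for calling a non-function:

    const notCallable: any = 42;

    try {
      notCallable();  // falls through every callable case to non_callable
    } catch (e) {
      // TypeError: notCallable is not a function
      // (raised via Runtime::kThrowCalledNonCallable)
    }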
2649 // static 2605 // static
2650 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { 2606 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2651 // ----------- S t a t e ------------- 2607 // ----------- S t a t e -------------
2652 // -- r0 : the number of arguments (not including the receiver) 2608 // -- r0 : the number of arguments (not including the receiver)
2653 // -- r1 : the constructor to call (checked to be a JSFunction) 2609 // -- r1 : the constructor to call (checked to be a JSFunction)
2654 // -- r3 : the new target (checked to be a constructor) 2610 // -- r3 : the new target (checked to be a constructor)
2655 // ----------------------------------- 2611 // -----------------------------------
2656 __ AssertFunction(r1); 2612 __ AssertFunction(r1);
2657 2613
 2658 // Calling convention for function-specific ConstructStubs requires 2614 // Calling convention for function-specific ConstructStubs requires
2659 // r2 to contain either an AllocationSite or undefined. 2615 // r2 to contain either an AllocationSite or undefined.
2660 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2616 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2661 2617
2662 // Tail call to the function-specific construct stub (still in the caller 2618 // Tail call to the function-specific construct stub (still in the caller
2663 // context at this point). 2619 // context at this point).
2664 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 2620 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
2665 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset)); 2621 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
2666 __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); 2622 __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
2667 } 2623 }
2668 2624
2669
2670 // static 2625 // static
2671 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { 2626 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2672 // ----------- S t a t e ------------- 2627 // ----------- S t a t e -------------
2673 // -- r0 : the number of arguments (not including the receiver) 2628 // -- r0 : the number of arguments (not including the receiver)
2674 // -- r1 : the function to call (checked to be a JSBoundFunction) 2629 // -- r1 : the function to call (checked to be a JSBoundFunction)
2675 // -- r3 : the new target (checked to be a constructor) 2630 // -- r3 : the new target (checked to be a constructor)
2676 // ----------------------------------- 2631 // -----------------------------------
2677 __ AssertBoundFunction(r1); 2632 __ AssertBoundFunction(r1);
2678 2633
2679 // Push the [[BoundArguments]] onto the stack. 2634 // Push the [[BoundArguments]] onto the stack.
2680 Generate_PushBoundArguments(masm); 2635 Generate_PushBoundArguments(masm);
2681 2636
2682 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. 2637 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2683 __ cmp(r1, r3); 2638 __ cmp(r1, r3);
2684 __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset), 2639 __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
2685 eq); 2640 eq);
2686 2641
2687 // Construct the [[BoundTargetFunction]] via the Construct builtin. 2642 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2688 __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset)); 2643 __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
2689 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); 2644 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
2690 __ ldr(ip, MemOperand(ip)); 2645 __ ldr(ip, MemOperand(ip));
2691 __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); 2646 __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
2692 } 2647 }
2693 2648
2694
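Constructing through a bound function prepends [[BoundArguments]] and forwards to Construct on [[BoundTargetFunction]]; new.target is swapped for the bound target only when it equals the bound function itself, which is what the cmp/conditional-ldr pair implements. Roughly, in TypeScript (again with hypothetical __bound* stand-ins for the internal slots):

    function constructBound(bound: any, args: any[], newTarget: any): object {
      const target = bound.__boundTarget;   // stand-in for [[BoundTargetFunction]]
      const boundArgs = bound.__boundArgs;  // stand-in for [[BoundArguments]]
      const nt = newTarget === bound ? target : newTarget;
      return Reflect.construct(target, [...boundArgs, ...args], nt);
    }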
2695 // static 2649 // static
2696 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { 2650 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2697 // ----------- S t a t e ------------- 2651 // ----------- S t a t e -------------
2698 // -- r0 : the number of arguments (not including the receiver) 2652 // -- r0 : the number of arguments (not including the receiver)
2699 // -- r1 : the constructor to call (checked to be a JSProxy) 2653 // -- r1 : the constructor to call (checked to be a JSProxy)
2700 // -- r3 : the new target (either the same as the constructor or 2654 // -- r3 : the new target (either the same as the constructor or
2701 // the JSFunction on which new was invoked initially) 2655 // the JSFunction on which new was invoked initially)
2702 // ----------------------------------- 2656 // -----------------------------------
2703 2657
2704 // Call into the Runtime for Proxy [[Construct]]. 2658 // Call into the Runtime for Proxy [[Construct]].
2705 __ Push(r1); 2659 __ Push(r1);
2706 __ Push(r3); 2660 __ Push(r3);
2707 // Include the pushed new_target, constructor and the receiver. 2661 // Include the pushed new_target, constructor and the receiver.
2708 __ add(r0, r0, Operand(3)); 2662 __ add(r0, r0, Operand(3));
2709 // Tail-call to the runtime. 2663 // Tail-call to the runtime.
2710 __ JumpToExternalReference( 2664 __ JumpToExternalReference(
2711 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); 2665 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2712 } 2666 }
2713 2667
2714
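Proxy construction is not expanded in generated code at all: the builtin just pushes the constructor and new.target and tail-calls the runtime, which in turn invokes the handler's 'construct' trap. For reference, the trap as seen from JS:

    const handler: ProxyHandler<object> = {
      construct(target, args, newTarget) {
        // Invoked for `new proxied(...args)`; must return an object.
        return Reflect.construct(target as any, args, newTarget);
      },
    };

    const proxied = new Proxy(class {}, handler);
    new (proxied as any)(1, 2);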
2715 // static 2668 // static
2716 void Builtins::Generate_Construct(MacroAssembler* masm) { 2669 void Builtins::Generate_Construct(MacroAssembler* masm) {
2717 // ----------- S t a t e ------------- 2670 // ----------- S t a t e -------------
2718 // -- r0 : the number of arguments (not including the receiver) 2671 // -- r0 : the number of arguments (not including the receiver)
2719 // -- r1 : the constructor to call (can be any Object) 2672 // -- r1 : the constructor to call (can be any Object)
2720 // -- r3 : the new target (either the same as the constructor or 2673 // -- r3 : the new target (either the same as the constructor or
2721 // the JSFunction on which new was invoked initially) 2674 // the JSFunction on which new was invoked initially)
2722 // ----------------------------------- 2675 // -----------------------------------
2723 2676
2724 // Check if target is a Smi. 2677 // Check if target is a Smi.
(...skipping 231 matching lines...)
2956 __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); 2909 __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2957 __ Call(r4); 2910 __ Call(r4);
2958 2911
2959 // Store offset of return address for deoptimizer. 2912 // Store offset of return address for deoptimizer.
2960 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); 2913 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2961 2914
2962 // Exit frame and return. 2915 // Exit frame and return.
2963 LeaveArgumentsAdaptorFrame(masm); 2916 LeaveArgumentsAdaptorFrame(masm);
2964 __ Jump(lr); 2917 __ Jump(lr);
2965 2918
2966
2967 // ------------------------------------------- 2919 // -------------------------------------------
 2968 // Don't adapt arguments. 2920 // Don't adapt arguments.
2969 // ------------------------------------------- 2921 // -------------------------------------------
2970 __ bind(&dont_adapt_arguments); 2922 __ bind(&dont_adapt_arguments);
2971 __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); 2923 __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2972 __ Jump(r4); 2924 __ Jump(r4);
2973 2925
2974 __ bind(&stack_overflow); 2926 __ bind(&stack_overflow);
2975 { 2927 {
2976 FrameScope frame(masm, StackFrame::MANUAL); 2928 FrameScope frame(masm, StackFrame::MANUAL);
2977 __ CallRuntime(Runtime::kThrowStackOverflow); 2929 __ CallRuntime(Runtime::kThrowStackOverflow);
2978 __ bkpt(0); 2930 __ bkpt(0);
2979 } 2931 }
2980 } 2932 }
2981 2933
2982
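The adapt/dont_adapt split implements ordinary JS parameter-count mismatch handling: missing formals read as undefined and surplus actuals are still materialised; the adaptor frame roughly just reshuffles the stack so the callee sees its expected count. Observable behaviour, for reference:

    function f(a?: number, b?: number): Array<number | undefined> {
      return [a, b];
    }

    f(1);                 // fewer actuals than formals: b is undefined
    (f as any)(1, 2, 3);  // surplus actuals still reachable (e.g. via `arguments` in sloppy functions)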
2983 #undef __ 2934 #undef __
2984 2935
2985 } // namespace internal 2936 } // namespace internal
2986 } // namespace v8 2937 } // namespace v8
2987 2938
2988 #endif // V8_TARGET_ARCH_ARM 2939 #endif // V8_TARGET_ARCH_ARM