OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
11 #include "src/runtime/runtime.h" | 11 #include "src/runtime/runtime.h" |
12 | 12 |
13 namespace v8 { | 13 namespace v8 { |
14 namespace internal { | 14 namespace internal { |
15 | 15 |
16 | |
17 #define __ ACCESS_MASM(masm) | 16 #define __ ACCESS_MASM(masm) |
18 | 17 |
19 | |
20 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id, | 18 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id, |
21 ExitFrameType exit_frame_type) { | 19 ExitFrameType exit_frame_type) { |
22 // ----------- S t a t e ------------- | 20 // ----------- S t a t e ------------- |
23 // -- r3 : number of arguments excluding receiver | 21 // -- r3 : number of arguments excluding receiver |
24 // -- r4 : target | 22 // -- r4 : target |
25 // -- r6 : new.target | 23 // -- r6 : new.target |
26 // -- sp[0] : last argument | 24 // -- sp[0] : last argument |
27 // -- ... | 25 // -- ... |
28 // -- sp[4 * (argc - 1)] : first argument | 26 // -- sp[4 * (argc - 1)] : first argument |
29 // -- sp[4 * argc] : receiver | 27 // -- sp[4 * argc] : receiver |
(...skipping 13 matching lines...) |
43 | 41 |
44 // Insert extra arguments. | 42 // Insert extra arguments. |
45 __ SmiTag(r3); | 43 __ SmiTag(r3); |
46 __ Push(r3, r4, r6); | 44 __ Push(r3, r4, r6); |
47 __ SmiUntag(r3); | 45 __ SmiUntag(r3); |
48 | 46 |
49 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), | 47 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), |
50 exit_frame_type == BUILTIN_EXIT); | 48 exit_frame_type == BUILTIN_EXIT); |
51 } | 49 } |
52 | 50 |
53 | |
54 // Load the built-in InternalArray function from the current context. | 51 // Load the built-in InternalArray function from the current context. |
55 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, | 52 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, |
56 Register result) { | 53 Register result) { |
57 // Load the InternalArray function from the current native context. | 54 // Load the InternalArray function from the current native context. |
58 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); | 55 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); |
59 } | 56 } |
60 | 57 |
61 | |
62 // Load the built-in Array function from the current context. | 58 // Load the built-in Array function from the current context. |
63 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { | 59 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { |
64 // Load the Array function from the current native context. | 60 // Load the Array function from the current native context. |
65 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); | 61 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); |
66 } | 62 } |
67 | 63 |
68 | |
69 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { | 64 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { |
70 // ----------- S t a t e ------------- | 65 // ----------- S t a t e ------------- |
71 // -- r3 : number of arguments | 66 // -- r3 : number of arguments |
72 // -- lr : return address | 67 // -- lr : return address |
73 // -- sp[...]: constructor arguments | 68 // -- sp[...]: constructor arguments |
74 // ----------------------------------- | 69 // ----------------------------------- |
75 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | 70 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
76 | 71 |
77 // Get the InternalArray function. | 72 // Get the InternalArray function. |
78 GenerateLoadInternalArrayFunction(masm, r4); | 73 GenerateLoadInternalArrayFunction(masm, r4); |
79 | 74 |
80 if (FLAG_debug_code) { | 75 if (FLAG_debug_code) { |
81 // Initial map for the builtin InternalArray functions should be maps. | 76 // Initial map for the builtin InternalArray functions should be maps. |
82 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); | 77 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); |
83 __ TestIfSmi(r5, r0); | 78 __ TestIfSmi(r5, r0); |
84 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0); | 79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0); |
85 __ CompareObjectType(r5, r6, r7, MAP_TYPE); | 80 __ CompareObjectType(r5, r6, r7, MAP_TYPE); |
86 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); | 81 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); |
87 } | 82 } |
88 | 83 |
89 // Run the native code for the InternalArray function called as a normal | 84 // Run the native code for the InternalArray function called as a normal |
90 // function. | 85 // function. |
91 // tail call a stub | 86 // tail call a stub |
92 InternalArrayConstructorStub stub(masm->isolate()); | 87 InternalArrayConstructorStub stub(masm->isolate()); |
93 __ TailCallStub(&stub); | 88 __ TailCallStub(&stub); |
94 } | 89 } |
95 | 90 |
96 | |
97 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { | 91 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { |
98 // ----------- S t a t e ------------- | 92 // ----------- S t a t e ------------- |
99 // -- r3 : number of arguments | 93 // -- r3 : number of arguments |
100 // -- lr : return address | 94 // -- lr : return address |
101 // -- sp[...]: constructor arguments | 95 // -- sp[...]: constructor arguments |
102 // ----------------------------------- | 96 // ----------------------------------- |
103 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | 97 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
104 | 98 |
105 // Get the Array function. | 99 // Get the Array function. |
106 GenerateLoadArrayFunction(masm, r4); | 100 GenerateLoadArrayFunction(masm, r4); |
107 | 101 |
108 if (FLAG_debug_code) { | 102 if (FLAG_debug_code) { |
109 // Initial map for the builtin Array functions should be maps. | 103 // Initial map for the builtin Array functions should be maps. |
110 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); | 104 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); |
111 __ TestIfSmi(r5, r0); | 105 __ TestIfSmi(r5, r0); |
112 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); | 106 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); |
113 __ CompareObjectType(r5, r6, r7, MAP_TYPE); | 107 __ CompareObjectType(r5, r6, r7, MAP_TYPE); |
114 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); | 108 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); |
115 } | 109 } |
116 | 110 |
117 __ mr(r6, r4); | 111 __ mr(r6, r4); |
118 // Run the native code for the Array function called as a normal function. | 112 // Run the native code for the Array function called as a normal function. |
119 // tail call a stub | 113 // tail call a stub |
120 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 114 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
121 ArrayConstructorStub stub(masm->isolate()); | 115 ArrayConstructorStub stub(masm->isolate()); |
122 __ TailCallStub(&stub); | 116 __ TailCallStub(&stub); |
123 } | 117 } |
124 | 118 |
125 | |
126 // static | 119 // static |
127 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) { | 120 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) { |
128 // ----------- S t a t e ------------- | 121 // ----------- S t a t e ------------- |
129 // -- r3 : number of arguments | 122 // -- r3 : number of arguments |
130 // -- r4 : function | 123 // -- r4 : function |
131 // -- cp : context | 124 // -- cp : context |
132 // -- lr : return address | 125 // -- lr : return address |
133 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) | 126 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) |
134 // -- sp[argc * 4] : receiver | 127 // -- sp[argc * 4] : receiver |
135 // ----------------------------------- | 128 // ----------------------------------- |
(...skipping 135 matching lines...) |
271 __ Drop(r5); | 264 __ Drop(r5); |
272 __ Ret(1); | 265 __ Ret(1); |
273 } | 266 } |
274 | 267 |
275 // 2b. No arguments, return +0. | 268 // 2b. No arguments, return +0. |
276 __ bind(&no_arguments); | 269 __ bind(&no_arguments); |
277 __ LoadSmiLiteral(r3, Smi::FromInt(0)); | 270 __ LoadSmiLiteral(r3, Smi::FromInt(0)); |
278 __ Ret(1); | 271 __ Ret(1); |
279 } | 272 } |
280 | 273 |
281 | |
282 // static | 274 // static |
283 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) { | 275 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) { |
284 // ----------- S t a t e ------------- | 276 // ----------- S t a t e ------------- |
285 // -- r3 : number of arguments | 277 // -- r3 : number of arguments |
286 // -- r4 : constructor function | 278 // -- r4 : constructor function |
287 // -- r6 : new target | 279 // -- r6 : new target |
288 // -- cp : context | 280 // -- cp : context |
289 // -- lr : return address | 281 // -- lr : return address |
290 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) | 282 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) |
291 // -- sp[argc * 4] : receiver | 283 // -- sp[argc * 4] : receiver |
(...skipping 62 matching lines...) |
354 } | 346 } |
355 __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0); | 347 __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0); |
356 | 348 |
357 __ bind(&drop_frame_and_ret); | 349 __ bind(&drop_frame_and_ret); |
358 { | 350 { |
359 __ Drop(r9); | 351 __ Drop(r9); |
360 __ Ret(1); | 352 __ Ret(1); |
361 } | 353 } |
362 } | 354 } |
363 | 355 |
364 | |
365 // static | 356 // static |
366 void Builtins::Generate_StringConstructor(MacroAssembler* masm) { | 357 void Builtins::Generate_StringConstructor(MacroAssembler* masm) { |
367 // ----------- S t a t e ------------- | 358 // ----------- S t a t e ------------- |
368 // -- r3 : number of arguments | 359 // -- r3 : number of arguments |
369 // -- r4 : constructor function | 360 // -- r4 : constructor function |
370 // -- cp : context | 361 // -- cp : context |
371 // -- lr : return address | 362 // -- lr : return address |
372 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) | 363 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) |
373 // -- sp[argc * 4] : receiver | 364 // -- sp[argc * 4] : receiver |
374 // ----------------------------------- | 365 // ----------------------------------- |
(...skipping 134 matching lines...) |
509 } | 500 } |
510 __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0); | 501 __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0); |
511 | 502 |
512 __ bind(&drop_frame_and_ret); | 503 __ bind(&drop_frame_and_ret); |
513 { | 504 { |
514 __ Drop(r9); | 505 __ Drop(r9); |
515 __ Ret(1); | 506 __ Ret(1); |
516 } | 507 } |
517 } | 508 } |
518 | 509 |
519 | |
520 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 510 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
521 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 511 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
522 __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset)); | 512 __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset)); |
523 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); | 513 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); |
524 __ JumpToJSEntry(ip); | 514 __ JumpToJSEntry(ip); |
525 } | 515 } |
526 | 516 |
527 static void GenerateTailCallToReturnedCode(MacroAssembler* masm, | 517 static void GenerateTailCallToReturnedCode(MacroAssembler* masm, |
528 Runtime::FunctionId function_id) { | 518 Runtime::FunctionId function_id) { |
529 // ----------- S t a t e ------------- | 519 // ----------- S t a t e ------------- |
(...skipping 13 matching lines...) |
543 __ mr(r5, r3); | 533 __ mr(r5, r3); |
544 | 534 |
545 // Restore target function and new target. | 535 // Restore target function and new target. |
546 __ Pop(r3, r4, r6); | 536 __ Pop(r3, r4, r6); |
547 __ SmiUntag(r3); | 537 __ SmiUntag(r3); |
548 } | 538 } |
549 __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag)); | 539 __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag)); |
550 __ JumpToJSEntry(ip); | 540 __ JumpToJSEntry(ip); |
551 } | 541 } |
552 | 542 |
553 | |
554 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 543 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
555 // Checking whether the queued function is ready for install is optional, | 544 // Checking whether the queued function is ready for install is optional, |
556 // since we come across interrupts and stack checks elsewhere. However, | 545 // since we come across interrupts and stack checks elsewhere. However, |
557 // not checking may delay installing ready functions, and always checking | 546 // not checking may delay installing ready functions, and always checking |
558 // would be quite expensive. A good compromise is to first check against | 547 // would be quite expensive. A good compromise is to first check against |
559 // stack limit as a cue for an interrupt signal. | 548 // stack limit as a cue for an interrupt signal. |
560 Label ok; | 549 Label ok; |
561 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 550 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
562 __ cmpl(sp, ip); | 551 __ cmpl(sp, ip); |
563 __ bge(&ok); | 552 __ bge(&ok); |
564 | 553 |
565 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode); | 554 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode); |
566 | 555 |
567 __ bind(&ok); | 556 __ bind(&ok); |
568 GenerateTailCallToSharedCode(masm); | 557 GenerateTailCallToSharedCode(masm); |
569 } | 558 } |
570 | 559 |
571 | |
572 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 560 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
573 bool is_api_function, | 561 bool is_api_function, |
574 bool create_implicit_receiver, | 562 bool create_implicit_receiver, |
575 bool check_derived_construct) { | 563 bool check_derived_construct) { |
576 // ----------- S t a t e ------------- | 564 // ----------- S t a t e ------------- |
577 // -- r3 : number of arguments | 565 // -- r3 : number of arguments |
578 // -- r4 : constructor function | 566 // -- r4 : constructor function |
579 // -- r5 : allocation site or undefined | 567 // -- r5 : allocation site or undefined |
580 // -- r6 : new target | 568 // -- r6 : new target |
581 // -- cp : context | 569 // -- cp : context |
(...skipping 136 matching lines...) |
718 | 706 |
719 __ SmiToPtrArrayOffset(r4, r4); | 707 __ SmiToPtrArrayOffset(r4, r4); |
720 __ add(sp, sp, r4); | 708 __ add(sp, sp, r4); |
721 __ addi(sp, sp, Operand(kPointerSize)); | 709 __ addi(sp, sp, Operand(kPointerSize)); |
722 if (create_implicit_receiver) { | 710 if (create_implicit_receiver) { |
723 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5); | 711 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5); |
724 } | 712 } |
725 __ blr(); | 713 __ blr(); |
726 } | 714 } |
727 | 715 |
728 | |
729 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { | 716 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { |
730 Generate_JSConstructStubHelper(masm, false, true, false); | 717 Generate_JSConstructStubHelper(masm, false, true, false); |
731 } | 718 } |
732 | 719 |
733 | |
734 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { | 720 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { |
735 Generate_JSConstructStubHelper(masm, true, false, false); | 721 Generate_JSConstructStubHelper(masm, true, false, false); |
736 } | 722 } |
737 | 723 |
738 | |
739 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { | 724 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { |
740 Generate_JSConstructStubHelper(masm, false, false, false); | 725 Generate_JSConstructStubHelper(masm, false, false, false); |
741 } | 726 } |
742 | 727 |
743 | |
744 void Builtins::Generate_JSBuiltinsConstructStubForDerived( | 728 void Builtins::Generate_JSBuiltinsConstructStubForDerived( |
745 MacroAssembler* masm) { | 729 MacroAssembler* masm) { |
746 Generate_JSConstructStubHelper(masm, false, false, true); | 730 Generate_JSConstructStubHelper(masm, false, false, true); |
747 } | 731 } |
748 | 732 |
749 // static | 733 // static |
750 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { | 734 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { |
751 // ----------- S t a t e ------------- | 735 // ----------- S t a t e ------------- |
752 // -- r3 : the value to pass to the generator | 736 // -- r3 : the value to pass to the generator |
753 // -- r4 : the JSGeneratorObject to resume | 737 // -- r4 : the JSGeneratorObject to resume |
(...skipping 163 matching lines...) |
917 } | 901 } |
918 __ b(&stepping_prepared); | 902 __ b(&stepping_prepared); |
919 } | 903 } |
920 | 904 |
921 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { | 905 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { |
922 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 906 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
923 __ push(r4); | 907 __ push(r4); |
924 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); | 908 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); |
925 } | 909 } |
926 | 910 |
927 | |
928 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; | 911 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; |
929 | 912 |
930 | |
931 // Clobbers r5; preserves all other registers. | 913 // Clobbers r5; preserves all other registers. |
932 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, | 914 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, |
933 IsTagged argc_is_tagged) { | 915 IsTagged argc_is_tagged) { |
934 // Check the stack for overflow. We are not trying to catch | 916 // Check the stack for overflow. We are not trying to catch |
935 // interruptions (e.g. debug break and preemption) here, so the "real stack | 917 // interruptions (e.g. debug break and preemption) here, so the "real stack |
936 // limit" is checked. | 918 // limit" is checked. |
937 Label okay; | 919 Label okay; |
938 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); | 920 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); |
939 // Make r5 the space we have left. The stack might already be overflowed | 921 // Make r5 the space we have left. The stack might already be overflowed |
940 // here which will cause r5 to become negative. | 922 // here which will cause r5 to become negative. |
941 __ sub(r5, sp, r5); | 923 __ sub(r5, sp, r5); |
942 // Check if the arguments will overflow the stack. | 924 // Check if the arguments will overflow the stack. |
943 if (argc_is_tagged == kArgcIsSmiTagged) { | 925 if (argc_is_tagged == kArgcIsSmiTagged) { |
944 __ SmiToPtrArrayOffset(r0, argc); | 926 __ SmiToPtrArrayOffset(r0, argc); |
945 } else { | 927 } else { |
946 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); | 928 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); |
947 __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2)); | 929 __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2)); |
948 } | 930 } |
949 __ cmp(r5, r0); | 931 __ cmp(r5, r0); |
950 __ bgt(&okay); // Signed comparison. | 932 __ bgt(&okay); // Signed comparison. |
951 | 933 |
952 // Out of stack space. | 934 // Out of stack space. |
953 __ CallRuntime(Runtime::kThrowStackOverflow); | 935 __ CallRuntime(Runtime::kThrowStackOverflow); |
954 | 936 |
955 __ bind(&okay); | 937 __ bind(&okay); |
956 } | 938 } |
957 | 939 |
958 | |
959 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 940 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
960 bool is_construct) { | 941 bool is_construct) { |
961 // Called from Generate_JS_Entry | 942 // Called from Generate_JS_Entry |
962 // r3: new.target | 943 // r3: new.target |
963 // r4: function | 944 // r4: function |
964 // r5: receiver | 945 // r5: receiver |
965 // r6: argc | 946 // r6: argc |
966 // r7: argv | 947 // r7: argv |
967 // r0,r8-r9, cp may be clobbered | 948 // r0,r8-r9, cp may be clobbered |
968 ProfileEntryHookStub::MaybeCallEntryHook(masm); | 949 ProfileEntryHookStub::MaybeCallEntryHook(masm); |
(...skipping 55 matching lines...) |
1024 __ Call(builtin, RelocInfo::CODE_TARGET); | 1005 __ Call(builtin, RelocInfo::CODE_TARGET); |
1025 | 1006 |
1026 // Exit the JS frame and remove the parameters (except function), and | 1007 // Exit the JS frame and remove the parameters (except function), and |
1027 // return. | 1008 // return. |
1028 } | 1009 } |
1029 __ blr(); | 1010 __ blr(); |
1030 | 1011 |
1031 // r3: result | 1012 // r3: result |
1032 } | 1013 } |
1033 | 1014 |
1034 | |
1035 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 1015 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
1036 Generate_JSEntryTrampolineHelper(masm, false); | 1016 Generate_JSEntryTrampolineHelper(masm, false); |
1037 } | 1017 } |
1038 | 1018 |
1039 | |
1040 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 1019 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
1041 Generate_JSEntryTrampolineHelper(masm, true); | 1020 Generate_JSEntryTrampolineHelper(masm, true); |
1042 } | 1021 } |
1043 | 1022 |
1044 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) { | 1023 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) { |
1045 Register args_count = scratch; | 1024 Register args_count = scratch; |
1046 | 1025 |
1047 // Get the arguments + receiver count. | 1026 // Get the arguments + receiver count. |
1048 __ LoadP(args_count, | 1027 __ LoadP(args_count, |
1049 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); | 1028 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); |
(...skipping 232 matching lines...) |
1282 Code::kHeaderSize - kHeapObjectTag)); | 1261 Code::kHeaderSize - kHeapObjectTag)); |
1283 __ mtlr(r0); | 1262 __ mtlr(r0); |
1284 | 1263 |
1285 // Initialize the dispatch table register. | 1264 // Initialize the dispatch table register. |
1286 __ mov(kInterpreterDispatchTableRegister, | 1265 __ mov(kInterpreterDispatchTableRegister, |
1287 Operand(ExternalReference::interpreter_dispatch_table_address( | 1266 Operand(ExternalReference::interpreter_dispatch_table_address( |
1288 masm->isolate()))); | 1267 masm->isolate()))); |
1289 | 1268 |
1290 // Get the bytecode array pointer from the frame. | 1269 // Get the bytecode array pointer from the frame. |
1291 __ LoadP(kInterpreterBytecodeArrayRegister, | 1270 __ LoadP(kInterpreterBytecodeArrayRegister, |
1292 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); | 1271 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); |
1293 | 1272 |
1294 if (FLAG_debug_code) { | 1273 if (FLAG_debug_code) { |
1295 // Check function data field is actually a BytecodeArray object. | 1274 // Check function data field is actually a BytecodeArray object. |
1296 __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0); | 1275 __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0); |
1297 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); | 1276 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); |
1298 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg, | 1277 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg, |
1299 BYTECODE_ARRAY_TYPE); | 1278 BYTECODE_ARRAY_TYPE); |
1300 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); | 1279 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); |
1301 } | 1280 } |
1302 | 1281 |
1303 // Get the target bytecode offset from the frame. | 1282 // Get the target bytecode offset from the frame. |
1304 __ LoadP(kInterpreterBytecodeOffsetRegister, | 1283 __ LoadP(kInterpreterBytecodeOffsetRegister, |
1305 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); | 1284 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); |
1306 __ SmiUntag(kInterpreterBytecodeOffsetRegister); | 1285 __ SmiUntag(kInterpreterBytecodeOffsetRegister); |
1307 | 1286 |
1308 // Dispatch to the target bytecode. | 1287 // Dispatch to the target bytecode. |
1309 __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister, | 1288 __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister, |
1310 kInterpreterBytecodeOffsetRegister)); | 1289 kInterpreterBytecodeOffsetRegister)); |
1311 __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2)); | 1290 __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2)); |
1312 __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); | 1291 __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); |
1313 __ Jump(ip); | 1292 __ Jump(ip); |
1314 } | 1293 } |
1315 | 1294 |
1316 | |
1317 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | 1295 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |
1318 // ----------- S t a t e ------------- | 1296 // ----------- S t a t e ------------- |
1319 // -- r3 : argument count (preserved for callee) | 1297 // -- r3 : argument count (preserved for callee) |
1320 // -- r6 : new target (preserved for callee) | 1298 // -- r6 : new target (preserved for callee) |
1321 // -- r4 : target function (preserved for callee) | 1299 // -- r4 : target function (preserved for callee) |
1322 // ----------------------------------- | 1300 // ----------------------------------- |
1323 // First lookup code, maybe we don't need to compile! | 1301 // First lookup code, maybe we don't need to compile! |
1324 Label gotta_call_runtime; | 1302 Label gotta_call_runtime; |
1325 Label maybe_call_runtime; | 1303 Label maybe_call_runtime; |
1326 Label try_shared; | 1304 Label try_shared; |
(...skipping 133 matching lines...) |
1460 | 1438 |
1461 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) { | 1439 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) { |
1462 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline); | 1440 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline); |
1463 } | 1441 } |
1464 | 1442 |
1465 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 1443 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
1466 GenerateTailCallToReturnedCode(masm, | 1444 GenerateTailCallToReturnedCode(masm, |
1467 Runtime::kCompileOptimized_NotConcurrent); | 1445 Runtime::kCompileOptimized_NotConcurrent); |
1468 } | 1446 } |
1469 | 1447 |
1470 | |
1471 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | 1448 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { |
1472 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); | 1449 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); |
1473 } | 1450 } |
1474 | 1451 |
1475 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { | 1452 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { |
1476 // ----------- S t a t e ------------- | 1453 // ----------- S t a t e ------------- |
1477 // -- r3 : argument count (preserved for callee) | 1454 // -- r3 : argument count (preserved for callee) |
1478 // -- r4 : new target (preserved for callee) | 1455 // -- r4 : new target (preserved for callee) |
1479 // -- r6 : target function (preserved for callee) | 1456 // -- r6 : target function (preserved for callee) |
1480 // ----------------------------------- | 1457 // ----------------------------------- |
(...skipping 62 matching lines...) |
1543 MacroAssembler* masm) { \ | 1520 MacroAssembler* masm) { \ |
1544 GenerateMakeCodeYoungAgainCommon(masm); \ | 1521 GenerateMakeCodeYoungAgainCommon(masm); \ |
1545 } \ | 1522 } \ |
1546 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ | 1523 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ |
1547 MacroAssembler* masm) { \ | 1524 MacroAssembler* masm) { \ |
1548 GenerateMakeCodeYoungAgainCommon(masm); \ | 1525 GenerateMakeCodeYoungAgainCommon(masm); \ |
1549 } | 1526 } |
1550 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) | 1527 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) |
1551 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR | 1528 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR |
1552 | 1529 |
1553 | |
1554 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { | 1530 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { |
1555 // For now, we are relying on the fact that make_code_young doesn't do any | 1531 // For now, we are relying on the fact that make_code_young doesn't do any |
1556 // garbage collection which allows us to save/restore the registers without | 1532 // garbage collection which allows us to save/restore the registers without |
1557 // worrying about which of them contain pointers. We also don't build an | 1533 // worrying about which of them contain pointers. We also don't build an |
1558 // internal frame to make the code faster, since we shouldn't have to do stack | 1534 // internal frame to make the code faster, since we shouldn't have to do stack |
1559 // crawls in MakeCodeYoung. This seems a bit fragile. | 1535 // crawls in MakeCodeYoung. This seems a bit fragile. |
1560 | 1536 |
1561 // Point r3 at the start of the PlatformCodeAge sequence. | 1537 // Point r3 at the start of the PlatformCodeAge sequence. |
1562 __ mr(r3, ip); | 1538 __ mr(r3, ip); |
1563 | 1539 |
(...skipping 16 matching lines...) |
1580 __ mr(ip, r3); | 1556 __ mr(ip, r3); |
1581 | 1557 |
1582 // Perform prologue operations usually performed by the young code stub. | 1558 // Perform prologue operations usually performed by the young code stub. |
1583 __ PushStandardFrame(r4); | 1559 __ PushStandardFrame(r4); |
1584 | 1560 |
1585 // Jump to point after the code-age stub. | 1561 // Jump to point after the code-age stub. |
1586 __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength)); | 1562 __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength)); |
1587 __ Jump(r3); | 1563 __ Jump(r3); |
1588 } | 1564 } |
1589 | 1565 |
1590 | |
1591 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { | 1566 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { |
1592 GenerateMakeCodeYoungAgainCommon(masm); | 1567 GenerateMakeCodeYoungAgainCommon(masm); |
1593 } | 1568 } |
1594 | 1569 |
1595 | |
1596 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { | 1570 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { |
1597 Generate_MarkCodeAsExecutedOnce(masm); | 1571 Generate_MarkCodeAsExecutedOnce(masm); |
1598 } | 1572 } |
1599 | 1573 |
1600 | |
1601 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 1574 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
1602 SaveFPRegsMode save_doubles) { | 1575 SaveFPRegsMode save_doubles) { |
1603 { | 1576 { |
1604 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1577 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
1605 | 1578 |
1606 // Preserve registers across notification, this is important for compiled | 1579 // Preserve registers across notification, this is important for compiled |
1607 // stubs that tail call the runtime on deopts passing their parameters in | 1580 // stubs that tail call the runtime on deopts passing their parameters in |
1608 // registers. | 1581 // registers. |
1609 __ MultiPush(kJSCallerSaved | kCalleeSaved); | 1582 __ MultiPush(kJSCallerSaved | kCalleeSaved); |
1610 // Pass the function and deoptimization type to the runtime system. | 1583 // Pass the function and deoptimization type to the runtime system. |
1611 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); | 1584 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); |
1612 __ MultiPop(kJSCallerSaved | kCalleeSaved); | 1585 __ MultiPop(kJSCallerSaved | kCalleeSaved); |
1613 } | 1586 } |
1614 | 1587 |
1615 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state | 1588 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state |
1616 __ blr(); // Jump to miss handler | 1589 __ blr(); // Jump to miss handler |
1617 } | 1590 } |
1618 | 1591 |
1619 | |
1620 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 1592 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
1621 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 1593 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
1622 } | 1594 } |
1623 | 1595 |
1624 | |
1625 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 1596 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
1626 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 1597 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
1627 } | 1598 } |
1628 | 1599 |
1629 | |
1630 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 1600 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
1631 Deoptimizer::BailoutType type) { | 1601 Deoptimizer::BailoutType type) { |
1632 { | 1602 { |
1633 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1603 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
1634 // Pass the function and deoptimization type to the runtime system. | 1604 // Pass the function and deoptimization type to the runtime system. |
1635 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type))); | 1605 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type))); |
1636 __ push(r3); | 1606 __ push(r3); |
1637 __ CallRuntime(Runtime::kNotifyDeoptimized); | 1607 __ CallRuntime(Runtime::kNotifyDeoptimized); |
1638 } | 1608 } |
1639 | 1609 |
(...skipping 16 matching lines...) |
1656 r9, | 1626 r9, |
1657 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER))); | 1627 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER))); |
1658 __ bne(&unknown_state); | 1628 __ bne(&unknown_state); |
1659 __ addi(sp, sp, Operand(2 * kPointerSize)); // Remove state. | 1629 __ addi(sp, sp, Operand(2 * kPointerSize)); // Remove state. |
1660 __ Ret(); | 1630 __ Ret(); |
1661 | 1631 |
1662 __ bind(&unknown_state); | 1632 __ bind(&unknown_state); |
1663 __ stop("no cases left"); | 1633 __ stop("no cases left"); |
1664 } | 1634 } |
1665 | 1635 |
1666 | |
1667 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | 1636 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
1668 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 1637 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); |
1669 } | 1638 } |
1670 | 1639 |
1671 | |
1672 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { | 1640 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { |
1673 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); | 1641 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); |
1674 } | 1642 } |
1675 | 1643 |
1676 | |
1677 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 1644 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
1678 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 1645 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
1679 } | 1646 } |
1680 | 1647 |
1681 | |
1682 // Clobbers registers {r7, r8, r9, r10}. | 1648 // Clobbers registers {r7, r8, r9, r10}. |
1683 void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, | 1649 void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, |
1684 Register function_template_info, | 1650 Register function_template_info, |
1685 Label* receiver_check_failed) { | 1651 Label* receiver_check_failed) { |
1686 Register signature = r7; | 1652 Register signature = r7; |
1687 Register map = r8; | 1653 Register map = r8; |
1688 Register constructor = r9; | 1654 Register constructor = r9; |
1689 Register scratch = r10; | 1655 Register scratch = r10; |
1690 | 1656 |
1691 // If there is no signature, return the holder. | 1657 // If there is no signature, return the holder. |
(...skipping 45 matching lines...) |
1737 __ beq(receiver_check_failed, cr0); | 1703 __ beq(receiver_check_failed, cr0); |
1738 | 1704 |
1739 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); | 1705 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); |
1740 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 1706 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
1741 // Iterate. | 1707 // Iterate. |
1742 __ b(&prototype_loop_start); | 1708 __ b(&prototype_loop_start); |
1743 | 1709 |
1744 __ bind(&receiver_check_passed); | 1710 __ bind(&receiver_check_passed); |
1745 } | 1711 } |
1746 | 1712 |
1747 | |
1748 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) { | 1713 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) { |
1749 // ----------- S t a t e ------------- | 1714 // ----------- S t a t e ------------- |
1750 // -- r3 : number of arguments excluding receiver | 1715 // -- r3 : number of arguments excluding receiver |
1751 // -- r4 : callee | 1716 // -- r4 : callee |
1752 // -- lr : return address | 1717 // -- lr : return address |
1753 // -- sp[0] : last argument | 1718 // -- sp[0] : last argument |
1754 // -- ... | 1719 // -- ... |
1755 // -- sp[4 * (argc - 1)] : first argument | 1720 // -- sp[4 * (argc - 1)] : first argument |
1756 // -- sp[4 * argc] : receiver | 1721 // -- sp[4 * argc] : receiver |
1757 // ----------------------------------- | 1722 // ----------------------------------- |
1758 | 1723 |
1759 | |
1760 // Load the FunctionTemplateInfo. | 1724 // Load the FunctionTemplateInfo. |
1761 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 1725 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
1762 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset)); | 1726 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset)); |
1763 | 1727 |
1764 // Do the compatible receiver check. | 1728 // Do the compatible receiver check. |
1765 Label receiver_check_failed; | 1729 Label receiver_check_failed; |
1766 __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2)); | 1730 __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2)); |
1767 __ LoadPX(r5, MemOperand(sp, r11)); | 1731 __ LoadPX(r5, MemOperand(sp, r11)); |
1768 CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed); | 1732 CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed); |
1769 | 1733 |
1770 // Get the callback offset from the FunctionTemplateInfo, and jump to the | 1734 // Get the callback offset from the FunctionTemplateInfo, and jump to the |
1771 // beginning of the code. | 1735 // beginning of the code. |
1772 __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset)); | 1736 __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset)); |
1773 __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset)); | 1737 __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset)); |
1774 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1738 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); |
1775 __ JumpToJSEntry(ip); | 1739 __ JumpToJSEntry(ip); |
1776 | 1740 |
1777 // Compatible receiver check failed: throw an Illegal Invocation exception. | 1741 // Compatible receiver check failed: throw an Illegal Invocation exception. |
1778 __ bind(&receiver_check_failed); | 1742 __ bind(&receiver_check_failed); |
1779 // Drop the arguments (including the receiver); | 1743 // Drop the arguments (including the receiver); |
1780 __ addi(r11, r11, Operand(kPointerSize)); | 1744 __ addi(r11, r11, Operand(kPointerSize)); |
1781 __ add(sp, sp, r11); | 1745 __ add(sp, sp, r11); |
1782 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); | 1746 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); |
1783 } | 1747 } |
1784 | 1748 |
1785 | |
1786 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 1749 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
1787 // Lookup the function in the JavaScript frame. | 1750 // Lookup the function in the JavaScript frame. |
1788 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1751 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1789 { | 1752 { |
1790 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 1753 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
1791 // Pass function as argument. | 1754 // Pass function as argument. |
1792 __ push(r3); | 1755 __ push(r3); |
1793 __ CallRuntime(Runtime::kCompileForOnStackReplacement); | 1756 __ CallRuntime(Runtime::kCompileForOnStackReplacement); |
1794 } | 1757 } |
1795 | 1758 |
(...skipping 26 matching lines...) |
1822 | 1785 |
1823 // Compute the target address = code start + osr_offset | 1786 // Compute the target address = code start + osr_offset |
1824 __ add(r0, r3, r4); | 1787 __ add(r0, r3, r4); |
1825 | 1788 |
1826 // And "return" to the OSR entry point of the function. | 1789 // And "return" to the OSR entry point of the function. |
1827 __ mtlr(r0); | 1790 __ mtlr(r0); |
1828 __ blr(); | 1791 __ blr(); |
1829 } | 1792 } |
1830 } | 1793 } |
1831 | 1794 |
1832 | |
1833 // static | 1795 // static |
1834 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm, | 1796 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm, |
1835 int field_index) { | 1797 int field_index) { |
1836 // ----------- S t a t e ------------- | 1798 // ----------- S t a t e ------------- |
1837 // -- r3 : number of arguments | 1799 // -- r3 : number of arguments |
1838 // -- r4 : function | 1800 // -- r4 : function |
1839 // -- cp : context | 1801 // -- cp : context |
1840 // -- lr : return address | 1802 // -- lr : return address |
1841 // -- sp[0] : receiver | 1803 // -- sp[0] : receiver |
1842 // ----------------------------------- | 1804 // ----------------------------------- |
(...skipping 107 matching lines...) |
1950 } | 1912 } |
1951 | 1913 |
1952 // 4c. The receiver is not callable, throw an appropriate TypeError. | 1914 // 4c. The receiver is not callable, throw an appropriate TypeError. |
1953 __ bind(&receiver_not_callable); | 1915 __ bind(&receiver_not_callable); |
1954 { | 1916 { |
1955 __ StoreP(r4, MemOperand(sp, 0)); | 1917 __ StoreP(r4, MemOperand(sp, 0)); |
1956 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | 1918 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); |
1957 } | 1919 } |
1958 } | 1920 } |
1959 | 1921 |
1960 | |
1961 // static | 1922 // static |
1962 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { | 1923 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { |
1963 // 1. Make sure we have at least one argument. | 1924 // 1. Make sure we have at least one argument. |
1964 // r3: actual number of arguments | 1925 // r3: actual number of arguments |
1965 { | 1926 { |
1966 Label done; | 1927 Label done; |
1967 __ cmpi(r3, Operand::Zero()); | 1928 __ cmpi(r3, Operand::Zero()); |
1968 __ bne(&done); | 1929 __ bne(&done); |
1969 __ PushRoot(Heap::kUndefinedValueRootIndex); | 1930 __ PushRoot(Heap::kUndefinedValueRootIndex); |
1970 __ addi(r3, r3, Operand(1)); | 1931 __ addi(r3, r3, Operand(1)); |
1971 __ bind(&done); | 1932 __ bind(&done); |
1972 } | 1933 } |
1973 | 1934 |
1974 // 2. Get the callable to call (passed as receiver) from the stack. | 1935 // 2. Get the callable to call (passed as receiver) from the stack. |
1975 // r3: actual number of arguments | 1936 // r3: actual number of arguments |
1976 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2)); | 1937 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2)); |
1977 __ LoadPX(r4, MemOperand(sp, r5)); | 1938 __ LoadPX(r4, MemOperand(sp, r5)); |
1978 | 1939 |
1979 // 3. Shift arguments and return address one slot down on the stack | 1940 // 3. Shift arguments and return address one slot down on the stack |
1980 // (overwriting the original receiver). Adjust argument count to make | 1941 // (overwriting the original receiver). Adjust argument count to make |
1981 // the original first argument the new receiver. | 1942 // the original first argument the new receiver. |
1982 // r3: actual number of arguments | 1943 // r3: actual number of arguments |
1983 // r4: callable | 1944 // r4: callable |
1984 { | 1945 { |
1985 Label loop; | 1946 Label loop; |
1986 // Calculate the copy start address (destination). Copy end address is sp. | 1947 // Calculate the copy start address (destination). Copy end address is sp. |
1987 __ add(r5, sp, r5); | 1948 __ add(r5, sp, r5); |
1988 | 1949 |
1989 | |
1990 __ mtctr(r3); | 1950 __ mtctr(r3); |
1991 __ bind(&loop); | 1951 __ bind(&loop); |
1992 __ LoadP(ip, MemOperand(r5, -kPointerSize)); | 1952 __ LoadP(ip, MemOperand(r5, -kPointerSize)); |
1993 __ StoreP(ip, MemOperand(r5)); | 1953 __ StoreP(ip, MemOperand(r5)); |
1994 __ subi(r5, r5, Operand(kPointerSize)); | 1954 __ subi(r5, r5, Operand(kPointerSize)); |
1995 __ bdnz(&loop); | 1955 __ bdnz(&loop); |
1996 // Adjust the actual number of arguments and remove the top element | 1956 // Adjust the actual number of arguments and remove the top element |
1997 // (which is a copy of the last argument). | 1957 // (which is a copy of the last argument). |
1998 __ subi(r3, r3, Operand(1)); | 1958 __ subi(r3, r3, Operand(1)); |
1999 __ pop(); | 1959 __ pop(); |
2000 } | 1960 } |
2001 | 1961 |
2002 // 4. Call the callable. | 1962 // 4. Call the callable. |
2003 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1963 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
2004 } | 1964 } |
2005 | 1965 |
2006 | |
2007 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { | 1966 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { |
2008 // ----------- S t a t e ------------- | 1967 // ----------- S t a t e ------------- |
2009 // -- r3 : argc | 1968 // -- r3 : argc |
2010 // -- sp[0] : argumentsList | 1969 // -- sp[0] : argumentsList |
2011 // -- sp[4] : thisArgument | 1970 // -- sp[4] : thisArgument |
2012 // -- sp[8] : target | 1971 // -- sp[8] : target |
2013 // -- sp[12] : receiver | 1972 // -- sp[12] : receiver |
2014 // ----------------------------------- | 1973 // ----------------------------------- |
2015 | 1974 |
2016 // 1. Load target into r4 (if present), argumentsList into r3 (if present), | 1975 // 1. Load target into r4 (if present), argumentsList into r3 (if present), |
(...skipping 42 matching lines...) |
2059 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | 2018 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
2060 | 2019 |
2061 // 3b. The target is not callable, throw an appropriate TypeError. | 2020 // 3b. The target is not callable, throw an appropriate TypeError. |
2062 __ bind(&target_not_callable); | 2021 __ bind(&target_not_callable); |
2063 { | 2022 { |
2064 __ StoreP(r4, MemOperand(sp, 0)); | 2023 __ StoreP(r4, MemOperand(sp, 0)); |
2065 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | 2024 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); |
2066 } | 2025 } |
2067 } | 2026 } |
2068 | 2027 |
2069 | |
2070 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | 2028 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { |
2071 // ----------- S t a t e ------------- | 2029 // ----------- S t a t e ------------- |
2072 // -- r3 : argc | 2030 // -- r3 : argc |
2073 // -- sp[0] : new.target (optional) | 2031 // -- sp[0] : new.target (optional) |
2074 // -- sp[4] : argumentsList | 2032 // -- sp[4] : argumentsList |
2075 // -- sp[8] : target | 2033 // -- sp[8] : target |
2076 // -- sp[12] : receiver | 2034 // -- sp[12] : receiver |
2077 // ----------------------------------- | 2035 // ----------------------------------- |
2078 | 2036 |
2079 // 1. Load target into r4 (if present), argumentsList into r3 (if present), | 2037 // 1. Load target into r4 (if present), argumentsList into r3 (if present), |
(...skipping 57 matching lines...) |
2137 } | 2095 } |
2138 | 2096 |
2139 // 4c. The new.target is not a constructor, throw an appropriate TypeError. | 2097 // 4c. The new.target is not a constructor, throw an appropriate TypeError. |
2140 __ bind(&new_target_not_constructor); | 2098 __ bind(&new_target_not_constructor); |
2141 { | 2099 { |
2142 __ StoreP(r6, MemOperand(sp, 0)); | 2100 __ StoreP(r6, MemOperand(sp, 0)); |
2143 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); | 2101 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); |
2144 } | 2102 } |
2145 } | 2103 } |
2146 | 2104 |
2147 | |
2148 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | 2105 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, |
2149 Label* stack_overflow) { | 2106 Label* stack_overflow) { |
2150 // ----------- S t a t e ------------- | 2107 // ----------- S t a t e ------------- |
2151 // -- r3 : actual number of arguments | 2108 // -- r3 : actual number of arguments |
2152 // -- r4 : function (passed through to callee) | 2109 // -- r4 : function (passed through to callee) |
2153 // -- r5 : expected number of arguments | 2110 // -- r5 : expected number of arguments |
2154 // -- r6 : new target (passed through to callee) | 2111 // -- r6 : new target (passed through to callee) |
2155 // ----------------------------------- | 2112 // ----------------------------------- |
2156 // Check the stack for overflow. We are not trying to catch | 2113 // Check the stack for overflow. We are not trying to catch |
2157 // interruptions (e.g. debug break and preemption) here, so the "real stack | 2114 // interruptions (e.g. debug break and preemption) here, so the "real stack |
2158 // limit" is checked. | 2115 // limit" is checked. |
2159 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex); | 2116 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex); |
2160 // Make r8 the space we have left. The stack might already be overflowed | 2117 // Make r8 the space we have left. The stack might already be overflowed |
2161 // here which will cause r8 to become negative. | 2118 // here which will cause r8 to become negative. |
2162 __ sub(r8, sp, r8); | 2119 __ sub(r8, sp, r8); |
2163 // Check if the arguments will overflow the stack. | 2120 // Check if the arguments will overflow the stack. |
2164 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2)); | 2121 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2)); |
2165 __ cmp(r8, r0); | 2122 __ cmp(r8, r0); |
2166 __ ble(stack_overflow); // Signed comparison. | 2123 __ ble(stack_overflow); // Signed comparison. |
2167 } | 2124 } |
2168 | 2125 |
2169 | |
2170 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | 2126 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { |
2171 __ SmiTag(r3); | 2127 __ SmiTag(r3); |
2172 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2128 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
2173 __ mflr(r0); | 2129 __ mflr(r0); |
2174 __ push(r0); | 2130 __ push(r0); |
2175 if (FLAG_enable_embedded_constant_pool) { | 2131 if (FLAG_enable_embedded_constant_pool) { |
2176 __ Push(fp, kConstantPoolRegister, r7, r4, r3); | 2132 __ Push(fp, kConstantPoolRegister, r7, r4, r3); |
2177 } else { | 2133 } else { |
2178 __ Push(fp, r7, r4, r3); | 2134 __ Push(fp, r7, r4, r3); |
2179 } | 2135 } |
2180 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + | 2136 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + |
2181 kPointerSize)); | 2137 kPointerSize)); |
2182 } | 2138 } |
2183 | 2139 |
2184 | |
2185 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | 2140 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { |
2186 // ----------- S t a t e ------------- | 2141 // ----------- S t a t e ------------- |
2187 // -- r3 : result being passed through | 2142 // -- r3 : result being passed through |
2188 // ----------------------------------- | 2143 // ----------------------------------- |
2189 // Get the number of arguments passed (as a smi), tear down the frame and | 2144 // Get the number of arguments passed (as a smi), tear down the frame and |
2190 // then tear down the parameters. | 2145 // then tear down the parameters. |
2191 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | 2146 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + |
2192 kPointerSize))); | 2147 kPointerSize))); |
2193 int stack_adjustment = kPointerSize; // adjust for receiver | 2148 int stack_adjustment = kPointerSize; // adjust for receiver |
2194 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment); | 2149 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment); |
2195 __ SmiToPtrArrayOffset(r0, r4); | 2150 __ SmiToPtrArrayOffset(r0, r4); |
2196 __ add(sp, sp, r0); | 2151 __ add(sp, sp, r0); |
2197 } | 2152 } |
2198 | 2153 |
2199 | |
2200 // static | 2154 // static |
2201 void Builtins::Generate_Apply(MacroAssembler* masm) { | 2155 void Builtins::Generate_Apply(MacroAssembler* masm) { |
2202 // ----------- S t a t e ------------- | 2156 // ----------- S t a t e ------------- |
2203 // -- r3 : argumentsList | 2157 // -- r3 : argumentsList |
2204 // -- r4 : target | 2158 // -- r4 : target |
2205 // -- r6 : new.target (checked to be constructor or undefined) | 2159 // -- r6 : new.target (checked to be constructor or undefined) |
2206 // -- sp[0] : thisArgument | 2160 // -- sp[0] : thisArgument |
2207 // ----------------------------------- | 2161 // ----------------------------------- |
2208 | 2162 |
2209 // Create the list of arguments from the array-like argumentsList. | 2163 // Create the list of arguments from the array-like argumentsList. |
(...skipping 309 matching lines...) |
2519 | 2473 |
2520 // The function is a "classConstructor", need to raise an exception. | 2474 // The function is a "classConstructor", need to raise an exception. |
2521 __ bind(&class_constructor); | 2475 __ bind(&class_constructor); |
2522 { | 2476 { |
2523 FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL); | 2477 FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL); |
2524 __ push(r4); | 2478 __ push(r4); |
2525 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); | 2479 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); |
2526 } | 2480 } |
2527 } | 2481 } |
2528 | 2482 |
2529 | |
2530 namespace { | 2483 namespace { |
2531 | 2484 |
2532 void Generate_PushBoundArguments(MacroAssembler* masm) { | 2485 void Generate_PushBoundArguments(MacroAssembler* masm) { |
2533 // ----------- S t a t e ------------- | 2486 // ----------- S t a t e ------------- |
2534 // -- r3 : the number of arguments (not including the receiver) | 2487 // -- r3 : the number of arguments (not including the receiver) |
2535 // -- r4 : target (checked to be a JSBoundFunction) | 2488 // -- r4 : target (checked to be a JSBoundFunction) |
2536 // -- r6 : new.target (only in case of [[Construct]]) | 2489 // -- r6 : new.target (only in case of [[Construct]]) |
2537 // ----------------------------------- | 2490 // ----------------------------------- |
2538 | 2491 |
2539 // Load [[BoundArguments]] into r5 and length of that into r7. | 2492 // Load [[BoundArguments]] into r5 and length of that into r7. |
(...skipping 62 matching lines...) |
2602 __ addi(r8, r8, Operand(kPointerSize)); | 2555 __ addi(r8, r8, Operand(kPointerSize)); |
2603 __ bdnz(&loop); | 2556 __ bdnz(&loop); |
2604 __ add(r3, r3, r7); | 2557 __ add(r3, r3, r7); |
2605 } | 2558 } |
2606 } | 2559 } |
2607 __ bind(&no_bound_arguments); | 2560 __ bind(&no_bound_arguments); |
2608 } | 2561 } |
2609 | 2562 |
2610 } // namespace | 2563 } // namespace |
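At the JavaScript level, the loop that ends above prepends [[BoundArguments]] to the arguments already pushed for the call and grows the argument count in r3 by the bound count in r7. A hedged, host-side model of that effect (std::vector standing in for the machine stack; these model types are illustrative, not V8's object layout):

#include <string>
#include <vector>

// Illustrative stand-in for a bound function's [[BoundArguments]] list.
struct BoundFunctionModel {
  std::vector<std::string> bound_arguments;
};

// Returns the argument list the target ends up seeing: bound arguments
// first, then the arguments supplied at the call site.
std::vector<std::string> PushBoundArguments(const BoundFunctionModel& f,
                                            std::vector<std::string> args) {
  args.insert(args.begin(), f.bound_arguments.begin(), f.bound_arguments.end());
  return args;  // size() grows by bound_arguments.size(), as r3 grows by r7 above
}

For f = g.bind(thisArg, "a") called as f("b"), the model yields {"a", "b"}, which matches the r3 += r7 bookkeeping in the generated code.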
2611 | 2564 |
2612 | |
2613 // static | 2565 // static |
2614 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, | 2566 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, |
2615 TailCallMode tail_call_mode) { | 2567 TailCallMode tail_call_mode) { |
2616 // ----------- S t a t e ------------- | 2568 // ----------- S t a t e ------------- |
2617 // -- r3 : the number of arguments (not including the receiver) | 2569 // -- r3 : the number of arguments (not including the receiver) |
2618 // -- r4 : the function to call (checked to be a JSBoundFunction) | 2570 // -- r4 : the function to call (checked to be a JSBoundFunction) |
2619 // ----------------------------------- | 2571 // ----------------------------------- |
2620 __ AssertBoundFunction(r4); | 2572 __ AssertBoundFunction(r4); |
2621 | 2573 |
2622 if (tail_call_mode == TailCallMode::kAllow) { | 2574 if (tail_call_mode == TailCallMode::kAllow) { |
(...skipping 11 matching lines...) |
2634 // Call the [[BoundTargetFunction]] via the Call builtin. | 2586 // Call the [[BoundTargetFunction]] via the Call builtin. |
2635 __ LoadP(r4, | 2587 __ LoadP(r4, |
2636 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); | 2588 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); |
2637 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, | 2589 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, |
2638 masm->isolate()))); | 2590 masm->isolate()))); |
2639 __ LoadP(ip, MemOperand(ip)); | 2591 __ LoadP(ip, MemOperand(ip)); |
2640 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); | 2592 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); |
2641 __ JumpToJSEntry(ip); | 2593 __ JumpToJSEntry(ip); |
2642 } | 2594 } |
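The tail call above resolves the Call builtin through an ExternalReference cell and then computes the first-instruction address from the tagged Code object. A small sketch of that last step, with kHeapObjectTag assumed to be 1 (V8's usual heap-object tag) and the header size passed in rather than taken from the real Code layout:

#include <cstdint>

constexpr uintptr_t kHeapObjectTag = 1;  // assumption: standard V8 tagging

// ip = code + Code::kHeaderSize - kHeapObjectTag, as in the addi above.
uintptr_t CodeEntry(uintptr_t tagged_code_pointer, uintptr_t header_size) {
  return tagged_code_pointer + header_size - kHeapObjectTag;
}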
2643 | 2595 |
2644 | |
2645 // static | 2596 // static |
2646 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, | 2597 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, |
2647 TailCallMode tail_call_mode) { | 2598 TailCallMode tail_call_mode) { |
2648 // ----------- S t a t e ------------- | 2599 // ----------- S t a t e ------------- |
2649 // -- r3 : the number of arguments (not including the receiver) | 2600 // -- r3 : the number of arguments (not including the receiver) |
2650 // -- r4 : the target to call (can be any Object). | 2601 // -- r4 : the target to call (can be any Object). |
2651 // ----------------------------------- | 2602 // ----------------------------------- |
2652 | 2603 |
2653 Label non_callable, non_function, non_smi; | 2604 Label non_callable, non_function, non_smi; |
2654 __ JumpIfSmi(r4, &non_callable); | 2605 __ JumpIfSmi(r4, &non_callable); |
(...skipping 41 matching lines...) |
2696 | 2647 |
2697 // 3. Call to something that is not callable. | 2648 // 3. Call to something that is not callable. |
2698 __ bind(&non_callable); | 2649 __ bind(&non_callable); |
2699 { | 2650 { |
2700 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2651 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
2701 __ Push(r4); | 2652 __ Push(r4); |
2702 __ CallRuntime(Runtime::kThrowCalledNonCallable); | 2653 __ CallRuntime(Runtime::kThrowCalledNonCallable); |
2703 } | 2654 } |
2704 } | 2655 } |
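The labels visible here (non_smi, non_function, non_callable) and the sibling ports suggest the elided checks dispatch JSFunction, JSBoundFunction and JSProxy targets to their dedicated handlers, route other callables through a call-as-function path, and fall through to the throw above otherwise — an assumption, since those lines are collapsed in this view. A hedged dispatch model, with illustrative names only:

#include <stdexcept>

// The kinds the elided checks appear to distinguish (names are illustrative).
enum class CallTargetKind {
  kJSFunction, kJSBoundFunction, kJSProxy, kOtherCallable, kNotCallable
};

const char* DispatchCall(CallTargetKind kind) {
  switch (kind) {
    case CallTargetKind::kJSFunction:      return "CallFunction builtin";
    case CallTargetKind::kJSBoundFunction: return "CallBoundFunction builtin";
    case CallTargetKind::kJSProxy:         return "runtime proxy call";
    case CallTargetKind::kOtherCallable:   return "call-as-function path";
    case CallTargetKind::kNotCallable:
      // Mirrors the &non_callable block above.
      throw std::runtime_error("TypeError: called non-callable");
  }
  return nullptr;  // unreachable
}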
2705 | 2656 |
2706 | |
2707 // static | 2657 // static |
2708 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | 2658 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { |
2709 // ----------- S t a t e ------------- | 2659 // ----------- S t a t e ------------- |
2710 // -- r3 : the number of arguments (not including the receiver) | 2660 // -- r3 : the number of arguments (not including the receiver) |
2711 // -- r4 : the constructor to call (checked to be a JSFunction) | 2661 // -- r4 : the constructor to call (checked to be a JSFunction) |
2712 // -- r6 : the new target (checked to be a constructor) | 2662 // -- r6 : the new target (checked to be a constructor) |
2713 // ----------------------------------- | 2663 // ----------------------------------- |
2714 __ AssertFunction(r4); | 2664 __ AssertFunction(r4); |
2715 | 2665 |
2716 // Calling convention for function specific ConstructStubs requires | 2666 // Calling convention for function specific ConstructStubs requires |
2717 // r5 to contain either an AllocationSite or undefined. | 2667 // r5 to contain either an AllocationSite or undefined. |
2718 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 2668 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
2719 | 2669 |
2720 // Tail call to the function-specific construct stub (still in the caller | 2670 // Tail call to the function-specific construct stub (still in the caller |
2721 // context at this point). | 2671 // context at this point). |
2722 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 2672 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
2723 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); | 2673 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); |
2724 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); | 2674 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); |
2725 __ JumpToJSEntry(ip); | 2675 __ JumpToJSEntry(ip); |
2726 } | 2676 } |
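The two LoadP instructions above follow a double indirection — function to SharedFunctionInfo, SharedFunctionInfo to construct stub — before tail-calling the stub's first instruction. A sketch of that chain with plain pointers (illustrative model only; the real layout uses tagged fields accessed via FieldMemOperand):

// Illustrative object model, not V8's.
struct CodeStubModel { const unsigned char* entry; };
struct SharedFunctionInfoModel { CodeStubModel* construct_stub; };
struct JSFunctionModel { SharedFunctionInfoModel* shared; };

// r7 <- shared info, r7 <- construct stub, ip <- first instruction,
// then JumpToJSEntry(ip) tail-calls it.
const unsigned char* ConstructStubEntry(const JSFunctionModel* function) {
  return function->shared->construct_stub->entry;
}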
2727 | 2677 |
2728 | |
2729 // static | 2678 // static |
2730 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { | 2679 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { |
2731 // ----------- S t a t e ------------- | 2680 // ----------- S t a t e ------------- |
2732 // -- r3 : the number of arguments (not including the receiver) | 2681 // -- r3 : the number of arguments (not including the receiver) |
2733 // -- r4 : the function to call (checked to be a JSBoundFunction) | 2682 // -- r4 : the function to call (checked to be a JSBoundFunction) |
2734 // -- r6 : the new target (checked to be a constructor) | 2683 // -- r6 : the new target (checked to be a constructor) |
2735 // ----------------------------------- | 2684 // ----------------------------------- |
2736 __ AssertBoundFunction(r4); | 2685 __ AssertBoundFunction(r4); |
2737 | 2686 |
2738 // Push the [[BoundArguments]] onto the stack. | 2687 // Push the [[BoundArguments]] onto the stack. |
2739 Generate_PushBoundArguments(masm); | 2688 Generate_PushBoundArguments(masm); |
2740 | 2689 |
2741 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. | 2690 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. |
2742 Label skip; | 2691 Label skip; |
2743 __ cmp(r4, r6); | 2692 __ cmp(r4, r6); |
2744 __ bne(&skip); | 2693 __ bne(&skip); |
2745 __ LoadP(r6, | 2694 __ LoadP(r6, |
2746 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); | 2695 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); |
2747 __ bind(&skip); | 2696 __ bind(&skip); |
2748 | 2697 |
2749 // Construct the [[BoundTargetFunction]] via the Construct builtin. | 2698 // Construct the [[BoundTargetFunction]] via the Construct builtin. |
2750 __ LoadP(r4, | 2699 __ LoadP(r4, |
2751 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); | 2700 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset)); |
2752 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); | 2701 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); |
2753 __ LoadP(ip, MemOperand(ip)); | 2702 __ LoadP(ip, MemOperand(ip)); |
2754 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); | 2703 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); |
2755 __ JumpToJSEntry(ip); | 2704 __ JumpToJSEntry(ip); |
2756 } | 2705 } |
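The compare-and-branch above only rewrites new.target when it is the bound function itself; a distinct constructor passed as new.target is left untouched. A host-side sketch of that patch (model types, not V8's):

struct JSBoundFunctionModel {
  const void* bound_target_function;  // [[BoundTargetFunction]]
};

const void* PatchNewTarget(const JSBoundFunctionModel* target,
                           const void* new_target) {
  // Replace new.target with the bound target only when it equals the bound
  // function; otherwise keep the caller-supplied constructor.
  if (new_target == static_cast<const void*>(target)) {
    return target->bound_target_function;
  }
  return new_target;
}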
2757 | 2706 |
2758 | |
2759 // static | 2707 // static |
2760 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { | 2708 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { |
2761 // ----------- S t a t e ------------- | 2709 // ----------- S t a t e ------------- |
2762 // -- r3 : the number of arguments (not including the receiver) | 2710 // -- r3 : the number of arguments (not including the receiver) |
2763 // -- r4 : the constructor to call (checked to be a JSProxy) | 2711 // -- r4 : the constructor to call (checked to be a JSProxy) |
2764 // -- r6 : the new target (either the same as the constructor or | 2712 // -- r6 : the new target (either the same as the constructor or |
2765 // the JSFunction on which new was invoked initially) | 2713 // the JSFunction on which new was invoked initially) |
2766 // ----------------------------------- | 2714 // ----------------------------------- |
2767 | 2715 |
2768 // Call into the Runtime for Proxy [[Construct]]. | 2716 // Call into the Runtime for Proxy [[Construct]]. |
2769 __ Push(r4, r6); | 2717 __ Push(r4, r6); |
2770 // Include the pushed new_target, constructor and the receiver. | 2718 // Include the pushed new_target, constructor and the receiver. |
2771 __ addi(r3, r3, Operand(3)); | 2719 __ addi(r3, r3, Operand(3)); |
2772 // Tail-call to the runtime. | 2720 // Tail-call to the runtime. |
2773 __ JumpToExternalReference( | 2721 __ JumpToExternalReference( |
2774 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); | 2722 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); |
2775 } | 2723 } |
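The +3 above folds the two values just pushed (constructor and new.target) plus the receiver already on the stack into the count handed to the runtime. A one-line model, purely to make the accounting explicit:

// js_argc excludes the receiver; the runtime additionally sees the receiver,
// the constructor and new.target.
int ProxyConstructRuntimeArgc(int js_argc) { return js_argc + 3; }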
2776 | 2724 |
2777 | |
2778 // static | 2725 // static |
2779 void Builtins::Generate_Construct(MacroAssembler* masm) { | 2726 void Builtins::Generate_Construct(MacroAssembler* masm) { |
2780 // ----------- S t a t e ------------- | 2727 // ----------- S t a t e ------------- |
2781 // -- r3 : the number of arguments (not including the receiver) | 2728 // -- r3 : the number of arguments (not including the receiver) |
2782 // -- r4 : the constructor to call (can be any Object) | 2729 // -- r4 : the constructor to call (can be any Object) |
2783 // -- r6 : the new target (either the same as the constructor or | 2730 // -- r6 : the new target (either the same as the constructor or |
2784 // the JSFunction on which new was invoked initially) | 2731 // the JSFunction on which new was invoked initially) |
2785 // ----------------------------------- | 2732 // ----------------------------------- |
2786 | 2733 |
2787 // Check if target is a Smi. | 2734 // Check if target is a Smi. |
(...skipping 245 matching lines...) |
3033 // r6 : new target (passed through to callee) | 2980 // r6 : new target (passed through to callee) |
3034 __ CallJSEntry(ip); | 2981 __ CallJSEntry(ip); |
3035 | 2982 |
3036 // Store offset of return address for deoptimizer. | 2983 // Store offset of return address for deoptimizer. |
3037 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); | 2984 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); |
3038 | 2985 |
3039 // Exit frame and return. | 2986 // Exit frame and return. |
3040 LeaveArgumentsAdaptorFrame(masm); | 2987 LeaveArgumentsAdaptorFrame(masm); |
3041 __ blr(); | 2988 __ blr(); |
3042 | 2989 |
3043 | |
3044 // ------------------------------------------- | 2990 // ------------------------------------------- |
3045 // Don't adapt arguments. | 2991 // Don't adapt arguments. |
3046 // ------------------------------------------- | 2992 // ------------------------------------------- |
3047 __ bind(&dont_adapt_arguments); | 2993 __ bind(&dont_adapt_arguments); |
3048 __ JumpToJSEntry(ip); | 2994 __ JumpToJSEntry(ip); |
3049 | 2995 |
3050 __ bind(&stack_overflow); | 2996 __ bind(&stack_overflow); |
3051 { | 2997 { |
3052 FrameScope frame(masm, StackFrame::MANUAL); | 2998 FrameScope frame(masm, StackFrame::MANUAL); |
3053 __ CallRuntime(Runtime::kThrowStackOverflow); | 2999 __ CallRuntime(Runtime::kThrowStackOverflow); |
3054 __ bkpt(0); | 3000 __ bkpt(0); |
3055 } | 3001 } |
3056 } | 3002 } |
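The trampoline visible above has three exits: the adapted path that enters the frame, calls through ip and returns via LeaveArgumentsAdaptorFrame, the dont_adapt_arguments shortcut that jumps straight to the callee, and the stack_overflow bailout that throws and traps. A hedged model of that decision; the sentinel value for "don't adapt" is an assumption, since the branch that selects it is in the lines elided from this view:

enum class AdaptorExit { kAdapted, kDontAdapt, kStackOverflow };

AdaptorExit ChooseAdaptorExit(int actual, int expected, bool enough_stack) {
  if (!enough_stack) return AdaptorExit::kStackOverflow;  // &stack_overflow
  if (expected == -1) return AdaptorExit::kDontAdapt;     // assumed sentinel
  (void)actual;  // the adapted path pads or truncates to `expected`
  return AdaptorExit::kAdapted;  // CallJSEntry, then LeaveArgumentsAdaptorFrame
}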
3057 | 3003 |
3058 | |
3059 #undef __ | 3004 #undef __ |
3060 } // namespace internal | 3005 } // namespace internal |
3061 } // namespace v8 | 3006 } // namespace v8 |
3062 | 3007 |
3063 #endif // V8_TARGET_ARCH_PPC | 3008 #endif // V8_TARGET_ARCH_PPC |