Chromium Code Reviews

Side by Side Diff: src/builtins/mips/builtins-mips.cc

Issue 2145023002: [builtins] move builtin files to src/builtins/. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebase (created 4 years, 5 months ago)
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS 5 #if V8_TARGET_ARCH_MIPS
6 6
7 #include "src/codegen.h" 7 #include "src/codegen.h"
8 #include "src/debug/debug.h" 8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h" 9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h" 10 #include "src/full-codegen/full-codegen.h"
11 #include "src/runtime/runtime.h" 11 #include "src/runtime/runtime.h"
12 12
13
14 namespace v8 { 13 namespace v8 {
15 namespace internal { 14 namespace internal {
16 15
17
18 #define __ ACCESS_MASM(masm) 16 #define __ ACCESS_MASM(masm)
19 17
20 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id, 18 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
21 ExitFrameType exit_frame_type) { 19 ExitFrameType exit_frame_type) {
22 // ----------- S t a t e ------------- 20 // ----------- S t a t e -------------
23 // -- a0 : number of arguments excluding receiver 21 // -- a0 : number of arguments excluding receiver
24 // -- a1 : target 22 // -- a1 : target
25 // -- a3 : new.target 23 // -- a3 : new.target
26 // -- sp[0] : last argument 24 // -- sp[0] : last argument
27 // -- ... 25 // -- ...
(...skipping 15 matching lines...)
43 41
44 // Insert extra arguments. 42 // Insert extra arguments.
45 __ SmiTag(a0); 43 __ SmiTag(a0);
46 __ Push(a0, a1, a3); 44 __ Push(a0, a1, a3);
47 __ SmiUntag(a0); 45 __ SmiUntag(a0);
48 46
49 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), PROTECT, 47 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), PROTECT,
50 exit_frame_type == BUILTIN_EXIT); 48 exit_frame_type == BUILTIN_EXIT);
51 } 49 }
52 50
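Note on the SmiTag/SmiUntag pair above: the argument count is tagged before the push so that it sits on the stack as a valid tagged value across the runtime call, then untagged again. A minimal sketch of what tagging means on a 32-bit target such as MIPS, assuming kSmiTagSize == 1 and kSmiTag == 0 (the 32-bit V8 convention); the helper names below are made up for illustration:

  #include <cstdint>

  // Hypothetical helpers mirroring __ SmiTag / __ SmiUntag on 32-bit targets.
  inline int32_t SmiTagValue(int32_t value) {
    return value << 1;   // low bit 0 marks the word as a small integer, not a pointer
  }
  inline int32_t SmiUntagValue(int32_t tagged) {
    return tagged >> 1;  // arithmetic shift right restores the original count
  }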
53
54 // Load the built-in InternalArray function from the current context. 51 // Load the built-in InternalArray function from the current context.
55 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, 52 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
56 Register result) { 53 Register result) {
57 // Load the InternalArray function from the native context. 54 // Load the InternalArray function from the native context.
58 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); 55 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
59 } 56 }
60 57
61
62 // Load the built-in Array function from the current context. 58 // Load the built-in Array function from the current context.
63 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { 59 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
64 // Load the Array function from the native context. 60 // Load the Array function from the native context.
65 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); 61 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
66 } 62 }
67 63
68
69 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { 64 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
70 // ----------- S t a t e ------------- 65 // ----------- S t a t e -------------
71 // -- a0 : number of arguments 66 // -- a0 : number of arguments
72 // -- ra : return address 67 // -- ra : return address
73 // -- sp[...]: constructor arguments 68 // -- sp[...]: constructor arguments
74 // ----------------------------------- 69 // -----------------------------------
75 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; 70 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
76 71
77 // Get the InternalArray function. 72 // Get the InternalArray function.
78 GenerateLoadInternalArrayFunction(masm, a1); 73 GenerateLoadInternalArrayFunction(masm, a1);
79 74
80 if (FLAG_debug_code) { 75 if (FLAG_debug_code) {
81 // Initial map for the builtin InternalArray functions should be maps. 76 // Initial map for the builtin InternalArray functions should be maps.
82 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); 77 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
83 __ SmiTst(a2, t0); 78 __ SmiTst(a2, t0);
84 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, 79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, t0,
85 t0, Operand(zero_reg)); 80 Operand(zero_reg));
86 __ GetObjectType(a2, a3, t0); 81 __ GetObjectType(a2, a3, t0);
87 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, 82 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, t0,
88 t0, Operand(MAP_TYPE)); 83 Operand(MAP_TYPE));
89 } 84 }
90 85
91 // Run the native code for the InternalArray function called as a normal 86 // Run the native code for the InternalArray function called as a normal
92 // function. 87 // function.
93 // Tail call a stub. 88 // Tail call a stub.
94 InternalArrayConstructorStub stub(masm->isolate()); 89 InternalArrayConstructorStub stub(masm->isolate());
95 __ TailCallStub(&stub); 90 __ TailCallStub(&stub);
96 } 91 }
97 92
98
99 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { 93 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
100 // ----------- S t a t e ------------- 94 // ----------- S t a t e -------------
101 // -- a0 : number of arguments 95 // -- a0 : number of arguments
102 // -- ra : return address 96 // -- ra : return address
103 // -- sp[...]: constructor arguments 97 // -- sp[...]: constructor arguments
104 // ----------------------------------- 98 // -----------------------------------
105 Label generic_array_code; 99 Label generic_array_code;
106 100
107 // Get the Array function. 101 // Get the Array function.
108 GenerateLoadArrayFunction(masm, a1); 102 GenerateLoadArrayFunction(masm, a1);
109 103
110 if (FLAG_debug_code) { 104 if (FLAG_debug_code) {
111 // Initial map for the builtin Array functions should be maps. 105 // Initial map for the builtin Array functions should be maps.
112 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); 106 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
113 __ SmiTst(a2, t0); 107 __ SmiTst(a2, t0);
114 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, 108 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, t0,
115 t0, Operand(zero_reg)); 109 Operand(zero_reg));
116 __ GetObjectType(a2, a3, t0); 110 __ GetObjectType(a2, a3, t0);
117 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, 111 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, t0,
118 t0, Operand(MAP_TYPE)); 112 Operand(MAP_TYPE));
119 } 113 }
120 114
121 // Run the native code for the Array function called as a normal function. 115 // Run the native code for the Array function called as a normal function.
122 // Tail call a stub. 116 // Tail call a stub.
123 __ mov(a3, a1); 117 __ mov(a3, a1);
124 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 118 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
125 ArrayConstructorStub stub(masm->isolate()); 119 ArrayConstructorStub stub(masm->isolate());
126 __ TailCallStub(&stub); 120 __ TailCallStub(&stub);
127 } 121 }
128 122
129
130 // static 123 // static
131 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) { 124 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
132 // ----------- S t a t e ------------- 125 // ----------- S t a t e -------------
133 // -- a0 : number of arguments 126 // -- a0 : number of arguments
134 // -- a1 : function 127 // -- a1 : function
135 // -- cp : context 128 // -- cp : context
136 // -- ra : return address 129 // -- ra : return address
137 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) 130 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
138 // -- sp[argc * 4] : receiver 131 // -- sp[argc * 4] : receiver
139 // ----------------------------------- 132 // -----------------------------------
(...skipping 130 matching lines...)
270 __ Lsa(sp, sp, t0, kPointerSizeLog2); 263 __ Lsa(sp, sp, t0, kPointerSizeLog2);
271 __ DropAndRet(1); 264 __ DropAndRet(1);
272 } 265 }
273 266
274 // 2b. No arguments, return +0. 267 // 2b. No arguments, return +0.
275 __ bind(&no_arguments); 268 __ bind(&no_arguments);
276 __ Move(v0, Smi::FromInt(0)); 269 __ Move(v0, Smi::FromInt(0));
277 __ DropAndRet(1); 270 __ DropAndRet(1);
278 } 271 }
279 272
280
281 // static 273 // static
282 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) { 274 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
283 // ----------- S t a t e ------------- 275 // ----------- S t a t e -------------
284 // -- a0 : number of arguments 276 // -- a0 : number of arguments
285 // -- a1 : constructor function 277 // -- a1 : constructor function
286 // -- a3 : new target 278 // -- a3 : new target
287 // -- cp : context 279 // -- cp : context
288 // -- ra : return address 280 // -- ra : return address
289 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) 281 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
290 // -- sp[argc * 4] : receiver 282 // -- sp[argc * 4] : receiver
(...skipping 59 matching lines...)
350 } 342 }
351 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); 343 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));
352 344
353 __ bind(&drop_frame_and_ret); 345 __ bind(&drop_frame_and_ret);
354 { 346 {
355 __ Lsa(sp, sp, t0, kPointerSizeLog2); 347 __ Lsa(sp, sp, t0, kPointerSizeLog2);
356 __ DropAndRet(1); 348 __ DropAndRet(1);
357 } 349 }
358 } 350 }
359 351
360
361 // static 352 // static
362 void Builtins::Generate_StringConstructor(MacroAssembler* masm) { 353 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
363 // ----------- S t a t e ------------- 354 // ----------- S t a t e -------------
364 // -- a0 : number of arguments 355 // -- a0 : number of arguments
365 // -- a1 : constructor function 356 // -- a1 : constructor function
366 // -- cp : context 357 // -- cp : context
367 // -- ra : return address 358 // -- ra : return address
368 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) 359 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
369 // -- sp[argc * 4] : receiver 360 // -- sp[argc * 4] : receiver
370 // ----------------------------------- 361 // -----------------------------------
(...skipping 51 matching lines...)
422 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); 413 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
423 } 414 }
424 415
425 __ bind(&drop_frame_and_ret); 416 __ bind(&drop_frame_and_ret);
426 { 417 {
427 __ Lsa(sp, sp, t0, kPointerSizeLog2); 418 __ Lsa(sp, sp, t0, kPointerSizeLog2);
428 __ DropAndRet(1); 419 __ DropAndRet(1);
429 } 420 }
430 } 421 }
431 422
432
433 // static 423 // static
434 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) { 424 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
435 // ----------- S t a t e ------------- 425 // ----------- S t a t e -------------
436 // -- a0 : number of arguments 426 // -- a0 : number of arguments
437 // -- a1 : constructor function 427 // -- a1 : constructor function
438 // -- a3 : new target 428 // -- a3 : new target
439 // -- cp : context 429 // -- cp : context
440 // -- ra : return address 430 // -- ra : return address
441 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) 431 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
442 // -- sp[argc * 4] : receiver 432 // -- sp[argc * 4] : receiver
(...skipping 94 matching lines...)
537 527
538 // Restore target function and new target. 528 // Restore target function and new target.
539 __ Pop(a0, a1, a3); 529 __ Pop(a0, a1, a3);
540 __ SmiUntag(a0); 530 __ SmiUntag(a0);
541 } 531 }
542 532
543 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); 533 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
544 __ Jump(at); 534 __ Jump(at);
545 } 535 }
546 536
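The Addu/Jump pair above (and the same pattern in several other builtins in this file) turns a tagged Code object pointer into the address of its first instruction. A small arithmetic sketch, assuming kHeapObjectTag == 1 as on V8's 32-bit targets; the helper name and explicit header-size parameter are illustrative, not part of this CL:

  #include <cstdint>

  // entry = tagged_code + Code::kHeaderSize - kHeapObjectTag
  inline uintptr_t CodeEntryAddress(uintptr_t tagged_code, uint32_t header_size) {
    return tagged_code + header_size - 1;  // skip the Code header, strip the heap tag
  }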
547
548 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { 537 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
549 // Checking whether the queued function is ready for install is optional, 538 // Checking whether the queued function is ready for install is optional,
550 // since we come across interrupts and stack checks elsewhere. However, 539 // since we come across interrupts and stack checks elsewhere. However,
551 // not checking may delay installing ready functions, and always checking 540 // not checking may delay installing ready functions, and always checking
552 // would be quite expensive. A good compromise is to first check against 541 // would be quite expensive. A good compromise is to first check against
553 // stack limit as a cue for an interrupt signal. 542 // stack limit as a cue for an interrupt signal.
554 Label ok; 543 Label ok;
555 __ LoadRoot(t0, Heap::kStackLimitRootIndex); 544 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
556 __ Branch(&ok, hs, sp, Operand(t0)); 545 __ Branch(&ok, hs, sp, Operand(t0));
557 546
558 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode); 547 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
559 548
560 __ bind(&ok); 549 __ bind(&ok);
561 GenerateTailCallToSharedCode(masm); 550 GenerateTailCallToSharedCode(masm);
562 } 551 }
563 552
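Control-flow sketch of Generate_InOptimizationQueue above, assuming sp and the stack limit are plain addresses (the real code compares machine registers and tail-calls, so nothing actually returns here; the helper names are hypothetical):

  #include <cstdint>

  void InOptimizationQueueSketch(uintptr_t sp, uintptr_t stack_limit,
                                 void (*try_install_optimized)(),
                                 void (*tail_call_shared)()) {
    if (sp < stack_limit) {        // "hs" branch not taken: an interrupt is likely pending
      try_install_optimized();     // Runtime::kTryInstallOptimizedCode
      return;
    }
    tail_call_shared();            // otherwise just run the SharedFunctionInfo code
  }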
564
565 static void Generate_JSConstructStubHelper(MacroAssembler* masm, 553 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
566 bool is_api_function, 554 bool is_api_function,
567 bool create_implicit_receiver, 555 bool create_implicit_receiver,
568 bool check_derived_construct) { 556 bool check_derived_construct) {
569 // ----------- S t a t e ------------- 557 // ----------- S t a t e -------------
570 // -- a0 : number of arguments 558 // -- a0 : number of arguments
571 // -- a1 : constructor function 559 // -- a1 : constructor function
572 // -- a2 : allocation site or undefined 560 // -- a2 : allocation site or undefined
573 // -- a3 : new target 561 // -- a3 : new target
574 // -- cp : context 562 // -- cp : context
(...skipping 130 matching lines...)
705 } 693 }
706 694
707 __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1); 695 __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1);
708 __ Addu(sp, sp, kPointerSize); 696 __ Addu(sp, sp, kPointerSize);
709 if (create_implicit_receiver) { 697 if (create_implicit_receiver) {
710 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2); 698 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
711 } 699 }
712 __ Ret(); 700 __ Ret();
713 } 701 }
714 702
715
716 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { 703 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
717 Generate_JSConstructStubHelper(masm, false, true, false); 704 Generate_JSConstructStubHelper(masm, false, true, false);
718 } 705 }
719 706
720
721 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { 707 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
722 Generate_JSConstructStubHelper(masm, true, false, false); 708 Generate_JSConstructStubHelper(masm, true, false, false);
723 } 709 }
724 710
725
726 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { 711 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
727 Generate_JSConstructStubHelper(masm, false, false, false); 712 Generate_JSConstructStubHelper(masm, false, false, false);
728 } 713 }
729 714
730
731 void Builtins::Generate_JSBuiltinsConstructStubForDerived( 715 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
732 MacroAssembler* masm) { 716 MacroAssembler* masm) {
733 Generate_JSConstructStubHelper(masm, false, false, true); 717 Generate_JSConstructStubHelper(masm, false, false, true);
734 } 718 }
735 719
736
737 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { 720 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
738 FrameScope scope(masm, StackFrame::INTERNAL); 721 FrameScope scope(masm, StackFrame::INTERNAL);
739 __ Push(a1); 722 __ Push(a1);
740 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); 723 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
741 } 724 }
742 725
743
744 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; 726 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
745 727
746
747 // Clobbers a2; preserves all other registers. 728 // Clobbers a2; preserves all other registers.
748 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, 729 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
749 IsTagged argc_is_tagged) { 730 IsTagged argc_is_tagged) {
750 // Check the stack for overflow. We are not trying to catch 731 // Check the stack for overflow. We are not trying to catch
751 // interruptions (e.g. debug break and preemption) here, so the "real stack 732 // interruptions (e.g. debug break and preemption) here, so the "real stack
752 // limit" is checked. 733 // limit" is checked.
753 Label okay; 734 Label okay;
754 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); 735 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
755 // Make a2 the space we have left. The stack might already be overflowed 736 // Make a2 the space we have left. The stack might already be overflowed
756 // here which will cause a2 to become negative. 737 // here which will cause a2 to become negative.
757 __ Subu(a2, sp, a2); 738 __ Subu(a2, sp, a2);
758 // Check if the arguments will overflow the stack. 739 // Check if the arguments will overflow the stack.
759 if (argc_is_tagged == kArgcIsSmiTagged) { 740 if (argc_is_tagged == kArgcIsSmiTagged) {
760 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize); 741 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
761 } else { 742 } else {
762 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); 743 DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
763 __ sll(t3, argc, kPointerSizeLog2); 744 __ sll(t3, argc, kPointerSizeLog2);
764 } 745 }
765 // Signed comparison. 746 // Signed comparison.
766 __ Branch(&okay, gt, a2, Operand(t3)); 747 __ Branch(&okay, gt, a2, Operand(t3));
767 748
768 // Out of stack space. 749 // Out of stack space.
769 __ CallRuntime(Runtime::kThrowStackOverflow); 750 __ CallRuntime(Runtime::kThrowStackOverflow);
770 751
771 __ bind(&okay); 752 __ bind(&okay);
772 } 753 }
773 754
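Arithmetic sketch of the check in Generate_CheckStackOverflow, assuming a 32-bit target with kPointerSizeLog2 == 2 and kSmiTagSize == 1 (so a Smi-tagged argc is already argc * 2); the helper name is made up:

  #include <cstdint>

  bool ArgumentsFitOnStack(uintptr_t sp, uintptr_t real_stack_limit,
                           int32_t argc, bool argc_is_smi_tagged) {
    // Space left; may come out negative if sp is already below the limit,
    // which is why the original uses a signed "gt" comparison.
    int32_t space = static_cast<int32_t>(sp - real_stack_limit);
    int32_t needed = argc_is_smi_tagged ? argc << (2 - 1)  // smi argc is value << 1
                                        : argc << 2;       // argc * kPointerSize
    return space > needed;
  }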
774
775 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 755 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
776 bool is_construct) { 756 bool is_construct) {
777 // Called from JSEntryStub::GenerateBody 757 // Called from JSEntryStub::GenerateBody
778 758
779 // ----------- S t a t e ------------- 759 // ----------- S t a t e -------------
780 // -- a0: new.target 760 // -- a0: new.target
781 // -- a1: function 761 // -- a1: function
782 // -- a2: receiver_pointer 762 // -- a2: receiver_pointer
783 // -- a3: argc 763 // -- a3: argc
784 // -- s0: argv 764 // -- s0: argv
(...skipping 19 matching lines...)
804 784
805 // Remember new.target. 785 // Remember new.target.
806 __ mov(t1, a0); 786 __ mov(t1, a0);
807 787
808 // Copy arguments to the stack in a loop. 788 // Copy arguments to the stack in a loop.
809 // a3: argc 789 // a3: argc
810 // s0: argv, i.e. points to first arg 790 // s0: argv, i.e. points to first arg
811 Label loop, entry; 791 Label loop, entry;
812 __ Lsa(t2, s0, a3, kPointerSizeLog2); 792 __ Lsa(t2, s0, a3, kPointerSizeLog2);
813 __ b(&entry); 793 __ b(&entry);
814 __ nop(); // Branch delay slot nop. 794 __ nop(); // Branch delay slot nop.
815 // t2 points past last arg. 795 // t2 points past last arg.
816 __ bind(&loop); 796 __ bind(&loop);
817 __ lw(t0, MemOperand(s0)); // Read next parameter. 797 __ lw(t0, MemOperand(s0)); // Read next parameter.
818 __ addiu(s0, s0, kPointerSize); 798 __ addiu(s0, s0, kPointerSize);
819 __ lw(t0, MemOperand(t0)); // Dereference handle. 799 __ lw(t0, MemOperand(t0)); // Dereference handle.
820 __ push(t0); // Push parameter. 800 __ push(t0); // Push parameter.
821 __ bind(&entry); 801 __ bind(&entry);
822 __ Branch(&loop, ne, s0, Operand(t2)); 802 __ Branch(&loop, ne, s0, Operand(t2));
823 803
824 // Setup new.target and argc. 804 // Setup new.target and argc.
825 __ mov(a0, a3); 805 __ mov(a0, a3);
826 __ mov(a3, t1); 806 __ mov(a3, t1);
827 807
828 // Initialize all JavaScript callee-saved registers, since they will be seen 808 // Initialize all JavaScript callee-saved registers, since they will be seen
829 // by the garbage collector as part of handlers. 809 // by the garbage collector as part of handlers.
830 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); 810 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
(...skipping 10 matching lines...)
841 ? masm->isolate()->builtins()->Construct() 821 ? masm->isolate()->builtins()->Construct()
842 : masm->isolate()->builtins()->Call(); 822 : masm->isolate()->builtins()->Call();
843 __ Call(builtin, RelocInfo::CODE_TARGET); 823 __ Call(builtin, RelocInfo::CODE_TARGET);
844 824
845 // Leave internal frame. 825 // Leave internal frame.
846 } 826 }
847 827
848 __ Jump(ra); 828 __ Jump(ra);
849 } 829 }
850 830
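The copy loop in the trampoline above walks argv, where each slot is a handle (a pointer to a slot holding the actual object), dereferences it and pushes the value. A plain-C++ sketch of that shape; the types and the container are stand-ins, not V8 API:

  #include <vector>

  void CopyArgumentsSketch(void** argv, int argc, std::vector<void*>* js_stack) {
    for (int i = 0; i < argc; ++i) {
      void* value = *reinterpret_cast<void**>(argv[i]);  // dereference the handle
      js_stack->push_back(value);                        // push the parameter
    }
  }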
851
852 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 831 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
853 Generate_JSEntryTrampolineHelper(masm, false); 832 Generate_JSEntryTrampolineHelper(masm, false);
854 } 833 }
855 834
856
857 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 835 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
858 Generate_JSEntryTrampolineHelper(masm, true); 836 Generate_JSEntryTrampolineHelper(masm, true);
859 } 837 }
860 838
861 // static 839 // static
862 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { 840 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
863 // ----------- S t a t e ------------- 841 // ----------- S t a t e -------------
864 // -- v0 : the value to pass to the generator 842 // -- v0 : the value to pass to the generator
865 // -- a1 : the JSGeneratorObject to resume 843 // -- a1 : the JSGeneratorObject to resume
866 // -- a2 : the resume mode (tagged) 844 // -- a2 : the resume mode (tagged)
(...skipping 65 matching lines...)
932 Label old_generator; 910 Label old_generator;
933 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); 911 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
934 __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset)); 912 __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
935 __ GetObjectType(a3, a3, a3); 913 __ GetObjectType(a3, a3, a3);
936 __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE)); 914 __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE));
937 915
938 // New-style (ignition/turbofan) generator object. 916 // New-style (ignition/turbofan) generator object.
939 { 917 {
940 __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); 918 __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
941 __ lw(a0, 919 __ lw(a0,
942 FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset)); 920 FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
943 __ SmiUntag(a0); 921 __ SmiUntag(a0);
944 // We abuse new.target both to indicate that this is a resume call and to 922 // We abuse new.target both to indicate that this is a resume call and to
945 // pass in the generator object. In ordinary calls, new.target is always 923 // pass in the generator object. In ordinary calls, new.target is always
946 // undefined because generator functions are non-constructable. 924 // undefined because generator functions are non-constructable.
947 __ Move(a3, a1); 925 __ Move(a3, a1);
948 __ Move(a1, t0); 926 __ Move(a1, t0);
949 __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); 927 __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
950 __ Jump(a2); 928 __ Jump(a2);
951 } 929 }
952 930
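For the ignition/turbofan path above, the register assignment at the jump to the generator's code entry is worth spelling out. A descriptive struct, purely illustrative and not a real V8 type, mirroring the comment about reusing new.target:

  // What each register carries when jumping to the generator's code entry:
  struct ResumeCallConvention {
    int argc;                // a0: untagged formal parameter count
    void* target_function;   // a1: the JSFunction being resumed (moved from t0)
    void* code_entry;        // a2: the function's code entry, used as the jump target
    void* new_target;        // a3: the JSGeneratorObject, marking this call as a resume
  };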
(...skipping 508 matching lines...)
1461 1439
1462 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) { 1440 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1463 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline); 1441 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1464 } 1442 }
1465 1443
1466 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { 1444 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1467 GenerateTailCallToReturnedCode(masm, 1445 GenerateTailCallToReturnedCode(masm,
1468 Runtime::kCompileOptimized_NotConcurrent); 1446 Runtime::kCompileOptimized_NotConcurrent);
1469 } 1447 }
1470 1448
1471
1472 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { 1449 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1473 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); 1450 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1474 } 1451 }
1475 1452
1476 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { 1453 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1477 // ----------- S t a t e ------------- 1454 // ----------- S t a t e -------------
1478 // -- a0 : argument count (preserved for callee) 1455 // -- a0 : argument count (preserved for callee)
1479 // -- a1 : new target (preserved for callee) 1456 // -- a1 : new target (preserved for callee)
1480 // -- a3 : target function (preserved for callee) 1457 // -- a3 : target function (preserved for callee)
1481 // ----------------------------------- 1458 // -----------------------------------
(...skipping 29 matching lines...)
1511 } 1488 }
1512 1489
1513 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { 1490 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1514 // For now, we are relying on the fact that make_code_young doesn't do any 1491 // For now, we are relying on the fact that make_code_young doesn't do any
1515 // garbage collection which allows us to save/restore the registers without 1492 // garbage collection which allows us to save/restore the registers without
1516 // worrying about which of them contain pointers. We also don't build an 1493 // worrying about which of them contain pointers. We also don't build an
1517 // internal frame to make the code faster, since we shouldn't have to do stack 1494 // internal frame to make the code faster, since we shouldn't have to do stack
1518 // crawls in MakeCodeYoung. This seems a bit fragile. 1495 // crawls in MakeCodeYoung. This seems a bit fragile.
1519 1496
1520 // Set a0 to point to the head of the PlatformCodeAge sequence. 1497 // Set a0 to point to the head of the PlatformCodeAge sequence.
1521 __ Subu(a0, a0, 1498 __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1522 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1523 1499
1524 // The following registers must be saved and restored when calling through to 1500 // The following registers must be saved and restored when calling through to
1525 // the runtime: 1501 // the runtime:
1526 // a0 - contains return address (beginning of patch sequence) 1502 // a0 - contains return address (beginning of patch sequence)
1527 // a1 - isolate 1503 // a1 - isolate
1528 // a3 - new target 1504 // a3 - new target
1529 RegList saved_regs = 1505 RegList saved_regs =
1530 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit(); 1506 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1531 FrameScope scope(masm, StackFrame::MANUAL); 1507 FrameScope scope(masm, StackFrame::MANUAL);
1532 __ MultiPush(saved_regs); 1508 __ MultiPush(saved_regs);
1533 __ PrepareCallCFunction(2, 0, a2); 1509 __ PrepareCallCFunction(2, 0, a2);
1534 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); 1510 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1535 __ CallCFunction( 1511 __ CallCFunction(
1536 ExternalReference::get_make_code_young_function(masm->isolate()), 2); 1512 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1537 __ MultiPop(saved_regs); 1513 __ MultiPop(saved_regs);
1538 __ Jump(a0); 1514 __ Jump(a0);
1539 } 1515 }
1540 1516
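The RegList above is just a bitmask with one bit per machine register, which MultiPush/MultiPop iterate over. A sketch of the computation using the conventional MIPS register numbers (a0=$4, a1=$5, a3=$7, sp=$29, fp=$30, ra=$31); the constexpr helpers are illustrative only:

  #include <cstdint>

  constexpr uint32_t Bit(int reg_code) { return 1u << reg_code; }

  // (a0 | a1 | a3 | ra | fp) & ~sp -- sp is excluded, since the push itself adjusts it.
  constexpr uint32_t kSavedRegs =
      (Bit(4) | Bit(5) | Bit(7) | Bit(31) | Bit(30)) & ~Bit(29);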
1541 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ 1517 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1542 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ 1518 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1543 MacroAssembler* masm) { \ 1519 MacroAssembler* masm) { \
1544 GenerateMakeCodeYoungAgainCommon(masm); \ 1520 GenerateMakeCodeYoungAgainCommon(masm); \
1545 } \ 1521 } \
1546 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ 1522 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1547 MacroAssembler* masm) { \ 1523 MacroAssembler* masm) { \
1548 GenerateMakeCodeYoungAgainCommon(masm); \ 1524 GenerateMakeCodeYoungAgainCommon(masm); \
1549 } 1525 }
1550 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) 1526 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1551 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR 1527 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1552 1528
1553
1554 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { 1529 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1555 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact 1530 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1556 // that make_code_young doesn't do any garbage collection which allows us to 1531 // that make_code_young doesn't do any garbage collection which allows us to
1557 // save/restore the registers without worrying about which of them contain 1532 // save/restore the registers without worrying about which of them contain
1558 // pointers. 1533 // pointers.
1559 1534
1560 // Set a0 to point to the head of the PlatformCodeAge sequence. 1535 // Set a0 to point to the head of the PlatformCodeAge sequence.
1561 __ Subu(a0, a0, 1536 __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1562 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1563 1537
1564 // The following registers must be saved and restored when calling through to 1538 // The following registers must be saved and restored when calling through to
1565 // the runtime: 1539 // the runtime:
1566 // a0 - contains return address (beginning of patch sequence) 1540 // a0 - contains return address (beginning of patch sequence)
1567 // a1 - isolate 1541 // a1 - isolate
1568 // a3 - new target 1542 // a3 - new target
1569 RegList saved_regs = 1543 RegList saved_regs =
1570 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit(); 1544 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1571 FrameScope scope(masm, StackFrame::MANUAL); 1545 FrameScope scope(masm, StackFrame::MANUAL);
1572 __ MultiPush(saved_regs); 1546 __ MultiPush(saved_regs);
1573 __ PrepareCallCFunction(2, 0, a2); 1547 __ PrepareCallCFunction(2, 0, a2);
1574 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); 1548 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1575 __ CallCFunction( 1549 __ CallCFunction(
1576 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), 1550 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1577 2); 1551 2);
1578 __ MultiPop(saved_regs); 1552 __ MultiPop(saved_regs);
1579 1553
1580 // Perform prologue operations usually performed by the young code stub. 1554 // Perform prologue operations usually performed by the young code stub.
1581 __ PushStandardFrame(a1); 1555 __ PushStandardFrame(a1);
1582 1556
1583 // Jump to point after the code-age stub. 1557 // Jump to point after the code-age stub.
1584 __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength)); 1558 __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
1585 __ Jump(a0); 1559 __ Jump(a0);
1586 } 1560 }
1587 1561
1588
1589 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { 1562 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1590 GenerateMakeCodeYoungAgainCommon(masm); 1563 GenerateMakeCodeYoungAgainCommon(masm);
1591 } 1564 }
1592 1565
1593
1594 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { 1566 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1595 Generate_MarkCodeAsExecutedOnce(masm); 1567 Generate_MarkCodeAsExecutedOnce(masm);
1596 } 1568 }
1597 1569
1598
1599 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, 1570 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1600 SaveFPRegsMode save_doubles) { 1571 SaveFPRegsMode save_doubles) {
1601 { 1572 {
1602 FrameScope scope(masm, StackFrame::INTERNAL); 1573 FrameScope scope(masm, StackFrame::INTERNAL);
1603 1574
1604 // Preserve registers across notification, this is important for compiled 1575 // Preserve registers across notification, this is important for compiled
1605 // stubs that tail call the runtime on deopts passing their parameters in 1576 // stubs that tail call the runtime on deopts passing their parameters in
1606 // registers. 1577 // registers.
1607 __ MultiPush(kJSCallerSaved | kCalleeSaved); 1578 __ MultiPush(kJSCallerSaved | kCalleeSaved);
1608 // Pass the function and deoptimization type to the runtime system. 1579 // Pass the function and deoptimization type to the runtime system.
1609 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); 1580 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1610 __ MultiPop(kJSCallerSaved | kCalleeSaved); 1581 __ MultiPop(kJSCallerSaved | kCalleeSaved);
1611 } 1582 }
1612 1583
1613 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state 1584 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state
1614 __ Jump(ra); // Jump to miss handler 1585 __ Jump(ra); // Jump to miss handler
1615 } 1586 }
1616 1587
1617
1618 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { 1588 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1619 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); 1589 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1620 } 1590 }
1621 1591
1622
1623 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { 1592 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1624 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); 1593 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1625 } 1594 }
1626 1595
1627
1628 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, 1596 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1629 Deoptimizer::BailoutType type) { 1597 Deoptimizer::BailoutType type) {
1630 { 1598 {
1631 FrameScope scope(masm, StackFrame::INTERNAL); 1599 FrameScope scope(masm, StackFrame::INTERNAL);
1632 // Pass the function and deoptimization type to the runtime system. 1600 // Pass the function and deoptimization type to the runtime system.
1633 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); 1601 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1634 __ push(a0); 1602 __ push(a0);
1635 __ CallRuntime(Runtime::kNotifyDeoptimized); 1603 __ CallRuntime(Runtime::kNotifyDeoptimized);
1636 } 1604 }
1637 1605
(...skipping 15 matching lines...)
1653 Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER))); 1621 Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
1654 1622
1655 __ Ret(USE_DELAY_SLOT); 1623 __ Ret(USE_DELAY_SLOT);
1656 // Safe to fill delay slot Addu will emit one instruction. 1624 // Safe to fill delay slot Addu will emit one instruction.
1657 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state. 1625 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1658 1626
1659 __ bind(&unknown_state); 1627 __ bind(&unknown_state);
1660 __ stop("no cases left"); 1628 __ stop("no cases left");
1661 } 1629 }
1662 1630
1663
1664 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { 1631 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1665 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); 1632 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1666 } 1633 }
1667 1634
1668
1669 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { 1635 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1670 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); 1636 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1671 } 1637 }
1672 1638
1673
1674 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { 1639 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1675 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 1640 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1676 } 1641 }
1677 1642
1678
1679 // Clobbers {t2, t3, t4, t5}. 1643 // Clobbers {t2, t3, t4, t5}.
1680 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, 1644 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1681 Register function_template_info, 1645 Register function_template_info,
1682 Label* receiver_check_failed) { 1646 Label* receiver_check_failed) {
1683 Register signature = t2; 1647 Register signature = t2;
1684 Register map = t3; 1648 Register map = t3;
1685 Register constructor = t4; 1649 Register constructor = t4;
1686 Register scratch = t5; 1650 Register scratch = t5;
1687 1651
1688 // If there is no signature, return the holder. 1652 // If there is no signature, return the holder.
(...skipping 42 matching lines...)
1731 __ DecodeField<Map::HasHiddenPrototype>(scratch); 1695 __ DecodeField<Map::HasHiddenPrototype>(scratch);
1732 __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg)); 1696 __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
1733 __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); 1697 __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1734 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); 1698 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1735 1699
1736 __ Branch(&prototype_loop_start); 1700 __ Branch(&prototype_loop_start);
1737 1701
1738 __ bind(&receiver_check_passed); 1702 __ bind(&receiver_check_passed);
1739 } 1703 }
1740 1704
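Only the tail of CompatibleReceiverCheck is visible above, but the loop shape is: test the receiver against the signature, and if it does not match, follow hidden prototypes until one matches or the chain ends. A control-flow sketch with stand-in types (the real check also compares FunctionTemplateInfo constructors against the signature; none of these names are V8 API):

  struct MapInfoSketch {
    bool has_hidden_prototype;
    void* prototype;
  };

  bool ReceiverCheckSketch(void* receiver,
                           MapInfoSketch (*map_of)(void*),
                           bool (*matches_signature)(void*)) {
    while (true) {
      if (matches_signature(receiver)) return true;   // receiver_check_passed
      MapInfoSketch map = map_of(receiver);
      if (!map.has_hidden_prototype) return false;    // jump to receiver_check_failed
      receiver = map.prototype;                       // continue at prototype_loop_start
    }
  }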
1741
1742 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) { 1705 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1743 // ----------- S t a t e ------------- 1706 // ----------- S t a t e -------------
1744 // -- a0 : number of arguments excluding receiver 1707 // -- a0 : number of arguments excluding receiver
1745 // -- a1 : callee 1708 // -- a1 : callee
1746 // -- ra : return address 1709 // -- ra : return address
1747 // -- sp[0] : last argument 1710 // -- sp[0] : last argument
1748 // -- ... 1711 // -- ...
1749 // -- sp[4 * (argc - 1)] : first argument 1712 // -- sp[4 * (argc - 1)] : first argument
1750 // -- sp[4 * argc] : receiver 1713 // -- sp[4 * argc] : receiver
1751 // ----------------------------------- 1714 // -----------------------------------
(...skipping 16 matching lines...)
1768 __ Jump(t2); 1731 __ Jump(t2);
1769 1732
1770 // Compatible receiver check failed: throw an Illegal Invocation exception. 1733 // Compatible receiver check failed: throw an Illegal Invocation exception.
1771 __ bind(&receiver_check_failed); 1734 __ bind(&receiver_check_failed);
1772 // Drop the arguments (including the receiver); 1735 // Drop the arguments (including the receiver);
1773 __ Addu(t8, t8, Operand(kPointerSize)); 1736 __ Addu(t8, t8, Operand(kPointerSize));
1774 __ addu(sp, t8, zero_reg); 1737 __ addu(sp, t8, zero_reg);
1775 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); 1738 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1776 } 1739 }
1777 1740
1778
1779 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 1741 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1780 // Lookup the function in the JavaScript frame. 1742 // Lookup the function in the JavaScript frame.
1781 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1743 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1782 { 1744 {
1783 FrameScope scope(masm, StackFrame::INTERNAL); 1745 FrameScope scope(masm, StackFrame::INTERNAL);
1784 // Pass function as argument. 1746 // Pass function as argument.
1785 __ push(a0); 1747 __ push(a0);
1786 __ CallRuntime(Runtime::kCompileForOnStackReplacement); 1748 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1787 } 1749 }
1788 1750
1789 // If the code object is null, just return to the unoptimized code. 1751 // If the code object is null, just return to the unoptimized code.
1790 __ Ret(eq, v0, Operand(Smi::FromInt(0))); 1752 __ Ret(eq, v0, Operand(Smi::FromInt(0)));
1791 1753
1792 // Load deoptimization data from the code object. 1754 // Load deoptimization data from the code object.
1793 // <deopt_data> = <code>[#deoptimization_data_offset] 1755 // <deopt_data> = <code>[#deoptimization_data_offset]
1794 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); 1756 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1795 1757
1796 // Load the OSR entrypoint offset from the deoptimization data. 1758 // Load the OSR entrypoint offset from the deoptimization data.
1797 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] 1759 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1798 __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt( 1760 __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
1799 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag)); 1761 DeoptimizationInputData::kOsrPcOffsetIndex) -
1762 kHeapObjectTag));
1800 __ SmiUntag(a1); 1763 __ SmiUntag(a1);
1801 1764
1802 // Compute the target address = code_obj + header_size + osr_offset 1765 // Compute the target address = code_obj + header_size + osr_offset
1803 // <entry_addr> = <code_obj> + #header_size + <osr_offset> 1766 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1804 __ addu(v0, v0, a1); 1767 __ addu(v0, v0, a1);
1805 __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag); 1768 __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1806 1769
1807 // And "return" to the OSR entry point of the function. 1770 // And "return" to the OSR entry point of the function.
1808 __ Ret(); 1771 __ Ret();
1809 } 1772 }
1810 1773
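The OSR entry address above is assembled from three pieces: the tagged optimized Code object returned by the runtime, the osr_pc_offset read from its deoptimization data, and the code header size. A sketch of that arithmetic, again assuming kHeapObjectTag == 1; the helper name is illustrative:

  #include <cstdint>

  // ra = code_obj + osr_pc_offset + Code::kHeaderSize - kHeapObjectTag
  inline uintptr_t OsrEntryAddress(uintptr_t tagged_code, uint32_t header_size,
                                   uint32_t osr_pc_offset) {
    return (tagged_code - 1) + header_size + osr_pc_offset;
  }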
1811
1812 // static 1774 // static
1813 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm, 1775 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
1814 int field_index) { 1776 int field_index) {
1815 // ----------- S t a t e ------------- 1777 // ----------- S t a t e -------------
1816 // -- a0 : number of arguments 1778 // -- a0 : number of arguments
1817 // -- a1 : function 1779 // -- a1 : function
1818 // -- cp : context 1780 // -- cp : context
1819 // -- sp[0] : receiver 1781 // -- sp[0] : receiver
1820 // ----------------------------------- 1782 // -----------------------------------
1821 1783
(...skipping 108 matching lines...)
1930 } 1892 }
1931 1893
1932 // 4c. The receiver is not callable, throw an appropriate TypeError. 1894 // 4c. The receiver is not callable, throw an appropriate TypeError.
1933 __ bind(&receiver_not_callable); 1895 __ bind(&receiver_not_callable);
1934 { 1896 {
1935 __ sw(a1, MemOperand(sp)); 1897 __ sw(a1, MemOperand(sp));
1936 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); 1898 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1937 } 1899 }
1938 } 1900 }
1939 1901
1940
1941 // static 1902 // static
1942 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { 1903 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1943 // 1. Make sure we have at least one argument. 1904 // 1. Make sure we have at least one argument.
1944 // a0: actual number of arguments 1905 // a0: actual number of arguments
1945 { 1906 {
1946 Label done; 1907 Label done;
1947 __ Branch(&done, ne, a0, Operand(zero_reg)); 1908 __ Branch(&done, ne, a0, Operand(zero_reg));
1948 __ PushRoot(Heap::kUndefinedValueRootIndex); 1909 __ PushRoot(Heap::kUndefinedValueRootIndex);
1949 __ Addu(a0, a0, Operand(1)); 1910 __ Addu(a0, a0, Operand(1));
1950 __ bind(&done); 1911 __ bind(&done);
(...skipping 22 matching lines...)
1973 // Adjust the actual number of arguments and remove the top element 1934 // Adjust the actual number of arguments and remove the top element
1974 // (which is a copy of the last argument). 1935 // (which is a copy of the last argument).
1975 __ Subu(a0, a0, Operand(1)); 1936 __ Subu(a0, a0, Operand(1));
1976 __ Pop(); 1937 __ Pop();
1977 } 1938 }
1978 1939
1979 // 4. Call the callable. 1940 // 4. Call the callable.
1980 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 1941 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1981 } 1942 }
1982 1943
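At the JS level the builtin above implements Function.prototype.call: the receiver is the function to call, the first argument (undefined if absent) becomes the new receiver, and the remaining arguments shift down one slot. A loose semantic model over a simple vector, not the real stack shuffle; layout and names are a simplification:

  #include <utility>
  #include <vector>

  // slots[0] is the original receiver (the callee), slots[1] is thisArg, then the args.
  std::pair<void*, std::vector<void*>> PrototypeCallSketch(std::vector<void*> slots) {
    if (slots.size() == 1) slots.push_back(nullptr);   // push undefined if there is no thisArg
    void* callee = slots.front();
    slots.erase(slots.begin());                        // drop the callee slot; argc shrinks by one
    return {callee, std::move(slots)};
  }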
1983
1984 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { 1944 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1985 // ----------- S t a t e ------------- 1945 // ----------- S t a t e -------------
1986 // -- a0 : argc 1946 // -- a0 : argc
1987 // -- sp[0] : argumentsList 1947 // -- sp[0] : argumentsList
1988 // -- sp[4] : thisArgument 1948 // -- sp[4] : thisArgument
1989 // -- sp[8] : target 1949 // -- sp[8] : target
1990 // -- sp[12] : receiver 1950 // -- sp[12] : receiver
1991 // ----------------------------------- 1951 // -----------------------------------
1992 1952
1993 // 1. Load target into a1 (if present), argumentsList into a0 (if present), 1953 // 1. Load target into a1 (if present), argumentsList into a0 (if present),
(...skipping 43 matching lines...)
2037 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 1997 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2038 1998
2039 // 3b. The target is not callable, throw an appropriate TypeError. 1999 // 3b. The target is not callable, throw an appropriate TypeError.
2040 __ bind(&target_not_callable); 2000 __ bind(&target_not_callable);
2041 { 2001 {
2042 __ sw(a1, MemOperand(sp)); 2002 __ sw(a1, MemOperand(sp));
2043 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); 2003 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2044 } 2004 }
2045 } 2005 }
2046 2006
2047
2048 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { 2007 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2049 // ----------- S t a t e ------------- 2008 // ----------- S t a t e -------------
2050 // -- a0 : argc 2009 // -- a0 : argc
2051 // -- sp[0] : new.target (optional) 2010 // -- sp[0] : new.target (optional)
2052 // -- sp[4] : argumentsList 2011 // -- sp[4] : argumentsList
2053 // -- sp[8] : target 2012 // -- sp[8] : target
2054 // -- sp[12] : receiver 2013 // -- sp[12] : receiver
2055 // ----------------------------------- 2014 // -----------------------------------
2056 2015
2057 // 1. Load target into a1 (if present), argumentsList into a0 (if present), 2016 // 1. Load target into a1 (if present), argumentsList into a0 (if present),
(...skipping 58 matching lines...)
2116 } 2075 }
2117 2076
2118 // 4c. The new.target is not a constructor, throw an appropriate TypeError. 2077 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
2119 __ bind(&new_target_not_constructor); 2078 __ bind(&new_target_not_constructor);
2120 { 2079 {
2121 __ sw(a3, MemOperand(sp)); 2080 __ sw(a3, MemOperand(sp));
2122 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); 2081 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2123 } 2082 }
2124 } 2083 }
2125 2084
2126
2127 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, 2085 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
2128 Label* stack_overflow) { 2086 Label* stack_overflow) {
2129 // ----------- S t a t e ------------- 2087 // ----------- S t a t e -------------
2130 // -- a0 : actual number of arguments 2088 // -- a0 : actual number of arguments
2131 // -- a1 : function (passed through to callee) 2089 // -- a1 : function (passed through to callee)
2132 // -- a2 : expected number of arguments 2090 // -- a2 : expected number of arguments
2133 // -- a3 : new target (passed through to callee) 2091 // -- a3 : new target (passed through to callee)
2134 // ----------------------------------- 2092 // -----------------------------------
2135 // Check the stack for overflow. We are not trying to catch 2093 // Check the stack for overflow. We are not trying to catch
2136 // interruptions (e.g. debug break and preemption) here, so the "real stack 2094 // interruptions (e.g. debug break and preemption) here, so the "real stack
2137 // limit" is checked. 2095 // limit" is checked.
2138 __ LoadRoot(t1, Heap::kRealStackLimitRootIndex); 2096 __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
2139 // Make t1 the space we have left. The stack might already be overflowed 2097 // Make t1 the space we have left. The stack might already be overflowed
2140 // here which will cause t1 to become negative. 2098 // here which will cause t1 to become negative.
2141 __ subu(t1, sp, t1); 2099 __ subu(t1, sp, t1);
2142 // Check if the arguments will overflow the stack. 2100 // Check if the arguments will overflow the stack.
2143 __ sll(at, a2, kPointerSizeLog2); 2101 __ sll(at, a2, kPointerSizeLog2);
2144 // Signed comparison. 2102 // Signed comparison.
2145 __ Branch(stack_overflow, le, t1, Operand(at)); 2103 __ Branch(stack_overflow, le, t1, Operand(at));
2146 } 2104 }
2147 2105
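ArgumentAdaptorStackCheck has the same shape as Generate_CheckStackOverflow earlier in this file, but it checks the expected (not actual) argument count and branches to the overflow label on "le". A one-line arithmetic sketch under the same 32-bit assumptions; the helper name is illustrative:

  #include <cstdint>

  inline bool AdaptorArgsFit(uintptr_t sp, uintptr_t real_stack_limit,
                             int32_t expected_argc) {
    int32_t space = static_cast<int32_t>(sp - real_stack_limit);
    return space > (expected_argc << 2);  // the "le" case above is the overflow path
  }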
2148
2149 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 2106 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2150 __ sll(a0, a0, kSmiTagSize); 2107 __ sll(a0, a0, kSmiTagSize);
2151 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2108 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2152 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); 2109 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
2153 __ Addu(fp, sp, 2110 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2154 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); 2111 kPointerSize));
2155 } 2112 }
2156 2113
2157
2158 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { 2114 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2159 // ----------- S t a t e ------------- 2115 // ----------- S t a t e -------------
2160 // -- v0 : result being passed through 2116 // -- v0 : result being passed through
2161 // ----------------------------------- 2117 // -----------------------------------
2162 // Get the number of arguments passed (as a smi), tear down the frame and 2118 // Get the number of arguments passed (as a smi), tear down the frame and
2163 // then tear down the parameters. 2119 // then tear down the parameters.
2164 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + 2120 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2165 kPointerSize))); 2121 kPointerSize)));
2166 __ mov(sp, fp); 2122 __ mov(sp, fp);
2167 __ MultiPop(fp.bit() | ra.bit()); 2123 __ MultiPop(fp.bit() | ra.bit());
2168 __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize); 2124 __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize);
2169 // Adjust for the receiver. 2125 // Adjust for the receiver.
2170 __ Addu(sp, sp, Operand(kPointerSize)); 2126 __ Addu(sp, sp, Operand(kPointerSize));
2171 } 2127 }
2172 2128
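Teardown arithmetic for LeaveArgumentsAdaptorFrame, using the Smi-tagged argument count that EnterArgumentsAdaptorFrame stored in the frame; 32-bit words assumed, helper name illustrative:

  #include <cstdint>

  inline uintptr_t SpAfterAdaptorTeardown(uintptr_t fp, int32_t smi_tagged_argc) {
    uintptr_t sp = fp;                 // __ mov(sp, fp)
    sp += 2 * 4;                       // MultiPop of fp and ra
    sp += (smi_tagged_argc >> 1) * 4;  // drop the pushed arguments (Lsa by log2(4) - 1)
    sp += 4;                           // drop the receiver
    return sp;
  }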
2173
2174 // static 2129 // static
2175 void Builtins::Generate_Apply(MacroAssembler* masm) { 2130 void Builtins::Generate_Apply(MacroAssembler* masm) {
2176 // ----------- S t a t e ------------- 2131 // ----------- S t a t e -------------
2177 // -- a0 : argumentsList 2132 // -- a0 : argumentsList
2178 // -- a1 : target 2133 // -- a1 : target
2179 // -- a3 : new.target (checked to be constructor or undefined) 2134 // -- a3 : new.target (checked to be constructor or undefined)
2180 // -- sp[0] : thisArgument 2135 // -- sp[0] : thisArgument
2181 // ----------------------------------- 2136 // -----------------------------------
2182 2137
2183 // Create the list of arguments from the array-like argumentsList. 2138 // Create the list of arguments from the array-like argumentsList.
(...skipping 303 matching lines...)
2487 2442
2488 // The function is a "classConstructor", need to raise an exception. 2443 // The function is a "classConstructor", need to raise an exception.
2489 __ bind(&class_constructor); 2444 __ bind(&class_constructor);
2490 { 2445 {
2491 FrameScope frame(masm, StackFrame::INTERNAL); 2446 FrameScope frame(masm, StackFrame::INTERNAL);
2492 __ Push(a1); 2447 __ Push(a1);
2493 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); 2448 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2494 } 2449 }
2495 } 2450 }
2496 2451
2497
2498 // static 2452 // static
2499 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, 2453 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2500 TailCallMode tail_call_mode) { 2454 TailCallMode tail_call_mode) {
2501 // ----------- S t a t e ------------- 2455 // ----------- S t a t e -------------
2502 // -- a0 : the number of arguments (not including the receiver) 2456 // -- a0 : the number of arguments (not including the receiver)
2503 // -- a1 : the function to call (checked to be a JSBoundFunction) 2457 // -- a1 : the function to call (checked to be a JSBoundFunction)
2504 // ----------------------------------- 2458 // -----------------------------------
2505 __ AssertBoundFunction(a1); 2459 __ AssertBoundFunction(a1);
2506 2460
2507 if (tail_call_mode == TailCallMode::kAllow) { 2461 if (tail_call_mode == TailCallMode::kAllow) {
(...skipping 74 matching lines...)
2582 2536
2583 // Call the [[BoundTargetFunction]] via the Call builtin. 2537 // Call the [[BoundTargetFunction]] via the Call builtin.
2584 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); 2538 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2585 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, 2539 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2586 masm->isolate()))); 2540 masm->isolate())));
2587 __ lw(at, MemOperand(at)); 2541 __ lw(at, MemOperand(at));
2588 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); 2542 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
2589 __ Jump(at); 2543 __ Jump(at);
2590 } 2544 }
2591 2545
2592
2593 // static 2546 // static
2594 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, 2547 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2595 TailCallMode tail_call_mode) { 2548 TailCallMode tail_call_mode) {
2596 // ----------- S t a t e ------------- 2549 // ----------- S t a t e -------------
2597 // -- a0 : the number of arguments (not including the receiver) 2550 // -- a0 : the number of arguments (not including the receiver)
2598 // -- a1 : the target to call (can be any Object). 2551 // -- a1 : the target to call (can be any Object).
2599 // ----------------------------------- 2552 // -----------------------------------
2600 2553
2601 Label non_callable, non_function, non_smi; 2554 Label non_callable, non_function, non_smi;
2602 __ JumpIfSmi(a1, &non_callable); 2555 __ JumpIfSmi(a1, &non_callable);
(...skipping 39 matching lines...)
2642 2595
2643 // 3. Call to something that is not callable. 2596 // 3. Call to something that is not callable.
2644 __ bind(&non_callable); 2597 __ bind(&non_callable);
2645 { 2598 {
2646 FrameScope scope(masm, StackFrame::INTERNAL); 2599 FrameScope scope(masm, StackFrame::INTERNAL);
2647 __ Push(a1); 2600 __ Push(a1);
2648 __ CallRuntime(Runtime::kThrowCalledNonCallable); 2601 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2649 } 2602 }
2650 } 2603 }
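
The non_callable branch above is the funnel for every call target without [[Call]], including Smis. A minimal TypeScript sketch of what ends up there (illustrative only; notCallable is a made-up name):

  const notCallable: unknown = 42; // a Smi in V8 terms
  try {
    (notCallable as () => void)();
  } catch (e) {
    console.log(e instanceof TypeError); // true ("... is not a function")
  }
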
2651 2604
2652
2653 // static 2605 // static
2654 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { 2606 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2655 // ----------- S t a t e ------------- 2607 // ----------- S t a t e -------------
2656 // -- a0 : the number of arguments (not including the receiver) 2608 // -- a0 : the number of arguments (not including the receiver)
2657 // -- a1 : the constructor to call (checked to be a JSFunction) 2609 // -- a1 : the constructor to call (checked to be a JSFunction)
2658 // -- a3 : the new target (checked to be a constructor) 2610 // -- a3 : the new target (checked to be a constructor)
2659 // ----------------------------------- 2611 // -----------------------------------
2660 __ AssertFunction(a1); 2612 __ AssertFunction(a1);
2661 2613
2662 // Calling convention for function-specific ConstructStubs requires 2614 // Calling convention for function-specific ConstructStubs requires
2663 // a2 to contain either an AllocationSite or undefined. 2615 // a2 to contain either an AllocationSite or undefined.
2664 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 2616 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2665 2617
2666 // Tail call to the function-specific construct stub (still in the caller 2618 // Tail call to the function-specific construct stub (still in the caller
2667 // context at this point). 2619 // context at this point).
2668 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); 2620 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2669 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); 2621 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
2670 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); 2622 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
2671 __ Jump(at); 2623 __ Jump(at);
2672 } 2624 }
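
As context for the new.target plumbing in a3 above, a minimal TypeScript sketch showing that the value a constructor observes is the original new.target, forwarded unchanged through the construct stub (illustrative only; classes A and B are made-up names):

  class A {
    viaSubclass: boolean;
    constructor() {
      // new.target is the constructor `new` was originally invoked on.
      this.viaSubclass = new.target !== A;
    }
  }
  class B extends A {}
  console.log(new A().viaSubclass); // false
  console.log(new B().viaSubclass); // true
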
2673 2625
2674
2675 // static 2626 // static
2676 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { 2627 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2677 // ----------- S t a t e ------------- 2628 // ----------- S t a t e -------------
2678 // -- a0 : the number of arguments (not including the receiver) 2629 // -- a0 : the number of arguments (not including the receiver)
2679 // -- a1 : the function to call (checked to be a JSBoundFunction) 2630 // -- a1 : the function to call (checked to be a JSBoundFunction)
2680 // -- a3 : the new target (checked to be a constructor) 2631 // -- a3 : the new target (checked to be a constructor)
2681 // ----------------------------------- 2632 // -----------------------------------
2682 __ AssertBoundFunction(a1); 2633 __ AssertBoundFunction(a1);
2683 2634
2684 // Load [[BoundArguments]] into a2 and length of that into t0. 2635 // Load [[BoundArguments]] into a2 and length of that into t0.
(...skipping 71 matching lines...)
2756 } 2707 }
2757 2708
2758 // Construct the [[BoundTargetFunction]] via the Construct builtin. 2709 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2759 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); 2710 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2760 __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); 2711 __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
2761 __ lw(at, MemOperand(at)); 2712 __ lw(at, MemOperand(at));
2762 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); 2713 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
2763 __ Jump(at); 2714 __ Jump(at);
2764 } 2715 }
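
For reference, the construct path for bound functions above prepends [[BoundArguments]] and then constructs [[BoundTargetFunction]] via the Construct builtin; the bound receiver plays no role under new. A minimal TypeScript sketch (illustrative only; Pair and MakePair are made-up names):

  class Pair {
    constructor(public first: number, public second: number) {}
  }
  // The bound receiver (null) is ignored when MakePair is used with `new`.
  const MakePair = Pair.bind(null, 1);
  const p = new MakePair(2);
  console.log(p.first, p.second); // 1 2
  console.log(p instanceof Pair); // true
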
2765 2716
2766
2767 // static 2717 // static
2768 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { 2718 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2769 // ----------- S t a t e ------------- 2719 // ----------- S t a t e -------------
2770 // -- a0 : the number of arguments (not including the receiver) 2720 // -- a0 : the number of arguments (not including the receiver)
2771 // -- a1 : the constructor to call (checked to be a JSProxy) 2721 // -- a1 : the constructor to call (checked to be a JSProxy)
2772 // -- a3 : the new target (either the same as the constructor or 2722 // -- a3 : the new target (either the same as the constructor or
2773 // the JSFunction on which new was invoked initially) 2723 // the JSFunction on which new was invoked initially)
2774 // ----------------------------------- 2724 // -----------------------------------
2775 2725
2776 // Call into the Runtime for Proxy [[Construct]]. 2726 // Call into the Runtime for Proxy [[Construct]].
2777 __ Push(a1, a3); 2727 __ Push(a1, a3);
2778 // Include the pushed new_target, constructor and the receiver. 2728 // Include the pushed new_target, constructor and the receiver.
2779 __ Addu(a0, a0, Operand(3)); 2729 __ Addu(a0, a0, Operand(3));
2780 // Tail-call to the runtime. 2730 // Tail-call to the runtime.
2781 __ JumpToExternalReference( 2731 __ JumpToExternalReference(
2782 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); 2732 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2783 } 2733 }
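
The proxy construct path above simply forwards to the runtime, which ends up invoking the proxy's construct trap with the arguments and new.target. A minimal TypeScript sketch of the behaviour being delegated (illustrative only; Target and P are made-up names):

  class Target {
    constructor(public value: number) {}
  }
  const P = new Proxy(Target, {
    construct(target, args, newTarget) {
      // The trap receives the original argument list and new.target.
      console.log(newTarget === P); // true
      return new target(args[0] * 2);
    },
  });
  console.log(new P(7).value); // 14
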
2784 2734
2785
2786 // static 2735 // static
2787 void Builtins::Generate_Construct(MacroAssembler* masm) { 2736 void Builtins::Generate_Construct(MacroAssembler* masm) {
2788 // ----------- S t a t e ------------- 2737 // ----------- S t a t e -------------
2789 // -- a0 : the number of arguments (not including the receiver) 2738 // -- a0 : the number of arguments (not including the receiver)
2790 // -- a1 : the constructor to call (can be any Object) 2739 // -- a1 : the constructor to call (can be any Object)
2791 // -- a3 : the new target (either the same as the constructor or 2740 // -- a3 : the new target (either the same as the constructor or
2792 // the JSFunction on which new was invoked initially) 2741 // the JSFunction on which new was invoked initially)
2793 // ----------------------------------- 2742 // -----------------------------------
2794 2743
2795 // Check if target is a Smi. 2744 // Check if target is a Smi.
(...skipping 145 matching lines...)
2941 // ----------- S t a t e ------------- 2890 // ----------- S t a t e -------------
2942 // -- a0: actual arguments count 2891 // -- a0: actual arguments count
2943 // -- a1: function (passed through to callee) 2892 // -- a1: function (passed through to callee)
2944 // -- a2: expected arguments count 2893 // -- a2: expected arguments count
2945 // -- a3: new target (passed through to callee) 2894 // -- a3: new target (passed through to callee)
2946 // ----------------------------------- 2895 // -----------------------------------
2947 2896
2948 Label invoke, dont_adapt_arguments, stack_overflow; 2897 Label invoke, dont_adapt_arguments, stack_overflow;
2949 2898
2950 Label enough, too_few; 2899 Label enough, too_few;
2951 __ Branch(&dont_adapt_arguments, eq, 2900 __ Branch(&dont_adapt_arguments, eq, a2,
2952 a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); 2901 Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2953 // We use Uless as the number of arguments should always be greater than 0. 2902 // We use Uless as the number of arguments should always be greater than 0.
2954 __ Branch(&too_few, Uless, a0, Operand(a2)); 2903 __ Branch(&too_few, Uless, a0, Operand(a2));
2955 2904
2956 { // Enough parameters: actual >= expected. 2905 { // Enough parameters: actual >= expected.
2957 // a0: actual number of arguments as a smi 2906 // a0: actual number of arguments as a smi
2958 // a1: function 2907 // a1: function
2959 // a2: expected number of arguments 2908 // a2: expected number of arguments
2960 // a3: new target (passed through to callee) 2909 // a3: new target (passed through to callee)
2961 __ bind(&enough); 2910 __ bind(&enough);
2962 EnterArgumentsAdaptorFrame(masm); 2911 EnterArgumentsAdaptorFrame(masm);
(...skipping 81 matching lines...)
3044 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); 2993 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
3045 __ Call(t0); 2994 __ Call(t0);
3046 2995
3047 // Store offset of return address for deoptimizer. 2996 // Store offset of return address for deoptimizer.
3048 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); 2997 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
3049 2998
3050 // Exit frame and return. 2999 // Exit frame and return.
3051 LeaveArgumentsAdaptorFrame(masm); 3000 LeaveArgumentsAdaptorFrame(masm);
3052 __ Ret(); 3001 __ Ret();
3053 3002
3054
3055 // ------------------------------------------- 3003 // -------------------------------------------
3056 // Don't adapt arguments. 3004 // Don't adapt arguments.
3057 // ------------------------------------------- 3005 // -------------------------------------------
3058 __ bind(&dont_adapt_arguments); 3006 __ bind(&dont_adapt_arguments);
3059 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); 3007 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
3060 __ Jump(t0); 3008 __ Jump(t0);
3061 3009
3062 __ bind(&stack_overflow); 3010 __ bind(&stack_overflow);
3063 { 3011 {
3064 FrameScope frame(masm, StackFrame::MANUAL); 3012 FrameScope frame(masm, StackFrame::MANUAL);
3065 __ CallRuntime(Runtime::kThrowStackOverflow); 3013 __ CallRuntime(Runtime::kThrowStackOverflow);
3066 __ break_(0xCC); 3014 __ break_(0xCC);
3067 } 3015 }
3068 } 3016 }
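
To recap what the adaptor frame above handles: JS permits the actual and expected argument counts to differ, so missing parameters are padded with undefined while surplus ones remain observable. A minimal TypeScript sketch of the mismatch being papered over (illustrative only; declared and f are made-up names):

  function declared(a: number, b?: number): string {
    return `${a} ${b} ${arguments.length}`;
  }
  const f = declared as (...args: number[]) => string;
  console.log(f(1));       // "1 undefined 1"  (too few: padded with undefined)
  console.log(f(1, 2, 3)); // "1 2 3"          (too many: extras still visible)
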
3069 3017
3070
3071 #undef __ 3018 #undef __
3072 3019
3073 } // namespace internal 3020 } // namespace internal
3074 } // namespace v8 3021 } // namespace v8
3075 3022
3076 #endif // V8_TARGET_ARCH_MIPS 3023 #endif // V8_TARGET_ARCH_MIPS