OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 176 matching lines...)
187 __ bind(&to_string); | 187 __ bind(&to_string); |
188 { | 188 { |
189 ToStringStub stub(masm->isolate()); | 189 ToStringStub stub(masm->isolate()); |
190 __ TailCallStub(&stub); | 190 __ TailCallStub(&stub); |
191 } | 191 } |
192 | 192 |
193 // 3b. Convert symbol in a0 to a string. | 193 // 3b. Convert symbol in a0 to a string. |
194 __ bind(&symbol_descriptive_string); | 194 __ bind(&symbol_descriptive_string); |
195 { | 195 { |
196 __ Push(a0); | 196 __ Push(a0); |
197 __ TailCallRuntime(Runtime::kSymbolDescriptiveString, 1); | 197 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); |
198 } | 198 } |
199 } | 199 } |
200 | 200 |
201 | 201 |
202 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) { | 202 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) { |
203 // ----------- S t a t e ------------- | 203 // ----------- S t a t e ------------- |
204 // -- a0 : number of arguments | 204 // -- a0 : number of arguments |
205 // -- a1 : constructor function | 205 // -- a1 : constructor function |
206 // -- a3 : new target | 206 // -- a3 : new target |
207 // -- ra : return address | 207 // -- ra : return address |
(...skipping 60 matching lines...)
268 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); | 268 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); |
269 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize); | 269 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize); |
270 __ Ret(); | 270 __ Ret(); |
271 } | 271 } |
272 | 272 |
273 // 5. Fallback to the runtime to create new object. | 273 // 5. Fallback to the runtime to create new object. |
274 __ bind(&new_object); | 274 __ bind(&new_object); |
275 { | 275 { |
276 FrameScope scope(masm, StackFrame::INTERNAL); | 276 FrameScope scope(masm, StackFrame::INTERNAL); |
277 __ Push(a0, a1, a3); // first argument, constructor, new target | 277 __ Push(a0, a1, a3); // first argument, constructor, new target |
278 __ CallRuntime(Runtime::kNewObject, 2); | 278 __ CallRuntime(Runtime::kNewObject); |
279 __ Pop(a0); | 279 __ Pop(a0); |
280 } | 280 } |
281 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); | 281 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); |
282 __ Ret(); | 282 __ Ret(); |
283 } | 283 } |
284 | 284 |
285 | 285 |
286 static void CallRuntimePassFunction( | 286 static void CallRuntimePassFunction( |
287 MacroAssembler* masm, Runtime::FunctionId function_id) { | 287 MacroAssembler* masm, Runtime::FunctionId function_id) { |
288 // ----------- S t a t e ------------- | 288 // ----------- S t a t e ------------- |
(...skipping 159 matching lines...)
448 // filler map. | 448 // filler map. |
449 __ LoadRoot(t3, Heap::kOnePointerFillerMapRootIndex); | 449 __ LoadRoot(t3, Heap::kOnePointerFillerMapRootIndex); |
450 __ InitializeFieldsWithFiller(t1, a4, t3); | 450 __ InitializeFieldsWithFiller(t1, a4, t3); |
451 | 451 |
452 // a6: slack tracking counter value before decreasing. | 452 // a6: slack tracking counter value before decreasing. |
453 __ Branch(&allocated, ne, a6, Operand(Map::kSlackTrackingCounterEnd)); | 453 __ Branch(&allocated, ne, a6, Operand(Map::kSlackTrackingCounterEnd)); |
454 | 454 |
455 // Push the constructor, new_target and the object to the stack, | 455 // Push the constructor, new_target and the object to the stack, |
456 // and then the initial map as an argument to the runtime call. | 456 // and then the initial map as an argument to the runtime call. |
457 __ Push(a1, a3, t0, a2); | 457 __ Push(a1, a3, t0, a2); |
458 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); | 458 __ CallRuntime(Runtime::kFinalizeInstanceSize); |
459 __ Pop(a1, a3, t0); | 459 __ Pop(a1, a3, t0); |
460 | 460 |
461 // Continue with JSObject being successfully allocated. | 461 // Continue with JSObject being successfully allocated. |
462 // a1: constructor function | 462 // a1: constructor function |
463 // a3: new target | 463 // a3: new target |
464 // t0: JSObject | 464 // t0: JSObject |
465 __ jmp(&allocated); | 465 __ jmp(&allocated); |
466 | 466 |
467 __ bind(&no_inobject_slack_tracking); | 467 __ bind(&no_inobject_slack_tracking); |
468 } | 468 } |
469 | 469 |
470 __ InitializeFieldsWithFiller(t1, a4, t3); | 470 __ InitializeFieldsWithFiller(t1, a4, t3); |
471 | 471 |
472 // Continue with JSObject being successfully allocated. | 472 // Continue with JSObject being successfully allocated. |
473 // a1: constructor function | 473 // a1: constructor function |
474 // a3: new target | 474 // a3: new target |
475 // t0: JSObject | 475 // t0: JSObject |
476 __ jmp(&allocated); | 476 __ jmp(&allocated); |
477 } | 477 } |
478 | 478 |
479 // Allocate the new receiver object using the runtime call. | 479 // Allocate the new receiver object using the runtime call. |
480 // a1: constructor function | 480 // a1: constructor function |
481 // a3: new target | 481 // a3: new target |
482 __ bind(&rt_call); | 482 __ bind(&rt_call); |
483 | 483 |
484 // Push the constructor and new_target twice, second pair as arguments | 484 // Push the constructor and new_target twice, second pair as arguments |
485 // to the runtime call. | 485 // to the runtime call. |
486 __ Push(a1, a3, a1, a3); // constructor function, new target | 486 __ Push(a1, a3, a1, a3); // constructor function, new target |
487 __ CallRuntime(Runtime::kNewObject, 2); | 487 __ CallRuntime(Runtime::kNewObject); |
488 __ mov(t0, v0); | 488 __ mov(t0, v0); |
489 __ Pop(a1, a3); | 489 __ Pop(a1, a3); |
490 | 490 |
491 // Receiver for constructor call allocated. | 491 // Receiver for constructor call allocated. |
492 // a1: constructor function | 492 // a1: constructor function |
493 // a3: new target | 493 // a3: new target |
494 // t0: JSObject | 494 // t0: JSObject |
495 __ bind(&allocated); | 495 __ bind(&allocated); |
496 | 496 |
497 __ ld(a0, MemOperand(sp)); | 497 __ ld(a0, MemOperand(sp)); |
(...skipping 113 matching lines...)
611 | 611 |
612 | 612 |
613 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { | 613 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { |
614 Generate_JSConstructStubHelper(masm, false, false); | 614 Generate_JSConstructStubHelper(masm, false, false); |
615 } | 615 } |
616 | 616 |
617 | 617 |
618 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { | 618 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { |
619 FrameScope scope(masm, StackFrame::INTERNAL); | 619 FrameScope scope(masm, StackFrame::INTERNAL); |
620 __ Push(a1); | 620 __ Push(a1); |
621 __ CallRuntime(Runtime::kThrowConstructedNonConstructable, 1); | 621 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); |
622 } | 622 } |
623 | 623 |
624 | 624 |
625 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; | 625 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; |
626 | 626 |
627 | 627 |
628 // Clobbers a2; preserves all other registers. | 628 // Clobbers a2; preserves all other registers. |
629 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, | 629 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, |
630 IsTagged argc_is_tagged) { | 630 IsTagged argc_is_tagged) { |
631 // Check the stack for overflow. We are not trying to catch | 631 // Check the stack for overflow. We are not trying to catch |
632 // interruptions (e.g. debug break and preemption) here, so the "real stack | 632 // interruptions (e.g. debug break and preemption) here, so the "real stack |
633 // limit" is checked. | 633 // limit" is checked. |
634 Label okay; | 634 Label okay; |
635 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 635 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
636 // Make a2 the space we have left. The stack might already be overflowed | 636 // Make a2 the space we have left. The stack might already be overflowed |
637 // here which will cause r2 to become negative. | 637 // here which will cause r2 to become negative. |
638 __ dsubu(a2, sp, a2); | 638 __ dsubu(a2, sp, a2); |
639 // Check if the arguments will overflow the stack. | 639 // Check if the arguments will overflow the stack. |
640 if (argc_is_tagged == kArgcIsSmiTagged) { | 640 if (argc_is_tagged == kArgcIsSmiTagged) { |
641 __ SmiScale(a7, v0, kPointerSizeLog2); | 641 __ SmiScale(a7, v0, kPointerSizeLog2); |
642 } else { | 642 } else { |
643 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); | 643 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); |
644 __ dsll(a7, argc, kPointerSizeLog2); | 644 __ dsll(a7, argc, kPointerSizeLog2); |
645 } | 645 } |
646 __ Branch(&okay, gt, a2, Operand(a7)); // Signed comparison. | 646 __ Branch(&okay, gt, a2, Operand(a7)); // Signed comparison. |
647 | 647 |
648 // Out of stack space. | 648 // Out of stack space. |
649 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 649 __ CallRuntime(Runtime::kThrowStackOverflow); |
650 | 650 |
651 __ bind(&okay); | 651 __ bind(&okay); |
652 } | 652 } |
653 | 653 |
654 | 654 |
655 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 655 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
656 bool is_construct) { | 656 bool is_construct) { |
657 // Called from JSEntryStub::GenerateBody | 657 // Called from JSEntryStub::GenerateBody |
658 | 658 |
659 // ----------- S t a t e ------------- | 659 // ----------- S t a t e ------------- |
(...skipping 131 matching lines...)
791 { | 791 { |
792 // Load frame size (word) from the BytecodeArray object. | 792 // Load frame size (word) from the BytecodeArray object. |
793 __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister, | 793 __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister, |
794 BytecodeArray::kFrameSizeOffset)); | 794 BytecodeArray::kFrameSizeOffset)); |
795 | 795 |
796 // Do a stack check to ensure we don't go over the limit. | 796 // Do a stack check to ensure we don't go over the limit. |
797 Label ok; | 797 Label ok; |
798 __ Dsubu(a5, sp, Operand(a4)); | 798 __ Dsubu(a5, sp, Operand(a4)); |
799 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 799 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
800 __ Branch(&ok, hs, a5, Operand(a2)); | 800 __ Branch(&ok, hs, a5, Operand(a2)); |
801 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 801 __ CallRuntime(Runtime::kThrowStackOverflow); |
802 __ bind(&ok); | 802 __ bind(&ok); |
803 | 803 |
804 // If ok, push undefined as the initial value for all register file entries. | 804 // If ok, push undefined as the initial value for all register file entries. |
805 Label loop_header; | 805 Label loop_header; |
806 Label loop_check; | 806 Label loop_check; |
807 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex); | 807 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex); |
808 __ Branch(&loop_check); | 808 __ Branch(&loop_check); |
809 __ bind(&loop_header); | 809 __ bind(&loop_header); |
810 // TODO(rmcilroy): Consider doing more than one push per loop iteration. | 810 // TODO(rmcilroy): Consider doing more than one push per loop iteration. |
811 __ push(a5); | 811 __ push(a5); |
812 // Continue loop if not done. | 812 // Continue loop if not done. |
813 __ bind(&loop_check); | 813 __ bind(&loop_check); |
814 __ Dsubu(a4, a4, Operand(kPointerSize)); | 814 __ Dsubu(a4, a4, Operand(kPointerSize)); |
815 __ Branch(&loop_header, ge, a4, Operand(zero_reg)); | 815 __ Branch(&loop_header, ge, a4, Operand(zero_reg)); |
816 } | 816 } |
817 | 817 |
818 // TODO(rmcilroy): List of things not currently dealt with here but done in | 818 // TODO(rmcilroy): List of things not currently dealt with here but done in |
819 // fullcodegen's prologue: | 819 // fullcodegen's prologue: |
820 // - Support profiler (specifically profiling_counter). | 820 // - Support profiler (specifically profiling_counter). |
821 // - Call ProfileEntryHookStub when isolate has a function_entry_hook. | 821 // - Call ProfileEntryHookStub when isolate has a function_entry_hook. |
822 // - Allow simulator stop operations if FLAG_stop_at is set. | 822 // - Allow simulator stop operations if FLAG_stop_at is set. |
823 // - Code aging of the BytecodeArray object. | 823 // - Code aging of the BytecodeArray object. |
824 | 824 |
825 // Perform stack guard check. | 825 // Perform stack guard check. |
826 { | 826 { |
827 Label ok; | 827 Label ok; |
828 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 828 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
829 __ Branch(&ok, hs, sp, Operand(at)); | 829 __ Branch(&ok, hs, sp, Operand(at)); |
830 __ push(kInterpreterBytecodeArrayRegister); | 830 __ push(kInterpreterBytecodeArrayRegister); |
831 __ CallRuntime(Runtime::kStackGuard, 0); | 831 __ CallRuntime(Runtime::kStackGuard); |
832 __ pop(kInterpreterBytecodeArrayRegister); | 832 __ pop(kInterpreterBytecodeArrayRegister); |
833 __ bind(&ok); | 833 __ bind(&ok); |
834 } | 834 } |
835 | 835 |
836 // Load bytecode offset and dispatch table into registers. | 836 // Load bytecode offset and dispatch table into registers. |
837 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); | 837 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); |
838 __ Daddu(kInterpreterRegisterFileRegister, fp, | 838 __ Daddu(kInterpreterRegisterFileRegister, fp, |
839 Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp)); | 839 Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp)); |
840 __ li(kInterpreterBytecodeOffsetRegister, | 840 __ li(kInterpreterBytecodeOffsetRegister, |
841 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag)); | 841 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag)); |
(...skipping 100 matching lines...)
942 static void Generate_InterpreterNotifyDeoptimizedHelper( | 942 static void Generate_InterpreterNotifyDeoptimizedHelper( |
943 MacroAssembler* masm, Deoptimizer::BailoutType type) { | 943 MacroAssembler* masm, Deoptimizer::BailoutType type) { |
944 // Enter an internal frame. | 944 // Enter an internal frame. |
945 { | 945 { |
946 FrameScope scope(masm, StackFrame::INTERNAL); | 946 FrameScope scope(masm, StackFrame::INTERNAL); |
947 __ push(kInterpreterAccumulatorRegister); // Save accumulator register. | 947 __ push(kInterpreterAccumulatorRegister); // Save accumulator register. |
948 | 948 |
949 // Pass the deoptimization type to the runtime system. | 949 // Pass the deoptimization type to the runtime system. |
950 __ li(a1, Operand(Smi::FromInt(static_cast<int>(type)))); | 950 __ li(a1, Operand(Smi::FromInt(static_cast<int>(type)))); |
951 __ push(a1); | 951 __ push(a1); |
952 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 952 __ CallRuntime(Runtime::kNotifyDeoptimized); |
953 | 953 |
954 __ pop(kInterpreterAccumulatorRegister); // Restore accumulator register. | 954 __ pop(kInterpreterAccumulatorRegister); // Restore accumulator register. |
955 // Tear down internal frame. | 955 // Tear down internal frame. |
956 } | 956 } |
957 | 957 |
958 // Drop state (we don't use these for interpreter deopts) and push PC at top | 958 // Drop state (we don't use these for interpreter deopts) and push PC at top |
959 // of stack (to simulate initial call to bytecode handler in interpreter entry | 959 // of stack (to simulate initial call to bytecode handler in interpreter entry |
960 // trampoline). | 960 // trampoline). |
961 __ ld(a1, MemOperand(sp)); | 961 __ ld(a1, MemOperand(sp)); |
962 __ Drop(1); | 962 __ Drop(1); |
(...skipping 174 matching lines...)
1137 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 1137 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
1138 SaveFPRegsMode save_doubles) { | 1138 SaveFPRegsMode save_doubles) { |
1139 { | 1139 { |
1140 FrameScope scope(masm, StackFrame::INTERNAL); | 1140 FrameScope scope(masm, StackFrame::INTERNAL); |
1141 | 1141 |
1142 // Preserve registers across notification, this is important for compiled | 1142 // Preserve registers across notification, this is important for compiled |
1143 // stubs that tail call the runtime on deopts passing their parameters in | 1143 // stubs that tail call the runtime on deopts passing their parameters in |
1144 // registers. | 1144 // registers. |
1145 __ MultiPush(kJSCallerSaved | kCalleeSaved); | 1145 __ MultiPush(kJSCallerSaved | kCalleeSaved); |
1146 // Pass the function and deoptimization type to the runtime system. | 1146 // Pass the function and deoptimization type to the runtime system. |
1147 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | 1147 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); |
1148 __ MultiPop(kJSCallerSaved | kCalleeSaved); | 1148 __ MultiPop(kJSCallerSaved | kCalleeSaved); |
1149 } | 1149 } |
1150 | 1150 |
1151 __ Daddu(sp, sp, Operand(kPointerSize)); // Ignore state | 1151 __ Daddu(sp, sp, Operand(kPointerSize)); // Ignore state |
1152 __ Jump(ra); // Jump to miss handler | 1152 __ Jump(ra); // Jump to miss handler |
1153 } | 1153 } |
1154 | 1154 |
1155 | 1155 |
1156 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 1156 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
1157 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 1157 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
1158 } | 1158 } |
1159 | 1159 |
1160 | 1160 |
1161 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 1161 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
1162 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 1162 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
1163 } | 1163 } |
1164 | 1164 |
1165 | 1165 |
1166 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 1166 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
1167 Deoptimizer::BailoutType type) { | 1167 Deoptimizer::BailoutType type) { |
1168 { | 1168 { |
1169 FrameScope scope(masm, StackFrame::INTERNAL); | 1169 FrameScope scope(masm, StackFrame::INTERNAL); |
1170 // Pass the function and deoptimization type to the runtime system. | 1170 // Pass the function and deoptimization type to the runtime system. |
1171 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); | 1171 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); |
1172 __ push(a0); | 1172 __ push(a0); |
1173 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 1173 __ CallRuntime(Runtime::kNotifyDeoptimized); |
1174 } | 1174 } |
1175 | 1175 |
1176 // Get the full codegen state from the stack and untag it -> a6. | 1176 // Get the full codegen state from the stack and untag it -> a6. |
1177 __ ld(a6, MemOperand(sp, 0 * kPointerSize)); | 1177 __ ld(a6, MemOperand(sp, 0 * kPointerSize)); |
1178 __ SmiUntag(a6); | 1178 __ SmiUntag(a6); |
1179 // Switch on the state. | 1179 // Switch on the state. |
1180 Label with_tos_register, unknown_state; | 1180 Label with_tos_register, unknown_state; |
1181 __ Branch(&with_tos_register, | 1181 __ Branch(&with_tos_register, |
1182 ne, a6, Operand(FullCodeGenerator::NO_REGISTERS)); | 1182 ne, a6, Operand(FullCodeGenerator::NO_REGISTERS)); |
1183 __ Ret(USE_DELAY_SLOT); | 1183 __ Ret(USE_DELAY_SLOT); |
(...skipping 135 matching lines...)
1319 __ bind(&set_global_proxy); | 1319 __ bind(&set_global_proxy); |
1320 __ LoadGlobalProxy(t0); | 1320 __ LoadGlobalProxy(t0); |
1321 __ sd(t0, MemOperand(t8)); | 1321 __ sd(t0, MemOperand(t8)); |
1322 __ Branch(&valid_receiver); | 1322 __ Branch(&valid_receiver); |
1323 | 1323 |
1324 // Compatible receiver check failed: throw an Illegal Invocation exception. | 1324 // Compatible receiver check failed: throw an Illegal Invocation exception. |
1325 __ bind(&receiver_check_failed); | 1325 __ bind(&receiver_check_failed); |
1326 // Drop the arguments (including the receiver); | 1326 // Drop the arguments (including the receiver); |
1327 __ Daddu(t8, t8, Operand(kPointerSize)); | 1327 __ Daddu(t8, t8, Operand(kPointerSize)); |
1328 __ daddu(sp, t8, zero_reg); | 1328 __ daddu(sp, t8, zero_reg); |
1329 __ TailCallRuntime(Runtime::kThrowIllegalInvocation, 0); | 1329 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); |
1330 } | 1330 } |
1331 | 1331 |
1332 | 1332 |
1333 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 1333 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
1334 // Lookup the function in the JavaScript frame. | 1334 // Lookup the function in the JavaScript frame. |
1335 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1335 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1336 { | 1336 { |
1337 FrameScope scope(masm, StackFrame::INTERNAL); | 1337 FrameScope scope(masm, StackFrame::INTERNAL); |
1338 // Pass function as argument. | 1338 // Pass function as argument. |
1339 __ push(a0); | 1339 __ push(a0); |
1340 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); | 1340 __ CallRuntime(Runtime::kCompileForOnStackReplacement); |
1341 } | 1341 } |
1342 | 1342 |
1343 // If the code object is null, just return to the unoptimized code. | 1343 // If the code object is null, just return to the unoptimized code. |
1344 __ Ret(eq, v0, Operand(Smi::FromInt(0))); | 1344 __ Ret(eq, v0, Operand(Smi::FromInt(0))); |
1345 | 1345 |
1346 // Load deoptimization data from the code object. | 1346 // Load deoptimization data from the code object. |
1347 // <deopt_data> = <code>[#deoptimization_data_offset] | 1347 // <deopt_data> = <code>[#deoptimization_data_offset] |
1348 __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); | 1348 __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); |
1349 | 1349 |
1350 // Load the OSR entrypoint offset from the deoptimization data. | 1350 // Load the OSR entrypoint offset from the deoptimization data. |
(...skipping 12 matching lines...)
1363 } | 1363 } |
1364 | 1364 |
1365 | 1365 |
1366 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1366 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
1367 // We check the stack limit as indicator that recompilation might be done. | 1367 // We check the stack limit as indicator that recompilation might be done. |
1368 Label ok; | 1368 Label ok; |
1369 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 1369 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
1370 __ Branch(&ok, hs, sp, Operand(at)); | 1370 __ Branch(&ok, hs, sp, Operand(at)); |
1371 { | 1371 { |
1372 FrameScope scope(masm, StackFrame::INTERNAL); | 1372 FrameScope scope(masm, StackFrame::INTERNAL); |
1373 __ CallRuntime(Runtime::kStackGuard, 0); | 1373 __ CallRuntime(Runtime::kStackGuard); |
1374 } | 1374 } |
1375 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 1375 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
1376 RelocInfo::CODE_TARGET); | 1376 RelocInfo::CODE_TARGET); |
1377 | 1377 |
1378 __ bind(&ok); | 1378 __ bind(&ok); |
1379 __ Ret(); | 1379 __ Ret(); |
1380 } | 1380 } |
1381 | 1381 |
1382 | 1382 |
1383 // static | 1383 // static |
(...skipping 57 matching lines...)
1441 __ bind(&no_arguments); | 1441 __ bind(&no_arguments); |
1442 { | 1442 { |
1443 __ mov(a0, zero_reg); | 1443 __ mov(a0, zero_reg); |
1444 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1444 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
1445 } | 1445 } |
1446 | 1446 |
1447 // 4c. The receiver is not callable, throw an appropriate TypeError. | 1447 // 4c. The receiver is not callable, throw an appropriate TypeError. |
1448 __ bind(&receiver_not_callable); | 1448 __ bind(&receiver_not_callable); |
1449 { | 1449 { |
1450 __ sd(a1, MemOperand(sp)); | 1450 __ sd(a1, MemOperand(sp)); |
1451 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1); | 1451 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); |
1452 } | 1452 } |
1453 } | 1453 } |
1454 | 1454 |
1455 | 1455 |
1456 // static | 1456 // static |
1457 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { | 1457 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { |
1458 // 1. Make sure we have at least one argument. | 1458 // 1. Make sure we have at least one argument. |
1459 // a0: actual number of arguments | 1459 // a0: actual number of arguments |
1460 { | 1460 { |
1461 Label done; | 1461 Label done; |
(...skipping 88 matching lines...)
1550 | 1550 |
1551 // 3a. Apply the target to the given argumentsList (passing undefined for | 1551 // 3a. Apply the target to the given argumentsList (passing undefined for |
1552 // new.target). | 1552 // new.target). |
1553 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); | 1553 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); |
1554 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | 1554 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
1555 | 1555 |
1556 // 3b. The target is not callable, throw an appropriate TypeError. | 1556 // 3b. The target is not callable, throw an appropriate TypeError. |
1557 __ bind(&target_not_callable); | 1557 __ bind(&target_not_callable); |
1558 { | 1558 { |
1559 __ sd(a1, MemOperand(sp)); | 1559 __ sd(a1, MemOperand(sp)); |
1560 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1); | 1560 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); |
1561 } | 1561 } |
1562 } | 1562 } |
1563 | 1563 |
1564 | 1564 |
1565 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | 1565 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { |
1566 // ----------- S t a t e ------------- | 1566 // ----------- S t a t e ------------- |
1567 // -- a0 : argc | 1567 // -- a0 : argc |
1568 // -- sp[0] : new.target (optional) | 1568 // -- sp[0] : new.target (optional) |
1569 // -- sp[4] : argumentsList | 1569 // -- sp[4] : argumentsList |
1570 // -- sp[8] : target | 1570 // -- sp[8] : target |
(...skipping 50 matching lines...)
1621 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); | 1621 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); |
1622 __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg)); | 1622 __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg)); |
1623 | 1623 |
1624 // 4a. Construct the target with the given new.target and argumentsList. | 1624 // 4a. Construct the target with the given new.target and argumentsList. |
1625 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | 1625 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
1626 | 1626 |
1627 // 4b. The target is not a constructor, throw an appropriate TypeError. | 1627 // 4b. The target is not a constructor, throw an appropriate TypeError. |
1628 __ bind(&target_not_constructor); | 1628 __ bind(&target_not_constructor); |
1629 { | 1629 { |
1630 __ sd(a1, MemOperand(sp)); | 1630 __ sd(a1, MemOperand(sp)); |
1631 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1); | 1631 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); |
1632 } | 1632 } |
1633 | 1633 |
1634 // 4c. The new.target is not a constructor, throw an appropriate TypeError. | 1634 // 4c. The new.target is not a constructor, throw an appropriate TypeError. |
1635 __ bind(&new_target_not_constructor); | 1635 __ bind(&new_target_not_constructor); |
1636 { | 1636 { |
1637 __ sd(a3, MemOperand(sp)); | 1637 __ sd(a3, MemOperand(sp)); |
1638 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1); | 1638 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); |
1639 } | 1639 } |
1640 } | 1640 } |
1641 | 1641 |
1642 | 1642 |
1643 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | 1643 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, |
1644 Label* stack_overflow) { | 1644 Label* stack_overflow) { |
1645 // ----------- S t a t e ------------- | 1645 // ----------- S t a t e ------------- |
1646 // -- a0 : actual number of arguments | 1646 // -- a0 : actual number of arguments |
1647 // -- a1 : function (passed through to callee) | 1647 // -- a1 : function (passed through to callee) |
1648 // -- a2 : expected number of arguments | 1648 // -- a2 : expected number of arguments |
(...skipping 69 matching lines...)
1718 // Check if argumentsList is a fast JSArray. | 1718 // Check if argumentsList is a fast JSArray. |
1719 __ ld(v0, FieldMemOperand(a2, HeapObject::kMapOffset)); | 1719 __ ld(v0, FieldMemOperand(a2, HeapObject::kMapOffset)); |
1720 __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset)); | 1720 __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset)); |
1721 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); | 1721 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); |
1722 | 1722 |
1723 // Ask the runtime to create the list (actually a FixedArray). | 1723 // Ask the runtime to create the list (actually a FixedArray). |
1724 __ bind(&create_runtime); | 1724 __ bind(&create_runtime); |
1725 { | 1725 { |
1726 FrameScope scope(masm, StackFrame::INTERNAL); | 1726 FrameScope scope(masm, StackFrame::INTERNAL); |
1727 __ Push(a1, a3, a0); | 1727 __ Push(a1, a3, a0); |
1728 __ CallRuntime(Runtime::kCreateListFromArrayLike, 1); | 1728 __ CallRuntime(Runtime::kCreateListFromArrayLike); |
1729 __ mov(a0, v0); | 1729 __ mov(a0, v0); |
1730 __ Pop(a1, a3); | 1730 __ Pop(a1, a3); |
1731 __ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | 1731 __ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
1732 __ SmiUntag(a2); | 1732 __ SmiUntag(a2); |
1733 } | 1733 } |
1734 __ Branch(&done_create); | 1734 __ Branch(&done_create); |
1735 | 1735 |
1736 // Try to create the list from an arguments object. | 1736 // Try to create the list from an arguments object. |
1737 __ bind(&create_arguments); | 1737 __ bind(&create_arguments); |
1738 __ ld(a2, | 1738 __ ld(a2, |
(...skipping 27 matching lines...)
1766 // Check the stack for overflow. We are not trying to catch interruptions | 1766 // Check the stack for overflow. We are not trying to catch interruptions |
1767 // (i.e. debug break and preemption) here, so check the "real stack limit". | 1767 // (i.e. debug break and preemption) here, so check the "real stack limit". |
1768 Label done; | 1768 Label done; |
1769 __ LoadRoot(a4, Heap::kRealStackLimitRootIndex); | 1769 __ LoadRoot(a4, Heap::kRealStackLimitRootIndex); |
1770 // Make ip the space we have left. The stack might already be overflowed | 1770 // Make ip the space we have left. The stack might already be overflowed |
1771 // here which will cause ip to become negative. | 1771 // here which will cause ip to become negative. |
1772 __ Dsubu(a4, sp, a4); | 1772 __ Dsubu(a4, sp, a4); |
1773 // Check if the arguments will overflow the stack. | 1773 // Check if the arguments will overflow the stack. |
1774 __ dsll(at, a2, kPointerSizeLog2); | 1774 __ dsll(at, a2, kPointerSizeLog2); |
1775 __ Branch(&done, gt, a4, Operand(at)); // Signed comparison. | 1775 __ Branch(&done, gt, a4, Operand(at)); // Signed comparison. |
1776 __ TailCallRuntime(Runtime::kThrowStackOverflow, 1); | 1776 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1777 __ bind(&done); | 1777 __ bind(&done); |
1778 } | 1778 } |
1779 | 1779 |
1780 // ----------- S t a t e ------------- | 1780 // ----------- S t a t e ------------- |
1781 // -- a1 : target | 1781 // -- a1 : target |
1782 // -- a0 : args (a FixedArray built from argumentsList) | 1782 // -- a0 : args (a FixedArray built from argumentsList) |
1783 // -- a2 : len (number of elements to push from args) | 1783 // -- a2 : len (number of elements to push from args) |
1784 // -- a3 : new.target (checked to be constructor or undefined) | 1784 // -- a3 : new.target (checked to be constructor or undefined) |
1785 // -- sp[0] : thisArgument | 1785 // -- sp[0] : thisArgument |
1786 // ----------------------------------- | 1786 // ----------------------------------- |
(...skipping 123 matching lines...)
1910 ParameterCount actual(a0); | 1910 ParameterCount actual(a0); |
1911 ParameterCount expected(a2); | 1911 ParameterCount expected(a2); |
1912 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, | 1912 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, |
1913 CheckDebugStepCallWrapper()); | 1913 CheckDebugStepCallWrapper()); |
1914 | 1914 |
1915 // The function is a "classConstructor", need to raise an exception. | 1915 // The function is a "classConstructor", need to raise an exception. |
1916 __ bind(&class_constructor); | 1916 __ bind(&class_constructor); |
1917 { | 1917 { |
1918 FrameScope frame(masm, StackFrame::INTERNAL); | 1918 FrameScope frame(masm, StackFrame::INTERNAL); |
1919 __ Push(a1); | 1919 __ Push(a1); |
1920 __ CallRuntime(Runtime::kThrowConstructorNonCallableError, 1); | 1920 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); |
1921 } | 1921 } |
1922 } | 1922 } |
1923 | 1923 |
1924 | 1924 |
1925 // static | 1925 // static |
1926 void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) { | 1926 void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) { |
1927 // ----------- S t a t e ------------- | 1927 // ----------- S t a t e ------------- |
1928 // -- a0 : the number of arguments (not including the receiver) | 1928 // -- a0 : the number of arguments (not including the receiver) |
1929 // -- a1 : the function to call (checked to be a JSBoundFunction) | 1929 // -- a1 : the function to call (checked to be a JSBoundFunction) |
1930 // ----------------------------------- | 1930 // ----------------------------------- |
(...skipping 26 matching lines...) Expand all Loading... |
1957 __ Dsubu(sp, sp, Operand(a5)); | 1957 __ Dsubu(sp, sp, Operand(a5)); |
1958 // Check the stack for overflow. We are not trying to catch interruptions | 1958 // Check the stack for overflow. We are not trying to catch interruptions |
1959 // (i.e. debug break and preemption) here, so check the "real stack limit". | 1959 // (i.e. debug break and preemption) here, so check the "real stack limit". |
1960 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); | 1960 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); |
1961 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. | 1961 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. |
1962 // Restore the stack pointer. | 1962 // Restore the stack pointer. |
1963 __ Daddu(sp, sp, Operand(a5)); | 1963 __ Daddu(sp, sp, Operand(a5)); |
1964 { | 1964 { |
1965 FrameScope scope(masm, StackFrame::MANUAL); | 1965 FrameScope scope(masm, StackFrame::MANUAL); |
1966 __ EnterFrame(StackFrame::INTERNAL); | 1966 __ EnterFrame(StackFrame::INTERNAL); |
1967 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 1967 __ CallRuntime(Runtime::kThrowStackOverflow); |
1968 } | 1968 } |
1969 __ bind(&done); | 1969 __ bind(&done); |
1970 } | 1970 } |
1971 | 1971 |
1972 // Relocate arguments down the stack. | 1972 // Relocate arguments down the stack. |
1973 { | 1973 { |
1974 Label loop, done_loop; | 1974 Label loop, done_loop; |
1975 __ mov(a5, zero_reg); | 1975 __ mov(a5, zero_reg); |
1976 __ bind(&loop); | 1976 __ bind(&loop); |
1977 __ Branch(&done_loop, gt, a5, Operand(a0)); | 1977 __ Branch(&done_loop, gt, a5, Operand(a0)); |
(...skipping 80 matching lines...)
2058 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); | 2058 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); |
2059 __ Jump(masm->isolate()->builtins()->CallFunction( | 2059 __ Jump(masm->isolate()->builtins()->CallFunction( |
2060 ConvertReceiverMode::kNotNullOrUndefined), | 2060 ConvertReceiverMode::kNotNullOrUndefined), |
2061 RelocInfo::CODE_TARGET); | 2061 RelocInfo::CODE_TARGET); |
2062 | 2062 |
2063 // 3. Call to something that is not callable. | 2063 // 3. Call to something that is not callable. |
2064 __ bind(&non_callable); | 2064 __ bind(&non_callable); |
2065 { | 2065 { |
2066 FrameScope scope(masm, StackFrame::INTERNAL); | 2066 FrameScope scope(masm, StackFrame::INTERNAL); |
2067 __ Push(a1); | 2067 __ Push(a1); |
2068 __ CallRuntime(Runtime::kThrowCalledNonCallable, 1); | 2068 __ CallRuntime(Runtime::kThrowCalledNonCallable); |
2069 } | 2069 } |
2070 } | 2070 } |
2071 | 2071 |
2072 | 2072 |
2073 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | 2073 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { |
2074 // ----------- S t a t e ------------- | 2074 // ----------- S t a t e ------------- |
2075 // -- a0 : the number of arguments (not including the receiver) | 2075 // -- a0 : the number of arguments (not including the receiver) |
2076 // -- a1 : the constructor to call (checked to be a JSFunction) | 2076 // -- a1 : the constructor to call (checked to be a JSFunction) |
2077 // -- a3 : the new target (checked to be a constructor) | 2077 // -- a3 : the new target (checked to be a constructor) |
2078 // ----------------------------------- | 2078 // ----------------------------------- |
(...skipping 41 matching lines...)
2120 __ Dsubu(sp, sp, Operand(a5)); | 2120 __ Dsubu(sp, sp, Operand(a5)); |
2121 // Check the stack for overflow. We are not trying to catch interruptions | 2121 // Check the stack for overflow. We are not trying to catch interruptions |
2122 // (i.e. debug break and preemption) here, so check the "real stack limit". | 2122 // (i.e. debug break and preemption) here, so check the "real stack limit". |
2123 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); | 2123 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); |
2124 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. | 2124 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. |
2125 // Restore the stack pointer. | 2125 // Restore the stack pointer. |
2126 __ Daddu(sp, sp, Operand(a5)); | 2126 __ Daddu(sp, sp, Operand(a5)); |
2127 { | 2127 { |
2128 FrameScope scope(masm, StackFrame::MANUAL); | 2128 FrameScope scope(masm, StackFrame::MANUAL); |
2129 __ EnterFrame(StackFrame::INTERNAL); | 2129 __ EnterFrame(StackFrame::INTERNAL); |
2130 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 2130 __ CallRuntime(Runtime::kThrowStackOverflow); |
2131 } | 2131 } |
2132 __ bind(&done); | 2132 __ bind(&done); |
2133 } | 2133 } |
2134 | 2134 |
2135 // Relocate arguments down the stack. | 2135 // Relocate arguments down the stack. |
2136 { | 2136 { |
2137 Label loop, done_loop; | 2137 Label loop, done_loop; |
2138 __ mov(a5, zero_reg); | 2138 __ mov(a5, zero_reg); |
2139 __ bind(&loop); | 2139 __ bind(&loop); |
2140 __ Branch(&done_loop, ge, a5, Operand(a0)); | 2140 __ Branch(&done_loop, ge, a5, Operand(a0)); |
(...skipping 182 matching lines...)
2323 | 2323 |
2324 // What we really care about is the required number of arguments. | 2324 // What we really care about is the required number of arguments. |
2325 DCHECK_EQ(kPointerSize, kInt64Size); | 2325 DCHECK_EQ(kPointerSize, kInt64Size); |
2326 __ lw(a5, FieldMemOperand(a4, SharedFunctionInfo::kLengthOffset)); | 2326 __ lw(a5, FieldMemOperand(a4, SharedFunctionInfo::kLengthOffset)); |
2327 __ srl(a5, a5, 1); | 2327 __ srl(a5, a5, 1); |
2328 __ Branch(&no_strong_error, ge, a0, Operand(a5)); | 2328 __ Branch(&no_strong_error, ge, a0, Operand(a5)); |
2329 | 2329 |
2330 { | 2330 { |
2331 FrameScope frame(masm, StackFrame::MANUAL); | 2331 FrameScope frame(masm, StackFrame::MANUAL); |
2332 EnterArgumentsAdaptorFrame(masm); | 2332 EnterArgumentsAdaptorFrame(masm); |
2333 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0); | 2333 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments); |
2334 } | 2334 } |
2335 | 2335 |
2336 __ bind(&no_strong_error); | 2336 __ bind(&no_strong_error); |
2337 EnterArgumentsAdaptorFrame(masm); | 2337 EnterArgumentsAdaptorFrame(masm); |
2338 ArgumentAdaptorStackCheck(masm, &stack_overflow); | 2338 ArgumentAdaptorStackCheck(masm, &stack_overflow); |
2339 | 2339 |
2340 // Calculate copy start address into a0 and copy end address into a7. | 2340 // Calculate copy start address into a0 and copy end address into a7. |
2341 // a0: actual number of arguments as a smi | 2341 // a0: actual number of arguments as a smi |
2342 // a1: function | 2342 // a1: function |
2343 // a2: expected number of arguments | 2343 // a2: expected number of arguments |
(...skipping 57 matching lines...)
2401 // ------------------------------------------- | 2401 // ------------------------------------------- |
2402 // Don't adapt arguments. | 2402 // Don't adapt arguments. |
2403 // ------------------------------------------- | 2403 // ------------------------------------------- |
2404 __ bind(&dont_adapt_arguments); | 2404 __ bind(&dont_adapt_arguments); |
2405 __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | 2405 __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); |
2406 __ Jump(a4); | 2406 __ Jump(a4); |
2407 | 2407 |
2408 __ bind(&stack_overflow); | 2408 __ bind(&stack_overflow); |
2409 { | 2409 { |
2410 FrameScope frame(masm, StackFrame::MANUAL); | 2410 FrameScope frame(masm, StackFrame::MANUAL); |
2411 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 2411 __ CallRuntime(Runtime::kThrowStackOverflow); |
2412 __ break_(0xCC); | 2412 __ break_(0xCC); |
2413 } | 2413 } |
2414 } | 2414 } |
2415 | 2415 |
2416 | 2416 |
2417 #undef __ | 2417 #undef __ |
2418 | 2418 |
2419 } // namespace internal | 2419 } // namespace internal |
2420 } // namespace v8 | 2420 } // namespace v8 |
2421 | 2421 |
2422 #endif // V8_TARGET_ARCH_MIPS64 | 2422 #endif // V8_TARGET_ARCH_MIPS64 |
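
Note on the recurring change in this CL: every edited call site drops the explicit argument count from CallRuntime and TailCallRuntime (e.g. CallRuntime(Runtime::kNewObject, 2) becomes CallRuntime(Runtime::kNewObject)). Below is a minimal standalone C++ sketch of why the count can be omitted, assuming each runtime function id maps to a fixed arity recorded in a descriptor table; the table, type names, and printf stand-in are illustrative only, not taken from this diff or the V8 tree.

// Standalone sketch (not V8 source): the count-free overload looks up the
// arity from a per-function descriptor, so callers no longer repeat it.
#include <cstdio>

namespace sketch {

enum class FunctionId { kNewObject, kThrowStackOverflow, kNotifyDeoptimized };

struct RuntimeFunction {
  FunctionId id;
  const char* name;
  int nargs;  // fixed arity recorded once, next to the function itself
};

// Hypothetical descriptor table; in a real runtime this lives alongside the
// runtime function definitions.
constexpr RuntimeFunction kFunctions[] = {
    {FunctionId::kNewObject, "NewObject", 2},
    {FunctionId::kThrowStackOverflow, "ThrowStackOverflow", 0},
    {FunctionId::kNotifyDeoptimized, "NotifyDeoptimized", 1},
};

const RuntimeFunction* FunctionForId(FunctionId id) {
  for (const RuntimeFunction& f : kFunctions) {
    if (f.id == id) return &f;
  }
  return nullptr;
}

// Old-style entry point: the caller passes the argument count explicitly.
void CallRuntime(const RuntimeFunction* f, int num_arguments) {
  std::printf("call %s with %d argument(s)\n", f->name, num_arguments);
}

// New-style overload: the count is derived from the descriptor, so call
// sites shrink from CallRuntime(id, n) to CallRuntime(id).
void CallRuntime(FunctionId id) {
  const RuntimeFunction* f = FunctionForId(id);
  CallRuntime(f, f->nargs);
}

}  // namespace sketch

int main() {
  sketch::CallRuntime(sketch::FunctionId::kNewObject);  // was CallRuntime(kNewObject, 2)
  return 0;
}

Under that assumption the per-call-site count is redundant with the descriptor, which is why each changed line in this diff simply deletes it while the generated code stays the same.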