| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 177 matching lines...) |
| 188 __ bind(&to_string); | 188 __ bind(&to_string); |
| 189 { | 189 { |
| 190 ToStringStub stub(masm->isolate()); | 190 ToStringStub stub(masm->isolate()); |
| 191 __ TailCallStub(&stub); | 191 __ TailCallStub(&stub); |
| 192 } | 192 } |
| 193 | 193 |
| 194 // 3b. Convert symbol in a0 to a string. | 194 // 3b. Convert symbol in a0 to a string. |
| 195 __ bind(&symbol_descriptive_string); | 195 __ bind(&symbol_descriptive_string); |
| 196 { | 196 { |
| 197 __ Push(a0); | 197 __ Push(a0); |
| 198 __ TailCallRuntime(Runtime::kSymbolDescriptiveString, 1); | 198 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); |
| 199 } | 199 } |
| 200 } | 200 } |
| 201 | 201 |
| 202 | 202 |
| 203 // static | 203 // static |
| 204 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) { | 204 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) { |
| 205 // ----------- S t a t e ------------- | 205 // ----------- S t a t e ------------- |
| 206 // -- a0 : number of arguments | 206 // -- a0 : number of arguments |
| 207 // -- a1 : constructor function | 207 // -- a1 : constructor function |
| 208 // -- a3 : new target | 208 // -- a3 : new target |
| (...skipping 61 matching lines...) |
| 270 __ Ret(USE_DELAY_SLOT); | 270 __ Ret(USE_DELAY_SLOT); |
| 271 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); | 271 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); |
| 272 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize); | 272 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize); |
| 273 } | 273 } |
| 274 | 274 |
| 275 // 5. Fallback to the runtime to create new object. | 275 // 5. Fallback to the runtime to create new object. |
| 276 __ bind(&new_object); | 276 __ bind(&new_object); |
| 277 { | 277 { |
| 278 FrameScope scope(masm, StackFrame::INTERNAL); | 278 FrameScope scope(masm, StackFrame::INTERNAL); |
| 279 __ Push(a0, a1, a3); // first argument, constructor, new target | 279 __ Push(a0, a1, a3); // first argument, constructor, new target |
| 280 __ CallRuntime(Runtime::kNewObject, 2); | 280 __ CallRuntime(Runtime::kNewObject); |
| 281 __ Pop(a0); | 281 __ Pop(a0); |
| 282 } | 282 } |
| 283 __ Ret(USE_DELAY_SLOT); | 283 __ Ret(USE_DELAY_SLOT); |
| 284 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); | 284 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); |
| 285 } | 285 } |
| 286 | 286 |
| 287 | 287 |
| 288 static void CallRuntimePassFunction( | 288 static void CallRuntimePassFunction( |
| 289 MacroAssembler* masm, Runtime::FunctionId function_id) { | 289 MacroAssembler* masm, Runtime::FunctionId function_id) { |
| 290 // ----------- S t a t e ------------- | 290 // ----------- S t a t e ------------- |
| (...skipping 161 matching lines...) |
| 452 // filler map. | 452 // filler map. |
| 453 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex); | 453 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex); |
| 454 __ InitializeFieldsWithFiller(t5, t3, t7); | 454 __ InitializeFieldsWithFiller(t5, t3, t7); |
| 455 | 455 |
| 456 // t2: slack tracking counter value before decreasing. | 456 // t2: slack tracking counter value before decreasing. |
| 457 __ Branch(&allocated, ne, t2, Operand(Map::kSlackTrackingCounterEnd)); | 457 __ Branch(&allocated, ne, t2, Operand(Map::kSlackTrackingCounterEnd)); |
| 458 | 458 |
| 459 // Push the constructor, new_target and the object to the stack, | 459 // Push the constructor, new_target and the object to the stack, |
| 460 // and then the initial map as an argument to the runtime call. | 460 // and then the initial map as an argument to the runtime call. |
| 461 __ Push(a1, a3, t4, a2); | 461 __ Push(a1, a3, t4, a2); |
| 462 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); | 462 __ CallRuntime(Runtime::kFinalizeInstanceSize); |
| 463 __ Pop(a1, a3, t4); | 463 __ Pop(a1, a3, t4); |
| 464 | 464 |
| 465 // Continue with JSObject being successfully allocated. | 465 // Continue with JSObject being successfully allocated. |
| 466 // a1: constructor function | 466 // a1: constructor function |
| 467 // a3: new target | 467 // a3: new target |
| 468 // t4: JSObject | 468 // t4: JSObject |
| 469 __ jmp(&allocated); | 469 __ jmp(&allocated); |
| 470 | 470 |
| 471 __ bind(&no_inobject_slack_tracking); | 471 __ bind(&no_inobject_slack_tracking); |
| 472 } | 472 } |
| 473 | 473 |
| 474 __ InitializeFieldsWithFiller(t5, t3, t7); | 474 __ InitializeFieldsWithFiller(t5, t3, t7); |
| 475 | 475 |
| 476 // Continue with JSObject being successfully allocated. | 476 // Continue with JSObject being successfully allocated. |
| 477 // a1: constructor function | 477 // a1: constructor function |
| 478 // a3: new target | 478 // a3: new target |
| 479 // t4: JSObject | 479 // t4: JSObject |
| 480 __ jmp(&allocated); | 480 __ jmp(&allocated); |
| 481 } | 481 } |
| 482 | 482 |
| 483 // Allocate the new receiver object using the runtime call. | 483 // Allocate the new receiver object using the runtime call. |
| 484 // a1: constructor function | 484 // a1: constructor function |
| 485 // a3: new target | 485 // a3: new target |
| 486 __ bind(&rt_call); | 486 __ bind(&rt_call); |
| 487 | 487 |
| 488 // Push the constructor and new_target twice, second pair as arguments | 488 // Push the constructor and new_target twice, second pair as arguments |
| 489 // to the runtime call. | 489 // to the runtime call. |
| 490 __ Push(a1, a3, a1, a3); // constructor function, new target | 490 __ Push(a1, a3, a1, a3); // constructor function, new target |
| 491 __ CallRuntime(Runtime::kNewObject, 2); | 491 __ CallRuntime(Runtime::kNewObject); |
| 492 __ mov(t4, v0); | 492 __ mov(t4, v0); |
| 493 __ Pop(a1, a3); | 493 __ Pop(a1, a3); |
| 494 | 494 |
| 495 // Receiver for constructor call allocated. | 495 // Receiver for constructor call allocated. |
| 496 // a1: constructor function | 496 // a1: constructor function |
| 497 // a3: new target | 497 // a3: new target |
| 498 // t4: JSObject | 498 // t4: JSObject |
| 499 __ bind(&allocated); | 499 __ bind(&allocated); |
| 500 | 500 |
| 501 // Retrieve smi-tagged arguments count from the stack. | 501 // Retrieve smi-tagged arguments count from the stack. |
| (...skipping 115 matching lines...) |
| 617 | 617 |
| 618 | 618 |
| 619 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { | 619 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { |
| 620 Generate_JSConstructStubHelper(masm, false, false); | 620 Generate_JSConstructStubHelper(masm, false, false); |
| 621 } | 621 } |
| 622 | 622 |
| 623 | 623 |
| 624 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { | 624 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { |
| 625 FrameScope scope(masm, StackFrame::INTERNAL); | 625 FrameScope scope(masm, StackFrame::INTERNAL); |
| 626 __ Push(a1); | 626 __ Push(a1); |
| 627 __ CallRuntime(Runtime::kThrowConstructedNonConstructable, 1); | 627 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); |
| 628 } | 628 } |
| 629 | 629 |
| 630 | 630 |
| 631 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; | 631 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; |
| 632 | 632 |
| 633 | 633 |
| 634 // Clobbers a2; preserves all other registers. | 634 // Clobbers a2; preserves all other registers. |
| 635 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, | 635 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, |
| 636 IsTagged argc_is_tagged) { | 636 IsTagged argc_is_tagged) { |
| 637 // Check the stack for overflow. We are not trying to catch | 637 // Check the stack for overflow. We are not trying to catch |
| 638 // interruptions (e.g. debug break and preemption) here, so the "real stack | 638 // interruptions (e.g. debug break and preemption) here, so the "real stack |
| 639 // limit" is checked. | 639 // limit" is checked. |
| 640 Label okay; | 640 Label okay; |
| 641 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 641 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
| 642 // Make a2 the space we have left. The stack might already be overflowed | 642 // Make a2 the space we have left. The stack might already be overflowed |
| 643 // here which will cause a2 to become negative. | 643 // here which will cause a2 to become negative. |
| 644 __ Subu(a2, sp, a2); | 644 __ Subu(a2, sp, a2); |
| 645 // Check if the arguments will overflow the stack. | 645 // Check if the arguments will overflow the stack. |
| 646 if (argc_is_tagged == kArgcIsSmiTagged) { | 646 if (argc_is_tagged == kArgcIsSmiTagged) { |
| 647 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize); | 647 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize); |
| 648 } else { | 648 } else { |
| 649 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); | 649 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); |
| 650 __ sll(t3, argc, kPointerSizeLog2); | 650 __ sll(t3, argc, kPointerSizeLog2); |
| 651 } | 651 } |
| 652 // Signed comparison. | 652 // Signed comparison. |
| 653 __ Branch(&okay, gt, a2, Operand(t3)); | 653 __ Branch(&okay, gt, a2, Operand(t3)); |
| 654 | 654 |
| 655 // Out of stack space. | 655 // Out of stack space. |
| 656 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 656 __ CallRuntime(Runtime::kThrowStackOverflow); |
| 657 | 657 |
| 658 __ bind(&okay); | 658 __ bind(&okay); |
| 659 } | 659 } |
| 660 | 660 |
| 661 | 661 |
| 662 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 662 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
| 663 bool is_construct) { | 663 bool is_construct) { |
| 664 // Called from JSEntryStub::GenerateBody | 664 // Called from JSEntryStub::GenerateBody |
| 665 | 665 |
| 666 // ----------- S t a t e ------------- | 666 // ----------- S t a t e ------------- |
| (...skipping 133 matching lines...) |
| 800 { | 800 { |
| 801 // Load frame size from the BytecodeArray object. | 801 // Load frame size from the BytecodeArray object. |
| 802 __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister, | 802 __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister, |
| 803 BytecodeArray::kFrameSizeOffset)); | 803 BytecodeArray::kFrameSizeOffset)); |
| 804 | 804 |
| 805 // Do a stack check to ensure we don't go over the limit. | 805 // Do a stack check to ensure we don't go over the limit. |
| 806 Label ok; | 806 Label ok; |
| 807 __ Subu(t1, sp, Operand(t0)); | 807 __ Subu(t1, sp, Operand(t0)); |
| 808 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 808 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
| 809 __ Branch(&ok, hs, t1, Operand(a2)); | 809 __ Branch(&ok, hs, t1, Operand(a2)); |
| 810 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 810 __ CallRuntime(Runtime::kThrowStackOverflow); |
| 811 __ bind(&ok); | 811 __ bind(&ok); |
| 812 | 812 |
| 813 // If ok, push undefined as the initial value for all register file entries. | 813 // If ok, push undefined as the initial value for all register file entries. |
| 814 Label loop_header; | 814 Label loop_header; |
| 815 Label loop_check; | 815 Label loop_check; |
| 816 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); | 816 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); |
| 817 __ Branch(&loop_check); | 817 __ Branch(&loop_check); |
| 818 __ bind(&loop_header); | 818 __ bind(&loop_header); |
| 819 // TODO(rmcilroy): Consider doing more than one push per loop iteration. | 819 // TODO(rmcilroy): Consider doing more than one push per loop iteration. |
| 820 __ push(t1); | 820 __ push(t1); |
| 821 // Continue loop if not done. | 821 // Continue loop if not done. |
| 822 __ bind(&loop_check); | 822 __ bind(&loop_check); |
| 823 __ Subu(t0, t0, Operand(kPointerSize)); | 823 __ Subu(t0, t0, Operand(kPointerSize)); |
| 824 __ Branch(&loop_header, ge, t0, Operand(zero_reg)); | 824 __ Branch(&loop_header, ge, t0, Operand(zero_reg)); |
| 825 } | 825 } |
| 826 | 826 |
| 827 // TODO(rmcilroy): List of things not currently dealt with here but done in | 827 // TODO(rmcilroy): List of things not currently dealt with here but done in |
| 828 // fullcodegen's prologue: | 828 // fullcodegen's prologue: |
| 829 // - Support profiler (specifically profiling_counter). | 829 // - Support profiler (specifically profiling_counter). |
| 830 // - Call ProfileEntryHookStub when isolate has a function_entry_hook. | 830 // - Call ProfileEntryHookStub when isolate has a function_entry_hook. |
| 831 // - Allow simulator stop operations if FLAG_stop_at is set. | 831 // - Allow simulator stop operations if FLAG_stop_at is set. |
| 832 // - Code aging of the BytecodeArray object. | 832 // - Code aging of the BytecodeArray object. |
| 833 | 833 |
| 834 // Perform stack guard check. | 834 // Perform stack guard check. |
| 835 { | 835 { |
| 836 Label ok; | 836 Label ok; |
| 837 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 837 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 838 __ Branch(&ok, hs, sp, Operand(at)); | 838 __ Branch(&ok, hs, sp, Operand(at)); |
| 839 __ push(kInterpreterBytecodeArrayRegister); | 839 __ push(kInterpreterBytecodeArrayRegister); |
| 840 __ CallRuntime(Runtime::kStackGuard, 0); | 840 __ CallRuntime(Runtime::kStackGuard); |
| 841 __ pop(kInterpreterBytecodeArrayRegister); | 841 __ pop(kInterpreterBytecodeArrayRegister); |
| 842 __ bind(&ok); | 842 __ bind(&ok); |
| 843 } | 843 } |
| 844 | 844 |
| 845 // Load bytecode offset and dispatch table into registers. | 845 // Load bytecode offset and dispatch table into registers. |
| 846 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); | 846 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); |
| 847 __ Addu(kInterpreterRegisterFileRegister, fp, | 847 __ Addu(kInterpreterRegisterFileRegister, fp, |
| 848 Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp)); | 848 Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp)); |
| 849 __ li(kInterpreterBytecodeOffsetRegister, | 849 __ li(kInterpreterBytecodeOffsetRegister, |
| 850 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag)); | 850 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag)); |
| (...skipping 100 matching lines...) |
| 951 static void Generate_InterpreterNotifyDeoptimizedHelper( | 951 static void Generate_InterpreterNotifyDeoptimizedHelper( |
| 952 MacroAssembler* masm, Deoptimizer::BailoutType type) { | 952 MacroAssembler* masm, Deoptimizer::BailoutType type) { |
| 953 // Enter an internal frame. | 953 // Enter an internal frame. |
| 954 { | 954 { |
| 955 FrameScope scope(masm, StackFrame::INTERNAL); | 955 FrameScope scope(masm, StackFrame::INTERNAL); |
| 956 __ push(kInterpreterAccumulatorRegister); // Save accumulator register. | 956 __ push(kInterpreterAccumulatorRegister); // Save accumulator register. |
| 957 | 957 |
| 958 // Pass the deoptimization type to the runtime system. | 958 // Pass the deoptimization type to the runtime system. |
| 959 __ li(a1, Operand(Smi::FromInt(static_cast<int>(type)))); | 959 __ li(a1, Operand(Smi::FromInt(static_cast<int>(type)))); |
| 960 __ push(a1); | 960 __ push(a1); |
| 961 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 961 __ CallRuntime(Runtime::kNotifyDeoptimized); |
| 962 | 962 |
| 963 __ pop(kInterpreterAccumulatorRegister); // Restore accumulator register. | 963 __ pop(kInterpreterAccumulatorRegister); // Restore accumulator register. |
| 964 // Tear down internal frame. | 964 // Tear down internal frame. |
| 965 } | 965 } |
| 966 | 966 |
| 967 // Drop state (we don't use these for interpreter deopts) and push PC at top | 967 // Drop state (we don't use these for interpreter deopts) and push PC at top |
| 968 // of stack (to simulate initial call to bytecode handler in interpreter entry | 968 // of stack (to simulate initial call to bytecode handler in interpreter entry |
| 969 // trampoline). | 969 // trampoline). |
| 970 __ lw(a1, MemOperand(sp)); | 970 __ lw(a1, MemOperand(sp)); |
| 971 __ Drop(1); | 971 __ Drop(1); |
| (...skipping 174 matching lines...) |
| 1146 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 1146 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
| 1147 SaveFPRegsMode save_doubles) { | 1147 SaveFPRegsMode save_doubles) { |
| 1148 { | 1148 { |
| 1149 FrameScope scope(masm, StackFrame::INTERNAL); | 1149 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1150 | 1150 |
| 1151 // Preserve registers across notification, this is important for compiled | 1151 // Preserve registers across notification, this is important for compiled |
| 1152 // stubs that tail call the runtime on deopts passing their parameters in | 1152 // stubs that tail call the runtime on deopts passing their parameters in |
| 1153 // registers. | 1153 // registers. |
| 1154 __ MultiPush(kJSCallerSaved | kCalleeSaved); | 1154 __ MultiPush(kJSCallerSaved | kCalleeSaved); |
| 1155 // Pass the function and deoptimization type to the runtime system. | 1155 // Pass the function and deoptimization type to the runtime system. |
| 1156 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | 1156 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); |
| 1157 __ MultiPop(kJSCallerSaved | kCalleeSaved); | 1157 __ MultiPop(kJSCallerSaved | kCalleeSaved); |
| 1158 } | 1158 } |
| 1159 | 1159 |
| 1160 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state | 1160 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state |
| 1161 __ Jump(ra); // Jump to miss handler | 1161 __ Jump(ra); // Jump to miss handler |
| 1162 } | 1162 } |
| 1163 | 1163 |
| 1164 | 1164 |
| 1165 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 1165 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
| 1166 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 1166 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
| 1167 } | 1167 } |
| 1168 | 1168 |
| 1169 | 1169 |
| 1170 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 1170 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
| 1171 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 1171 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
| 1172 } | 1172 } |
| 1173 | 1173 |
| 1174 | 1174 |
| 1175 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 1175 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
| 1176 Deoptimizer::BailoutType type) { | 1176 Deoptimizer::BailoutType type) { |
| 1177 { | 1177 { |
| 1178 FrameScope scope(masm, StackFrame::INTERNAL); | 1178 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1179 // Pass the function and deoptimization type to the runtime system. | 1179 // Pass the function and deoptimization type to the runtime system. |
| 1180 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); | 1180 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); |
| 1181 __ push(a0); | 1181 __ push(a0); |
| 1182 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 1182 __ CallRuntime(Runtime::kNotifyDeoptimized); |
| 1183 } | 1183 } |
| 1184 | 1184 |
| 1185 // Get the full codegen state from the stack and untag it -> t2. | 1185 // Get the full codegen state from the stack and untag it -> t2. |
| 1186 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); | 1186 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); |
| 1187 __ SmiUntag(t2); | 1187 __ SmiUntag(t2); |
| 1188 // Switch on the state. | 1188 // Switch on the state. |
| 1189 Label with_tos_register, unknown_state; | 1189 Label with_tos_register, unknown_state; |
| 1190 __ Branch(&with_tos_register, | 1190 __ Branch(&with_tos_register, |
| 1191 ne, t2, Operand(FullCodeGenerator::NO_REGISTERS)); | 1191 ne, t2, Operand(FullCodeGenerator::NO_REGISTERS)); |
| 1192 __ Ret(USE_DELAY_SLOT); | 1192 __ Ret(USE_DELAY_SLOT); |
| (...skipping 135 matching lines...) |
| 1328 __ bind(&set_global_proxy); | 1328 __ bind(&set_global_proxy); |
| 1329 __ LoadGlobalProxy(t0); | 1329 __ LoadGlobalProxy(t0); |
| 1330 __ sw(t0, MemOperand(t8)); | 1330 __ sw(t0, MemOperand(t8)); |
| 1331 __ Branch(&valid_receiver); | 1331 __ Branch(&valid_receiver); |
| 1332 | 1332 |
| 1333 // Compatible receiver check failed: throw an Illegal Invocation exception. | 1333 // Compatible receiver check failed: throw an Illegal Invocation exception. |
| 1334 __ bind(&receiver_check_failed); | 1334 __ bind(&receiver_check_failed); |
| 1335 // Drop the arguments (including the receiver); | 1335 // Drop the arguments (including the receiver); |
| 1336 __ Addu(t8, t8, Operand(kPointerSize)); | 1336 __ Addu(t8, t8, Operand(kPointerSize)); |
| 1337 __ addu(sp, t8, zero_reg); | 1337 __ addu(sp, t8, zero_reg); |
| 1338 __ TailCallRuntime(Runtime::kThrowIllegalInvocation, 0); | 1338 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); |
| 1339 } | 1339 } |
| 1340 | 1340 |
| 1341 | 1341 |
| 1342 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 1342 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
| 1343 // Lookup the function in the JavaScript frame. | 1343 // Lookup the function in the JavaScript frame. |
| 1344 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1344 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1345 { | 1345 { |
| 1346 FrameScope scope(masm, StackFrame::INTERNAL); | 1346 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1347 // Pass function as argument. | 1347 // Pass function as argument. |
| 1348 __ push(a0); | 1348 __ push(a0); |
| 1349 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); | 1349 __ CallRuntime(Runtime::kCompileForOnStackReplacement); |
| 1350 } | 1350 } |
| 1351 | 1351 |
| 1352 // If the code object is null, just return to the unoptimized code. | 1352 // If the code object is null, just return to the unoptimized code. |
| 1353 __ Ret(eq, v0, Operand(Smi::FromInt(0))); | 1353 __ Ret(eq, v0, Operand(Smi::FromInt(0))); |
| 1354 | 1354 |
| 1355 // Load deoptimization data from the code object. | 1355 // Load deoptimization data from the code object. |
| 1356 // <deopt_data> = <code>[#deoptimization_data_offset] | 1356 // <deopt_data> = <code>[#deoptimization_data_offset] |
| 1357 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); | 1357 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); |
| 1358 | 1358 |
| 1359 // Load the OSR entrypoint offset from the deoptimization data. | 1359 // Load the OSR entrypoint offset from the deoptimization data. |
| (...skipping 12 matching lines...) |
| 1372 } | 1372 } |
| 1373 | 1373 |
| 1374 | 1374 |
| 1375 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1375 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
| 1376 // We check the stack limit as indicator that recompilation might be done. | 1376 // We check the stack limit as indicator that recompilation might be done. |
| 1377 Label ok; | 1377 Label ok; |
| 1378 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 1378 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 1379 __ Branch(&ok, hs, sp, Operand(at)); | 1379 __ Branch(&ok, hs, sp, Operand(at)); |
| 1380 { | 1380 { |
| 1381 FrameScope scope(masm, StackFrame::INTERNAL); | 1381 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1382 __ CallRuntime(Runtime::kStackGuard, 0); | 1382 __ CallRuntime(Runtime::kStackGuard); |
| 1383 } | 1383 } |
| 1384 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 1384 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
| 1385 RelocInfo::CODE_TARGET); | 1385 RelocInfo::CODE_TARGET); |
| 1386 | 1386 |
| 1387 __ bind(&ok); | 1387 __ bind(&ok); |
| 1388 __ Ret(); | 1388 __ Ret(); |
| 1389 } | 1389 } |
| 1390 | 1390 |
| 1391 | 1391 |
| 1392 // static | 1392 // static |
| (...skipping 57 matching lines...) |
| 1450 __ bind(&no_arguments); | 1450 __ bind(&no_arguments); |
| 1451 { | 1451 { |
| 1452 __ mov(a0, zero_reg); | 1452 __ mov(a0, zero_reg); |
| 1453 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1453 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1454 } | 1454 } |
| 1455 | 1455 |
| 1456 // 4c. The receiver is not callable, throw an appropriate TypeError. | 1456 // 4c. The receiver is not callable, throw an appropriate TypeError. |
| 1457 __ bind(&receiver_not_callable); | 1457 __ bind(&receiver_not_callable); |
| 1458 { | 1458 { |
| 1459 __ sw(a1, MemOperand(sp)); | 1459 __ sw(a1, MemOperand(sp)); |
| 1460 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1); | 1460 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); |
| 1461 } | 1461 } |
| 1462 } | 1462 } |
| 1463 | 1463 |
| 1464 | 1464 |
| 1465 // static | 1465 // static |
| 1466 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { | 1466 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { |
| 1467 // 1. Make sure we have at least one argument. | 1467 // 1. Make sure we have at least one argument. |
| 1468 // a0: actual number of arguments | 1468 // a0: actual number of arguments |
| 1469 { | 1469 { |
| 1470 Label done; | 1470 Label done; |
| (...skipping 88 matching lines...) |
| 1559 | 1559 |
| 1560 // 3a. Apply the target to the given argumentsList (passing undefined for | 1560 // 3a. Apply the target to the given argumentsList (passing undefined for |
| 1561 // new.target). | 1561 // new.target). |
| 1562 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); | 1562 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); |
| 1563 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | 1563 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1564 | 1564 |
| 1565 // 3b. The target is not callable, throw an appropriate TypeError. | 1565 // 3b. The target is not callable, throw an appropriate TypeError. |
| 1566 __ bind(&target_not_callable); | 1566 __ bind(&target_not_callable); |
| 1567 { | 1567 { |
| 1568 __ sw(a1, MemOperand(sp)); | 1568 __ sw(a1, MemOperand(sp)); |
| 1569 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1); | 1569 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); |
| 1570 } | 1570 } |
| 1571 } | 1571 } |
| 1572 | 1572 |
| 1573 | 1573 |
| 1574 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | 1574 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { |
| 1575 // ----------- S t a t e ------------- | 1575 // ----------- S t a t e ------------- |
| 1576 // -- a0 : argc | 1576 // -- a0 : argc |
| 1577 // -- sp[0] : new.target (optional) | 1577 // -- sp[0] : new.target (optional) |
| 1578 // -- sp[4] : argumentsList | 1578 // -- sp[4] : argumentsList |
| 1579 // -- sp[8] : target | 1579 // -- sp[8] : target |
| (...skipping 50 matching lines...) |
| 1630 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); | 1630 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); |
| 1631 __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg)); | 1631 __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg)); |
| 1632 | 1632 |
| 1633 // 4a. Construct the target with the given new.target and argumentsList. | 1633 // 4a. Construct the target with the given new.target and argumentsList. |
| 1634 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | 1634 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1635 | 1635 |
| 1636 // 4b. The target is not a constructor, throw an appropriate TypeError. | 1636 // 4b. The target is not a constructor, throw an appropriate TypeError. |
| 1637 __ bind(&target_not_constructor); | 1637 __ bind(&target_not_constructor); |
| 1638 { | 1638 { |
| 1639 __ sw(a1, MemOperand(sp)); | 1639 __ sw(a1, MemOperand(sp)); |
| 1640 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1); | 1640 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); |
| 1641 } | 1641 } |
| 1642 | 1642 |
| 1643 // 4c. The new.target is not a constructor, throw an appropriate TypeError. | 1643 // 4c. The new.target is not a constructor, throw an appropriate TypeError. |
| 1644 __ bind(&new_target_not_constructor); | 1644 __ bind(&new_target_not_constructor); |
| 1645 { | 1645 { |
| 1646 __ sw(a3, MemOperand(sp)); | 1646 __ sw(a3, MemOperand(sp)); |
| 1647 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1); | 1647 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); |
| 1648 } | 1648 } |
| 1649 } | 1649 } |
| 1650 | 1650 |
| 1651 | 1651 |
| 1652 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | 1652 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, |
| 1653 Label* stack_overflow) { | 1653 Label* stack_overflow) { |
| 1654 // ----------- S t a t e ------------- | 1654 // ----------- S t a t e ------------- |
| 1655 // -- a0 : actual number of arguments | 1655 // -- a0 : actual number of arguments |
| 1656 // -- a1 : function (passed through to callee) | 1656 // -- a1 : function (passed through to callee) |
| 1657 // -- a2 : expected number of arguments | 1657 // -- a2 : expected number of arguments |
| (...skipping 68 matching lines...) |
| 1726 // Check if argumentsList is a fast JSArray. | 1726 // Check if argumentsList is a fast JSArray. |
| 1727 __ lw(v0, FieldMemOperand(a2, HeapObject::kMapOffset)); | 1727 __ lw(v0, FieldMemOperand(a2, HeapObject::kMapOffset)); |
| 1728 __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset)); | 1728 __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset)); |
| 1729 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); | 1729 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); |
| 1730 | 1730 |
| 1731 // Ask the runtime to create the list (actually a FixedArray). | 1731 // Ask the runtime to create the list (actually a FixedArray). |
| 1732 __ bind(&create_runtime); | 1732 __ bind(&create_runtime); |
| 1733 { | 1733 { |
| 1734 FrameScope scope(masm, StackFrame::INTERNAL); | 1734 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1735 __ Push(a1, a3, a0); | 1735 __ Push(a1, a3, a0); |
| 1736 __ CallRuntime(Runtime::kCreateListFromArrayLike, 1); | 1736 __ CallRuntime(Runtime::kCreateListFromArrayLike); |
| 1737 __ mov(a0, v0); | 1737 __ mov(a0, v0); |
| 1738 __ Pop(a1, a3); | 1738 __ Pop(a1, a3); |
| 1739 __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | 1739 __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
| 1740 __ SmiUntag(a2); | 1740 __ SmiUntag(a2); |
| 1741 } | 1741 } |
| 1742 __ Branch(&done_create); | 1742 __ Branch(&done_create); |
| 1743 | 1743 |
| 1744 // Try to create the list from an arguments object. | 1744 // Try to create the list from an arguments object. |
| 1745 __ bind(&create_arguments); | 1745 __ bind(&create_arguments); |
| 1746 __ lw(a2, | 1746 __ lw(a2, |
| (...skipping 27 matching lines...) |
| 1774 // Check the stack for overflow. We are not trying to catch interruptions | 1774 // Check the stack for overflow. We are not trying to catch interruptions |
| 1775 // (i.e. debug break and preemption) here, so check the "real stack limit". | 1775 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 1776 Label done; | 1776 Label done; |
| 1777 __ LoadRoot(t0, Heap::kRealStackLimitRootIndex); | 1777 __ LoadRoot(t0, Heap::kRealStackLimitRootIndex); |
| 1778 // Make ip the space we have left. The stack might already be overflowed | 1778 // Make ip the space we have left. The stack might already be overflowed |
| 1779 // here which will cause ip to become negative. | 1779 // here which will cause ip to become negative. |
| 1780 __ Subu(t0, sp, t0); | 1780 __ Subu(t0, sp, t0); |
| 1781 // Check if the arguments will overflow the stack. | 1781 // Check if the arguments will overflow the stack. |
| 1782 __ sll(at, a2, kPointerSizeLog2); | 1782 __ sll(at, a2, kPointerSizeLog2); |
| 1783 __ Branch(&done, gt, t0, Operand(at)); // Signed comparison. | 1783 __ Branch(&done, gt, t0, Operand(at)); // Signed comparison. |
| 1784 __ TailCallRuntime(Runtime::kThrowStackOverflow, 1); | 1784 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
| 1785 __ bind(&done); | 1785 __ bind(&done); |
| 1786 } | 1786 } |
| 1787 | 1787 |
| 1788 // ----------- S t a t e ------------- | 1788 // ----------- S t a t e ------------- |
| 1789 // -- a1 : target | 1789 // -- a1 : target |
| 1790 // -- a0 : args (a FixedArray built from argumentsList) | 1790 // -- a0 : args (a FixedArray built from argumentsList) |
| 1791 // -- a2 : len (number of elements to push from args) | 1791 // -- a2 : len (number of elements to push from args) |
| 1792 // -- a3 : new.target (checked to be constructor or undefined) | 1792 // -- a3 : new.target (checked to be constructor or undefined) |
| 1793 // -- sp[0] : thisArgument | 1793 // -- sp[0] : thisArgument |
| 1794 // ----------------------------------- | 1794 // ----------------------------------- |
| (...skipping 124 matching lines...) |
| 1919 ParameterCount actual(a0); | 1919 ParameterCount actual(a0); |
| 1920 ParameterCount expected(a2); | 1920 ParameterCount expected(a2); |
| 1921 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, | 1921 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, |
| 1922 CheckDebugStepCallWrapper()); | 1922 CheckDebugStepCallWrapper()); |
| 1923 | 1923 |
| 1924 // The function is a "classConstructor", need to raise an exception. | 1924 // The function is a "classConstructor", need to raise an exception. |
| 1925 __ bind(&class_constructor); | 1925 __ bind(&class_constructor); |
| 1926 { | 1926 { |
| 1927 FrameScope frame(masm, StackFrame::INTERNAL); | 1927 FrameScope frame(masm, StackFrame::INTERNAL); |
| 1928 __ Push(a1); | 1928 __ Push(a1); |
| 1929 __ CallRuntime(Runtime::kThrowConstructorNonCallableError, 1); | 1929 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); |
| 1930 } | 1930 } |
| 1931 } | 1931 } |
| 1932 | 1932 |
| 1933 | 1933 |
| 1934 // static | 1934 // static |
| 1935 void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) { | 1935 void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) { |
| 1936 // ----------- S t a t e ------------- | 1936 // ----------- S t a t e ------------- |
| 1937 // -- a0 : the number of arguments (not including the receiver) | 1937 // -- a0 : the number of arguments (not including the receiver) |
| 1938 // -- a1 : the function to call (checked to be a JSBoundFunction) | 1938 // -- a1 : the function to call (checked to be a JSBoundFunction) |
| 1939 // ----------------------------------- | 1939 // ----------------------------------- |
| (...skipping 26 matching lines...) |
| 1966 __ Subu(sp, sp, Operand(t1)); | 1966 __ Subu(sp, sp, Operand(t1)); |
| 1967 // Check the stack for overflow. We are not trying to catch interruptions | 1967 // Check the stack for overflow. We are not trying to catch interruptions |
| 1968 // (i.e. debug break and preemption) here, so check the "real stack limit". | 1968 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 1969 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); | 1969 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); |
| 1970 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. | 1970 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. |
| 1971 // Restore the stack pointer. | 1971 // Restore the stack pointer. |
| 1972 __ Addu(sp, sp, Operand(t1)); | 1972 __ Addu(sp, sp, Operand(t1)); |
| 1973 { | 1973 { |
| 1974 FrameScope scope(masm, StackFrame::MANUAL); | 1974 FrameScope scope(masm, StackFrame::MANUAL); |
| 1975 __ EnterFrame(StackFrame::INTERNAL); | 1975 __ EnterFrame(StackFrame::INTERNAL); |
| 1976 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 1976 __ CallRuntime(Runtime::kThrowStackOverflow); |
| 1977 } | 1977 } |
| 1978 __ bind(&done); | 1978 __ bind(&done); |
| 1979 } | 1979 } |
| 1980 | 1980 |
| 1981 // Relocate arguments down the stack. | 1981 // Relocate arguments down the stack. |
| 1982 { | 1982 { |
| 1983 Label loop, done_loop; | 1983 Label loop, done_loop; |
| 1984 __ mov(t1, zero_reg); | 1984 __ mov(t1, zero_reg); |
| 1985 __ bind(&loop); | 1985 __ bind(&loop); |
| 1986 __ Branch(&done_loop, gt, t1, Operand(a0)); | 1986 __ Branch(&done_loop, gt, t1, Operand(a0)); |
| (...skipping 80 matching lines...) |
| 2067 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); | 2067 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); |
| 2068 __ Jump(masm->isolate()->builtins()->CallFunction( | 2068 __ Jump(masm->isolate()->builtins()->CallFunction( |
| 2069 ConvertReceiverMode::kNotNullOrUndefined), | 2069 ConvertReceiverMode::kNotNullOrUndefined), |
| 2070 RelocInfo::CODE_TARGET); | 2070 RelocInfo::CODE_TARGET); |
| 2071 | 2071 |
| 2072 // 3. Call to something that is not callable. | 2072 // 3. Call to something that is not callable. |
| 2073 __ bind(&non_callable); | 2073 __ bind(&non_callable); |
| 2074 { | 2074 { |
| 2075 FrameScope scope(masm, StackFrame::INTERNAL); | 2075 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2076 __ Push(a1); | 2076 __ Push(a1); |
| 2077 __ CallRuntime(Runtime::kThrowCalledNonCallable, 1); | 2077 __ CallRuntime(Runtime::kThrowCalledNonCallable); |
| 2078 } | 2078 } |
| 2079 } | 2079 } |
| 2080 | 2080 |
| 2081 | 2081 |
| 2082 // static | 2082 // static |
| 2083 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | 2083 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { |
| 2084 // ----------- S t a t e ------------- | 2084 // ----------- S t a t e ------------- |
| 2085 // -- a0 : the number of arguments (not including the receiver) | 2085 // -- a0 : the number of arguments (not including the receiver) |
| 2086 // -- a1 : the constructor to call (checked to be a JSFunction) | 2086 // -- a1 : the constructor to call (checked to be a JSFunction) |
| 2087 // -- a3 : the new target (checked to be a constructor) | 2087 // -- a3 : the new target (checked to be a constructor) |
| (...skipping 42 matching lines...) |
| 2130 __ Subu(sp, sp, Operand(t1)); | 2130 __ Subu(sp, sp, Operand(t1)); |
| 2131 // Check the stack for overflow. We are not trying to catch interruptions | 2131 // Check the stack for overflow. We are not trying to catch interruptions |
| 2132 // (i.e. debug break and preemption) here, so check the "real stack limit". | 2132 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 2133 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); | 2133 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); |
| 2134 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. | 2134 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. |
| 2135 // Restore the stack pointer. | 2135 // Restore the stack pointer. |
| 2136 __ Addu(sp, sp, Operand(t1)); | 2136 __ Addu(sp, sp, Operand(t1)); |
| 2137 { | 2137 { |
| 2138 FrameScope scope(masm, StackFrame::MANUAL); | 2138 FrameScope scope(masm, StackFrame::MANUAL); |
| 2139 __ EnterFrame(StackFrame::INTERNAL); | 2139 __ EnterFrame(StackFrame::INTERNAL); |
| 2140 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 2140 __ CallRuntime(Runtime::kThrowStackOverflow); |
| 2141 } | 2141 } |
| 2142 __ bind(&done); | 2142 __ bind(&done); |
| 2143 } | 2143 } |
| 2144 | 2144 |
| 2145 // Relocate arguments down the stack. | 2145 // Relocate arguments down the stack. |
| 2146 { | 2146 { |
| 2147 Label loop, done_loop; | 2147 Label loop, done_loop; |
| 2148 __ mov(t1, zero_reg); | 2148 __ mov(t1, zero_reg); |
| 2149 __ bind(&loop); | 2149 __ bind(&loop); |
| 2150 __ Branch(&done_loop, ge, t1, Operand(a0)); | 2150 __ Branch(&done_loop, ge, t1, Operand(a0)); |
| (...skipping 182 matching lines...) |
| 2333 __ Branch(&no_strong_error, eq, t3, Operand(zero_reg)); | 2333 __ Branch(&no_strong_error, eq, t3, Operand(zero_reg)); |
| 2334 | 2334 |
| 2335 // What we really care about is the required number of arguments. | 2335 // What we really care about is the required number of arguments. |
| 2336 __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kLengthOffset)); | 2336 __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kLengthOffset)); |
| 2337 __ SmiUntag(t2); | 2337 __ SmiUntag(t2); |
| 2338 __ Branch(&no_strong_error, ge, a0, Operand(t2)); | 2338 __ Branch(&no_strong_error, ge, a0, Operand(t2)); |
| 2339 | 2339 |
| 2340 { | 2340 { |
| 2341 FrameScope frame(masm, StackFrame::MANUAL); | 2341 FrameScope frame(masm, StackFrame::MANUAL); |
| 2342 EnterArgumentsAdaptorFrame(masm); | 2342 EnterArgumentsAdaptorFrame(masm); |
| 2343 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0); | 2343 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments); |
| 2344 } | 2344 } |
| 2345 | 2345 |
| 2346 __ bind(&no_strong_error); | 2346 __ bind(&no_strong_error); |
| 2347 EnterArgumentsAdaptorFrame(masm); | 2347 EnterArgumentsAdaptorFrame(masm); |
| 2348 ArgumentAdaptorStackCheck(masm, &stack_overflow); | 2348 ArgumentAdaptorStackCheck(masm, &stack_overflow); |
| 2349 | 2349 |
| 2350 // Calculate copy start address into a0 and copy end address into t3. | 2350 // Calculate copy start address into a0 and copy end address into t3. |
| 2351 // a0: actual number of arguments as a smi | 2351 // a0: actual number of arguments as a smi |
| 2352 // a1: function | 2352 // a1: function |
| 2353 // a2: expected number of arguments | 2353 // a2: expected number of arguments |
| (...skipping 57 matching lines...) |
| 2411 // ------------------------------------------- | 2411 // ------------------------------------------- |
| 2412 // Don't adapt arguments. | 2412 // Don't adapt arguments. |
| 2413 // ------------------------------------------- | 2413 // ------------------------------------------- |
| 2414 __ bind(&dont_adapt_arguments); | 2414 __ bind(&dont_adapt_arguments); |
| 2415 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | 2415 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); |
| 2416 __ Jump(t0); | 2416 __ Jump(t0); |
| 2417 | 2417 |
| 2418 __ bind(&stack_overflow); | 2418 __ bind(&stack_overflow); |
| 2419 { | 2419 { |
| 2420 FrameScope frame(masm, StackFrame::MANUAL); | 2420 FrameScope frame(masm, StackFrame::MANUAL); |
| 2421 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 2421 __ CallRuntime(Runtime::kThrowStackOverflow); |
| 2422 __ break_(0xCC); | 2422 __ break_(0xCC); |
| 2423 } | 2423 } |
| 2424 } | 2424 } |
| 2425 | 2425 |
| 2426 | 2426 |
| 2427 #undef __ | 2427 #undef __ |
| 2428 | 2428 |
| 2429 } // namespace internal | 2429 } // namespace internal |
| 2430 } // namespace v8 | 2430 } // namespace v8 |
| 2431 | 2431 |
| 2432 #endif // V8_TARGET_ARCH_MIPS | 2432 #endif // V8_TARGET_ARCH_MIPS |
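Every hunk in this diff makes the same mechanical change: the trailing argument count is dropped from `CallRuntime`/`TailCallRuntime` call sites (for example `__ CallRuntime(Runtime::kNewObject, 2)` becomes `__ CallRuntime(Runtime::kNewObject)`). This works because the arity of each runtime function is already recorded in its descriptor (the `nargs` field reachable via `Runtime::FunctionForId`), so a count-free overload can look it up itself. The standalone C++ sketch below models that pattern; the enum, table, and `MacroAssemblerModel` class are illustrative stand-ins, not the actual V8 declarations.

```cpp
// Illustrative sketch only (not the real V8 sources): shows how a
// CallRuntime overload can derive the argument count from a per-function
// descriptor, letting call sites stop passing it explicitly.
#include <cassert>
#include <iostream>

namespace runtime_model {

enum class FunctionId { kNewObject, kThrowStackOverflow, kSymbolDescriptiveString };

struct Function {
  FunctionId id;
  const char* name;
  int nargs;  // Arity recorded once, next to the runtime function itself.
};

// Hypothetical descriptor table standing in for Runtime::FunctionForId().
inline const Function* FunctionForId(FunctionId id) {
  static const Function kFunctions[] = {
      {FunctionId::kNewObject, "NewObject", 2},
      {FunctionId::kThrowStackOverflow, "ThrowStackOverflow", 0},
      {FunctionId::kSymbolDescriptiveString, "SymbolDescriptiveString", 1},
  };
  for (const Function& f : kFunctions) {
    if (f.id == id) return &f;
  }
  assert(false && "unknown runtime function");
  return nullptr;
}

class MacroAssemblerModel {
 public:
  // Old-style entry point: the caller supplies the argument count.
  void CallRuntime(const Function* f, int num_arguments) {
    std::cout << "call " << f->name << " with " << num_arguments << " args\n";
  }

  // New-style convenience overload: the arity comes from the descriptor,
  // which is what allows the builtins above to drop the trailing count.
  void CallRuntime(FunctionId id) {
    const Function* f = FunctionForId(id);
    CallRuntime(f, f->nargs);
  }
};

}  // namespace runtime_model

int main() {
  runtime_model::MacroAssemblerModel masm;
  masm.CallRuntime(runtime_model::FunctionId::kNewObject);           // 2 args
  masm.CallRuntime(runtime_model::FunctionId::kThrowStackOverflow);  // 0 args
  return 0;
}
```

Keeping the arity in a single descriptor removes the chance of a call site passing a count that disagrees with the runtime function's actual parameter list, which is presumably why the cleanup touches every builtin in this file rather than just a few.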