| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 682 matching lines...) |
| 693 __ Addu(sp, sp, Operand(at)); | 693 __ Addu(sp, sp, Operand(at)); |
| 694 __ Addu(sp, sp, Operand(kPointerSize)); | 694 __ Addu(sp, sp, Operand(kPointerSize)); |
| 695 __ Jump(ra); | 695 __ Jump(ra); |
| 696 } | 696 } |
| 697 | 697 |
| 698 | 698 |
| 699 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; | 699 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; |
| 700 | 700 |
| 701 | 701 |
| 702 // Clobbers a2; preserves all other registers. | 702 // Clobbers a2; preserves all other registers. |
| 703 static void Generate_CheckStackOverflow(MacroAssembler* masm, | 703 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, |
| 704 const int calleeOffset, Register argc, | |
| 705 IsTagged argc_is_tagged) { | 704 IsTagged argc_is_tagged) { |
| 706 // Check the stack for overflow. We are not trying to catch | 705 // Check the stack for overflow. We are not trying to catch |
| 707 // interruptions (e.g. debug break and preemption) here, so the "real stack | 706 // interruptions (e.g. debug break and preemption) here, so the "real stack |
| 708 // limit" is checked. | 707 // limit" is checked. |
| 709 Label okay; | 708 Label okay; |
| 710 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 709 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
| 711 // Make a2 the space we have left. The stack might already be overflowed | 710 // Make a2 the space we have left. The stack might already be overflowed |
| 712 // here, which will cause a2 to become negative. | 711 // here, which will cause a2 to become negative. |
| 713 __ Subu(a2, sp, a2); | 712 __ Subu(a2, sp, a2); |
| 714 // Check if the arguments will overflow the stack. | 713 // Check if the arguments will overflow the stack. |
| 715 if (argc_is_tagged == kArgcIsSmiTagged) { | 714 if (argc_is_tagged == kArgcIsSmiTagged) { |
| 716 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize); | 715 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize); |
| 717 } else { | 716 } else { |
| 718 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); | 717 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); |
| 719 __ sll(t3, argc, kPointerSizeLog2); | 718 __ sll(t3, argc, kPointerSizeLog2); |
| 720 } | 719 } |
| 721 // Signed comparison. | 720 // Signed comparison. |
| 722 __ Branch(&okay, gt, a2, Operand(t3)); | 721 __ Branch(&okay, gt, a2, Operand(t3)); |
| 723 | 722 |
| 724 // Out of stack space. | 723 // Out of stack space. |
| 725 __ lw(a1, MemOperand(fp, calleeOffset)); | |
| 726 if (argc_is_tagged == kArgcIsUntaggedInt) { | |
| 727 __ SmiTag(argc); | |
| 728 } | |
| 729 __ Push(a1, argc); | |
| 730 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 724 __ CallRuntime(Runtime::kThrowStackOverflow, 0); |
| 731 | 725 |
| 732 __ bind(&okay); | 726 __ bind(&okay); |
| 733 } | 727 } |
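Note on the refactor above: because Runtime::kThrowStackOverflow takes no arguments, the overflow path no longer reloads the callee from the frame and pushes it together with argc, so the calleeOffset parameter can be dropped and callers no longer need to know where the function sits in the frame. As a rough host-side model of the check the emitted code performs (a sketch only; the constants mirror V8's 32-bit MIPS values, and the helper name is made up):

    #include <cstdint>

    // Mirrors of V8's constants for 32-bit MIPS (assumed values).
    constexpr int kPointerSizeLog2 = 2;  // 4-byte pointers
    constexpr int kSmiTagSize = 1;       // smis carry a 1-bit tag

    // Returns true when pushing `argc` arguments would cross the real
    // stack limit. `space` may already be negative if sp is past the
    // limit, which is why the comparison must be signed.
    bool WouldOverflowStack(uintptr_t sp, uintptr_t real_stack_limit,
                            intptr_t argc, bool argc_is_smi_tagged) {
      intptr_t space = static_cast<intptr_t>(sp - real_stack_limit);
      // A smi-tagged argc is already shifted left by kSmiTagSize, so it
      // needs a correspondingly smaller shift to become a byte count.
      int shift = argc_is_smi_tagged ? kPointerSizeLog2 - kSmiTagSize
                                     : kPointerSizeLog2;
      intptr_t needed = argc << shift;
      return space <= needed;  // the branch above takes &okay when space > needed
    }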
| 734 | 728 |
| 735 | 729 |
| 736 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 730 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
| 737 bool is_construct) { | 731 bool is_construct) { |
| 738 // Called from JSEntryStub::GenerateBody | 732 // Called from JSEntryStub::GenerateBody |
| 739 | 733 |
| (...skipping 16 matching lines...) |
| 756 // Setup the context (we need to use the caller context from the isolate). | 750 // Setup the context (we need to use the caller context from the isolate). |
| 757 ExternalReference context_address(Isolate::kContextAddress, | 751 ExternalReference context_address(Isolate::kContextAddress, |
| 758 masm->isolate()); | 752 masm->isolate()); |
| 759 __ li(cp, Operand(context_address)); | 753 __ li(cp, Operand(context_address)); |
| 760 __ lw(cp, MemOperand(cp)); | 754 __ lw(cp, MemOperand(cp)); |
| 761 | 755 |
| 762 // Push the function and the receiver onto the stack. | 756 // Push the function and the receiver onto the stack. |
| 763 __ Push(a1, a2); | 757 __ Push(a1, a2); |
| 764 | 758 |
| 765 // Check if we have enough stack space to push all arguments. | 759 // Check if we have enough stack space to push all arguments. |
| 766 // The function is the first thing that was pushed above after entering | |
| 767 // the internal frame. | |
| 768 const int kFunctionOffset = | |
| 769 InternalFrameConstants::kCodeOffset - kPointerSize; | |
| 770 // Clobbers a2. | 760 // Clobbers a2. |
| 771 Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt); | 761 Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt); |
| 772 | 762 |
| 773 // Remember new.target. | 763 // Remember new.target. |
| 774 __ mov(t1, a0); | 764 __ mov(t1, a0); |
| 775 | 765 |
| 776 // Copy arguments to the stack in a loop. | 766 // Copy arguments to the stack in a loop. |
| 777 // a3: argc | 767 // a3: argc |
| 778 // s0: argv, i.e. points to first arg | 768 // s0: argv, i.e. points to first arg |
| 779 Label loop, entry; | 769 Label loop, entry; |
| 780 __ sll(t0, a3, kPointerSizeLog2); | 770 __ sll(t0, a3, kPointerSizeLog2); |
| 781 __ addu(t2, s0, t0); | 771 __ addu(t2, s0, t0); |
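The copy loop itself is elided by this hunk; from the setup above (t0 = a3 << kPointerSizeLog2, t2 = s0 + t0), it walks argv up to the computed end pointer and pushes each argument. A host-side approximation, under the assumption that each slot is pushed as-is (in the real trampoline the slots are handles that get dereferenced first):

    #include <cstdint>

    // Rough model of the elided loop: t2 (= argv + argc * 4) is the end
    // pointer, and each slot is copied onto the downward-growing stack.
    void PushArguments(uintptr_t*& sp, uintptr_t* argv, int argc) {
      uintptr_t* end = argv + argc;  // t2 = s0 + (a3 << kPointerSizeLog2)
      for (uintptr_t* p = argv; p < end; ++p) {
        *--sp = *p;  // push the next argument slot
      }
    }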
| (...skipping 552 matching lines...) |
| 1334 __ Push(a0, a1); | 1324 __ Push(a0, a1); |
| 1335 // Returns (in v0) number of arguments to copy to stack as Smi. | 1325 // Returns (in v0) number of arguments to copy to stack as Smi. |
| 1336 if (targetIsArgument) { | 1326 if (targetIsArgument) { |
| 1337 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, | 1327 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, |
| 1338 CALL_FUNCTION); | 1328 CALL_FUNCTION); |
| 1339 } else { | 1329 } else { |
| 1340 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); | 1330 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); |
| 1341 } | 1331 } |
| 1342 | 1332 |
| 1343 // Returns the result in v0. | 1333 // Returns the result in v0. |
| 1344 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged); | 1334 Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged); |
| 1345 | 1335 |
| 1346 // Push current limit and index. | 1336 // Push current limit and index. |
| 1347 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | 1337 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
| 1348 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | 1338 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
| 1349 __ mov(a1, zero_reg); | 1339 __ mov(a1, zero_reg); |
| 1350 __ lw(a2, MemOperand(fp, kReceiverOffset)); | 1340 __ lw(a2, MemOperand(fp, kReceiverOffset)); |
| 1351 __ Push(v0, a1, a2); // limit, initial index and receiver. | 1341 __ Push(v0, a1, a2); // limit, initial index and receiver. |
| 1352 | 1342 |
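The two slots pushed here (together with the receiver) drive the copy loop that follows: the limit is the smi-tagged argc returned by the prepare builtin, and the index starts at zero. Since Push(v0, a1, a2) pushes v0 first, the limit lands one pointer slot above the index, matching the offset definitions. A small compile-time check of that relative layout (kVectorOffset is defined earlier in the function, outside this hunk, so a placeholder value is used here):

    // Relative layout of the apply loop's frame slots (32-bit MIPS).
    constexpr int kPointerSize = 4;
    constexpr int kVectorOffset = -40;  // placeholder; real value is elsewhere
    constexpr int kIndexOffset = kVectorOffset - 2 * kPointerSize;
    constexpr int kLimitOffset = kVectorOffset - 1 * kPointerSize;
    static_assert(kLimitOffset - kIndexOffset == kPointerSize,
                  "limit sits one pointer slot above index");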
| 1353 // Copy all arguments from the array to the stack. | 1343 // Copy all arguments from the array to the stack. |
| 1354 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, | 1344 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, |
| (...skipping 43 matching lines...) |
| 1398 __ push(a0); | 1388 __ push(a0); |
| 1399 __ lw(a0, MemOperand(fp, kArgumentsOffset)); // get the args array | 1389 __ lw(a0, MemOperand(fp, kArgumentsOffset)); // get the args array |
| 1400 __ push(a0); | 1390 __ push(a0); |
| 1401 __ lw(a0, MemOperand(fp, kNewTargetOffset)); // get the new.target | 1391 __ lw(a0, MemOperand(fp, kNewTargetOffset)); // get the new.target |
| 1402 __ push(a0); | 1392 __ push(a0); |
| 1403 // Returns argument count in v0. | 1393 // Returns argument count in v0. |
| 1404 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, | 1394 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, |
| 1405 CALL_FUNCTION); | 1395 CALL_FUNCTION); |
| 1406 | 1396 |
| 1407 // Returns result in v0. | 1397 // Returns result in v0. |
| 1408 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged); | 1398 Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged); |
| 1409 | 1399 |
| 1410 // Push current limit and index. | 1400 // Push current limit and index. |
| 1411 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | 1401 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
| 1412 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | 1402 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
| 1413 __ push(v0); // limit | 1403 __ push(v0); // limit |
| 1414 __ mov(a1, zero_reg); // initial index | 1404 __ mov(a1, zero_reg); // initial index |
| 1415 __ push(a1); | 1405 __ push(a1); |
| 1416 // Push the constructor function as callee. | 1406 // Push the constructor function as callee. |
| 1417 __ lw(a0, MemOperand(fp, kFunctionOffset)); | 1407 __ lw(a0, MemOperand(fp, kFunctionOffset)); |
| 1418 __ push(a0); | 1408 __ push(a0); |
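The construct path mirrors the apply path's bookkeeping, but uses three separate push instructions and re-loads the constructor from the frame as the callee, so the resulting stack order (top to bottom) is callee, index, limit. A tiny runnable model of that push order (the values are arbitrary stand-ins):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main() {
      std::vector<intptr_t> stack;     // back() models the stack top (sp)
      const intptr_t limit = 6 << 1;   // smi-tagged argc from the builtin (v0)
      const intptr_t index = 0;        // initial index (a1)
      const intptr_t callee = 0x1234;  // constructor reloaded into a0
      stack.push_back(limit);
      stack.push_back(index);
      stack.push_back(callee);
      assert(stack.back() == callee);  // callee ends up on top
      return 0;
    }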
| (...skipping 467 matching lines...) |
| 1886 } | 1876 } |
| 1887 } | 1877 } |
| 1888 | 1878 |
| 1889 | 1879 |
| 1890 #undef __ | 1880 #undef __ |
| 1891 | 1881 |
| 1892 } // namespace internal | 1882 } // namespace internal |
| 1893 } // namespace v8 | 1883 } // namespace v8 |
| 1894 | 1884 |
| 1895 #endif // V8_TARGET_ARCH_MIPS | 1885 #endif // V8_TARGET_ARCH_MIPS |