OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 682 matching lines...) |
693 __ Daddu(sp, sp, Operand(at)); | 693 __ Daddu(sp, sp, Operand(at)); |
694 __ Daddu(sp, sp, Operand(kPointerSize)); | 694 __ Daddu(sp, sp, Operand(kPointerSize)); |
695 __ Jump(ra); | 695 __ Jump(ra); |
696 } | 696 } |
697 | 697 |
698 | 698 |
699 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; | 699 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; |
700 | 700 |
701 | 701 |
702 // Clobbers a2; preserves all other registers. | 702 // Clobbers a2; preserves all other registers. |
703 static void Generate_CheckStackOverflow(MacroAssembler* masm, | 703 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, |
704 const int calleeOffset, Register argc, | |
705 IsTagged argc_is_tagged) { | 704 IsTagged argc_is_tagged) { |
706 // Check the stack for overflow. We are not trying to catch | 705 // Check the stack for overflow. We are not trying to catch |
707 // interruptions (e.g. debug break and preemption) here, so the "real stack | 706 // interruptions (e.g. debug break and preemption) here, so the "real stack |
708 // limit" is checked. | 707 // limit" is checked. |
709 Label okay; | 708 Label okay; |
710 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 709 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
711 // Make a2 the space we have left. The stack might already be overflowed | 710 // Make a2 the space we have left. The stack might already be overflowed |
712 // here, which will cause a2 to become negative. | 711 // here, which will cause a2 to become negative. |
713 __ dsubu(a2, sp, a2); | 712 __ dsubu(a2, sp, a2); |
714 // Check if the arguments will overflow the stack. | 713 // Check if the arguments will overflow the stack. |
715 if (argc_is_tagged == kArgcIsSmiTagged) { | 714 if (argc_is_tagged == kArgcIsSmiTagged) { |
716 __ SmiScale(a7, v0, kPointerSizeLog2); | 715 __ SmiScale(a7, v0, kPointerSizeLog2); |
717 } else { | 716 } else { |
718 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); | 717 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); |
719 __ dsll(a7, argc, kPointerSizeLog2); | 718 __ dsll(a7, argc, kPointerSizeLog2); |
720 } | 719 } |
721 __ Branch(&okay, gt, a2, Operand(a7)); // Signed comparison. | 720 __ Branch(&okay, gt, a2, Operand(a7)); // Signed comparison. |
722 | 721 |
723 // Out of stack space. | 722 // Out of stack space. |
724 __ ld(a1, MemOperand(fp, calleeOffset)); | |
725 if (argc_is_tagged == kArgcIsUntaggedInt) { | |
726 __ SmiTag(argc); | |
727 } | |
728 __ Push(a1, argc); | |
729 __ CallRuntime(Runtime::kThrowStackOverflow, 0); | 723 __ CallRuntime(Runtime::kThrowStackOverflow, 0); |
730 | 724 |
731 __ bind(&okay); | 725 __ bind(&okay); |
732 } | 726 } |
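
Note on this hunk: the NEW side drops the calleeOffset parameter and the pre-call pushes of the callee and argc, since the failure path now invokes Runtime::kThrowStackOverflow with zero arguments. For readers less familiar with the MIPS64 macro assembler, a minimal host-C++ sketch of the check this helper emits follows. It is illustrative only: EnoughStackSpace, sp, and real_stack_limit are made-up names, and the 64-bit Smi layout (32-bit payload in the upper word) is an assumption of the sketch, not something shown in this diff.

#include <cstdint>

constexpr int kPointerSizeLog2 = 3;  // 8-byte pointers on MIPS64.
constexpr int kSmiShift = 32;        // Assumed: Smi payload lives in the upper 32 bits.

// Mirrors: dsubu a2, sp, a2  followed by  Branch(&okay, gt, a2, Operand(a7)).
// The comparison is deliberately signed: if sp is already below the real
// stack limit, space_left is negative and the check correctly fails.
bool EnoughStackSpace(int64_t sp, int64_t real_stack_limit, int64_t argc,
                      bool argc_is_smi_tagged) {
  int64_t space_left = sp - real_stack_limit;
  // SmiScale(a7, v0, kPointerSizeLog2) goes from the tagged count straight to
  // a byte size with one arithmetic shift; dsll(a7, argc, kPointerSizeLog2)
  // is the plain multiply-by-kPointerSize for an untagged count.
  int64_t bytes_needed = argc_is_smi_tagged
                             ? (argc >> (kSmiShift - kPointerSizeLog2))
                             : (argc << kPointerSizeLog2);
  return space_left > bytes_needed;
}
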
733 | 727 |
734 | 728 |
735 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 729 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
736 bool is_construct) { | 730 bool is_construct) { |
737 // Called from JSEntryStub::GenerateBody | 731 // Called from JSEntryStub::GenerateBody |
738 | 732 |
(...skipping 15 matching lines...) |
754 // Set up the context (we need to use the caller context from the isolate). | 748 // Set up the context (we need to use the caller context from the isolate). |
755 ExternalReference context_address(Isolate::kContextAddress, | 749 ExternalReference context_address(Isolate::kContextAddress, |
756 masm->isolate()); | 750 masm->isolate()); |
757 __ li(cp, Operand(context_address)); | 751 __ li(cp, Operand(context_address)); |
758 __ ld(cp, MemOperand(cp)); | 752 __ ld(cp, MemOperand(cp)); |
759 | 753 |
760 // Push the function and the receiver onto the stack. | 754 // Push the function and the receiver onto the stack. |
761 __ Push(a1, a2); | 755 __ Push(a1, a2); |
762 | 756 |
763 // Check if we have enough stack space to push all arguments. | 757 // Check if we have enough stack space to push all arguments. |
764 // The function is the first thing that was pushed above after entering | |
765 // the internal frame. | |
766 const int kFunctionOffset = | |
767 InternalFrameConstants::kCodeOffset - kPointerSize; | |
768 // Clobbers a2. | 758 // Clobbers a2. |
769 Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt); | 759 Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt); |
770 | 760 |
771 // Remember new.target. | 761 // Remember new.target. |
772 __ mov(a5, a0); | 762 __ mov(a5, a0); |
773 | 763 |
774 // Copy arguments to the stack in a loop. | 764 // Copy arguments to the stack in a loop. |
775 // a3: argc | 765 // a3: argc |
776 // s0: argv, i.e. points to first arg | 766 // s0: argv, i.e. points to first arg |
777 Label loop, entry; | 767 Label loop, entry; |
778 __ dsll(a4, a3, kPointerSizeLog2); | 768 __ dsll(a4, a3, kPointerSizeLog2); |
779 __ daddu(a6, s0, a4); | 769 __ daddu(a6, s0, a4); |
(...skipping 551 matching lines...) |
1331 | 1321 |
1332 // Returns (in v0) number of arguments to copy to stack as Smi. | 1322 // Returns (in v0) number of arguments to copy to stack as Smi. |
1333 if (targetIsArgument) { | 1323 if (targetIsArgument) { |
1334 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, | 1324 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, |
1335 CALL_FUNCTION); | 1325 CALL_FUNCTION); |
1336 } else { | 1326 } else { |
1337 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); | 1327 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); |
1338 } | 1328 } |
1339 | 1329 |
1340 // Returns the result in v0. | 1330 // Returns the result in v0. |
1341 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged); | 1331 Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged); |
1342 | 1332 |
1343 // Push current limit and index. | 1333 // Push current limit and index. |
1344 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | 1334 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
1345 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | 1335 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
1346 __ mov(a1, zero_reg); | 1336 __ mov(a1, zero_reg); |
1347 __ ld(a2, MemOperand(fp, kReceiverOffset)); | 1337 __ ld(a2, MemOperand(fp, kReceiverOffset)); |
1348 __ Push(v0, a1, a2); // limit, initial index and receiver. | 1338 __ Push(v0, a1, a2); // limit, initial index and receiver. |
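
For orientation, a hypothetical sketch of the fp-relative slots this sets up, assuming kVectorOffset is the slot of the feedback vector pushed in the elided part of this function (Push(v0, a1, a2) stores v0 at the highest of the three new addresses):

// Assumed frame layout, growing toward lower addresses:
//   fp + kVectorOffset                                       feedback vector
//   fp + kLimitOffset (= kVectorOffset - 1 * kPointerSize)   limit   (v0, Smi argc)
//   fp + kIndexOffset (= kVectorOffset - 2 * kPointerSize)   index   (a1, starts at 0)
//   fp + kVectorOffset - 3 * kPointerSize                    receiver (a2)
// The copy loop below presumably advances the index slot toward the limit,
// pushing one applied argument per iteration.
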
1349 | 1339 |
1350 // Copy all arguments from the array to the stack. | 1340 // Copy all arguments from the array to the stack. |
1351 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, | 1341 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, |
(...skipping 43 matching lines...) |
1395 __ push(a0); | 1385 __ push(a0); |
1396 __ ld(a0, MemOperand(fp, kArgumentsOffset)); // get the args array | 1386 __ ld(a0, MemOperand(fp, kArgumentsOffset)); // get the args array |
1397 __ push(a0); | 1387 __ push(a0); |
1398 __ ld(a0, MemOperand(fp, kNewTargetOffset)); // get the new.target | 1388 __ ld(a0, MemOperand(fp, kNewTargetOffset)); // get the new.target |
1399 __ push(a0); | 1389 __ push(a0); |
1400 // Returns argument count in v0. | 1390 // Returns argument count in v0. |
1401 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, | 1391 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, |
1402 CALL_FUNCTION); | 1392 CALL_FUNCTION); |
1403 | 1393 |
1404 // Returns result in v0. | 1394 // Returns result in v0. |
1405 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged); | 1395 Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged); |
1406 | 1396 |
1407 // Push current limit and index. | 1397 // Push current limit and index. |
1408 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | 1398 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
1409 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | 1399 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
1410 __ push(v0); // limit | 1400 __ push(v0); // limit |
1411 __ mov(a1, zero_reg); // initial index | 1401 __ mov(a1, zero_reg); // initial index |
1412 __ push(a1); | 1402 __ push(a1); |
1413 // Push the constructor function as callee. | 1403 // Push the constructor function as callee. |
1414 __ ld(a0, MemOperand(fp, kFunctionOffset)); | 1404 __ ld(a0, MemOperand(fp, kFunctionOffset)); |
1415 __ push(a0); | 1405 __ push(a0); |
(...skipping 466 matching lines...) |
1882 } | 1872 } |
1883 } | 1873 } |
1884 | 1874 |
1885 | 1875 |
1886 #undef __ | 1876 #undef __ |
1887 | 1877 |
1888 } // namespace internal | 1878 } // namespace internal |
1889 } // namespace v8 | 1879 } // namespace v8 |
1890 | 1880 |
1891 #endif // V8_TARGET_ARCH_MIPS64 | 1881 #endif // V8_TARGET_ARCH_MIPS64 |