OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 678 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
689 __ add(sp, sp, r4); | 689 __ add(sp, sp, r4); |
690 __ addi(sp, sp, Operand(kPointerSize)); | 690 __ addi(sp, sp, Operand(kPointerSize)); |
691 __ blr(); | 691 __ blr(); |
692 } | 692 } |
693 | 693 |
694 | 694 |
// Tells Generate_CheckStackOverflow whether the |argc| register holds a
// Smi-tagged argument count or a raw (untagged) machine integer, so it can
// scale the count into a byte offset correctly.
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
696 | 696 |
697 | 697 |
// Emits a stack-overflow guard: verifies that |argc| arguments (count in the
// given register, tagged per |argc_is_tagged|) still fit below the real stack
// limit, and calls Runtime::kThrowStackOverflow if they do not.
// Clobbers r5; preserves all other registers.
// NOTE(review): r0 is also used as a scratch register below, and cmp sets
// cr0 — presumably both are considered free scratch under the PPC ABI and
// V8's codegen conventions here; confirm against MacroAssembler usage.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    // argc is a Smi: convert it straight to a pointer-array byte offset.
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    // argc is a raw integer: scale by the pointer size.
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  // Remaining space must strictly exceed the space the arguments need.
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space. The runtime call throws, so it does not return here.
  __ CallRuntime(Runtime::kThrowStackOverflow, 0);

  __ bind(&okay);
}
730 | 724 |
731 | 725 |
732 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 726 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
733 bool is_construct) { | 727 bool is_construct) { |
734 // Called from Generate_JS_Entry | 728 // Called from Generate_JS_Entry |
735 // r3: new.target | 729 // r3: new.target |
(...skipping 16 matching lines...) Expand all Loading... |
752 masm->isolate()); | 746 masm->isolate()); |
753 __ mov(cp, Operand(context_address)); | 747 __ mov(cp, Operand(context_address)); |
754 __ LoadP(cp, MemOperand(cp)); | 748 __ LoadP(cp, MemOperand(cp)); |
755 | 749 |
756 __ InitializeRootRegister(); | 750 __ InitializeRootRegister(); |
757 | 751 |
758 // Push the function and the receiver onto the stack. | 752 // Push the function and the receiver onto the stack. |
759 __ Push(r4, r5); | 753 __ Push(r4, r5); |
760 | 754 |
761 // Check if we have enough stack space to push all arguments. | 755 // Check if we have enough stack space to push all arguments. |
762 // The function is the first thing that was pushed above after entering | |
763 // the internal frame. | |
764 const int kFunctionOffset = | |
765 InternalFrameConstants::kCodeOffset - kPointerSize; | |
766 // Clobbers r5. | 756 // Clobbers r5. |
767 Generate_CheckStackOverflow(masm, kFunctionOffset, r6, kArgcIsUntaggedInt); | 757 Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt); |
768 | 758 |
769 // Copy arguments to the stack in a loop. | 759 // Copy arguments to the stack in a loop. |
770 // r4: function | 760 // r4: function |
771 // r6: argc | 761 // r6: argc |
772 // r7: argv, i.e. points to first arg | 762 // r7: argv, i.e. points to first arg |
773 Label loop, entry; | 763 Label loop, entry; |
774 __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2)); | 764 __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2)); |
775 __ add(r5, r7, r0); | 765 __ add(r5, r7, r0); |
776 // r5 points past last arg. | 766 // r5 points past last arg. |
777 __ b(&entry); | 767 __ b(&entry); |
(...skipping 564 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1342 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function | 1332 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function |
1343 __ LoadP(r4, MemOperand(fp, kArgumentsOffset)); // get the args array | 1333 __ LoadP(r4, MemOperand(fp, kArgumentsOffset)); // get the args array |
1344 __ Push(r3, r4); | 1334 __ Push(r3, r4); |
1345 if (targetIsArgument) { | 1335 if (targetIsArgument) { |
1346 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, | 1336 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, |
1347 CALL_FUNCTION); | 1337 CALL_FUNCTION); |
1348 } else { | 1338 } else { |
1349 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); | 1339 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); |
1350 } | 1340 } |
1351 | 1341 |
1352 Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged); | 1342 Generate_CheckStackOverflow(masm, r3, kArgcIsSmiTagged); |
1353 | 1343 |
1354 // Push current limit and index. | 1344 // Push current limit and index. |
1355 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | 1345 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
1356 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | 1346 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
1357 __ li(r4, Operand::Zero()); | 1347 __ li(r4, Operand::Zero()); |
1358 __ LoadP(r5, MemOperand(fp, kReceiverOffset)); | 1348 __ LoadP(r5, MemOperand(fp, kReceiverOffset)); |
1359 __ Push(r3, r4, r5); // limit, initial index and receiver. | 1349 __ Push(r3, r4, r5); // limit, initial index and receiver. |
1360 | 1350 |
1361 // Copy all arguments from the array to the stack. | 1351 // Copy all arguments from the array to the stack. |
1362 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, | 1352 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1404 __ bind(&validate_arguments); | 1394 __ bind(&validate_arguments); |
1405 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function | 1395 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function |
1406 __ push(r3); | 1396 __ push(r3); |
1407 __ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array | 1397 __ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array |
1408 __ push(r3); | 1398 __ push(r3); |
1409 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); // get the new.target | 1399 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); // get the new.target |
1410 __ push(r3); | 1400 __ push(r3); |
1411 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, | 1401 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, |
1412 CALL_FUNCTION); | 1402 CALL_FUNCTION); |
1413 | 1403 |
1414 Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged); | 1404 Generate_CheckStackOverflow(masm, r3, kArgcIsSmiTagged); |
1415 | 1405 |
1416 // Push current limit and index. | 1406 // Push current limit and index. |
1417 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | 1407 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
1418 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | 1408 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
1419 __ li(r4, Operand::Zero()); | 1409 __ li(r4, Operand::Zero()); |
1420 __ Push(r3, r4); // limit and initial index. | 1410 __ Push(r3, r4); // limit and initial index. |
1421 // Push the constructor function as callee | 1411 // Push the constructor function as callee |
1422 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); | 1412 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); |
1423 __ push(r3); | 1413 __ push(r3); |
1424 | 1414 |
(...skipping 467 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1892 __ bkpt(0); | 1882 __ bkpt(0); |
1893 } | 1883 } |
1894 } | 1884 } |
1895 | 1885 |
1896 | 1886 |
1897 #undef __ | 1887 #undef __ |
1898 } // namespace internal | 1888 } // namespace internal |
1899 } // namespace v8 | 1889 } // namespace v8 |
1900 | 1890 |
1901 #endif // V8_TARGET_ARCH_PPC | 1891 #endif // V8_TARGET_ARCH_PPC |
OLD | NEW |