OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 1131 matching lines...)
1142 __ ld(scratch, MemOperand(index)); | 1142 __ ld(scratch, MemOperand(index)); |
1143 __ Daddu(index, index, Operand(-kPointerSize)); | 1143 __ Daddu(index, index, Operand(-kPointerSize)); |
1144 __ push(scratch); | 1144 __ push(scratch); |
1145 __ bind(&loop_check); | 1145 __ bind(&loop_check); |
1146 __ Branch(&loop_header, gt, index, Operand(scratch2)); | 1146 __ Branch(&loop_header, gt, index, Operand(scratch2)); |
1147 } | 1147 } |
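Note: the loop above is the tail of Generate_InterpreterPushArgs. It walks from the address of the first argument (index) down to a precomputed end address (scratch2), pushing one pointer-sized slot per iteration. A rough C++ model of that control flow, with a standalone container standing in for the machine stack (the real code emits MIPS64 instructions):

    #include <cstdint>
    #include <vector>

    // Model: push slots from `index` down to (but not including) `end`.
    void PushArgs(const intptr_t* index, const intptr_t* end,
                  std::vector<intptr_t>* stack) {
      while (index > end) {        // Branch(&loop_header, gt, index, scratch2)
        stack->push_back(*index);  // ld(scratch, ...) + push(scratch)
        --index;                   // Daddu(index, index, -kPointerSize)
      }
    }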
1148 | 1148 |
1149 // static | 1149 // static |
1150 void Builtins::Generate_InterpreterPushArgsAndCallImpl( | 1150 void Builtins::Generate_InterpreterPushArgsAndCallImpl( |
1151 MacroAssembler* masm, TailCallMode tail_call_mode, | 1151 MacroAssembler* masm, TailCallMode tail_call_mode, |
1152 CallableType function_type) { | 1152 InterpreterPushArgsMode mode) { |
1153 // ----------- S t a t e ------------- | 1153 // ----------- S t a t e ------------- |
1154 // -- a0 : the number of arguments (not including the receiver) | 1154 // -- a0 : the number of arguments (not including the receiver) |
1155 // -- a2 : the address of the first argument to be pushed. Subsequent | 1155 // -- a2 : the address of the first argument to be pushed. Subsequent |
1156 // arguments should be consecutive above this, in the same order as | 1156 // arguments should be consecutive above this, in the same order as |
1157 // they are to be pushed onto the stack. | 1157 // they are to be pushed onto the stack. |
1158 // -- a1 : the target to call (can be any Object). | 1158 // -- a1 : the target to call (can be any Object). |
1159 // ----------------------------------- | 1159 // ----------------------------------- |
1160 Label stack_overflow; | 1160 Label stack_overflow; |
1161 | 1161 |
1162 __ Daddu(a3, a0, Operand(1)); // Add one for receiver. | 1162 __ Daddu(a3, a0, Operand(1)); // Add one for receiver. |
1163 | 1163 |
1164 // This function modifies a2, t0 and a4. | 1164 // This function modifies a2, t0 and a4. |
1165 Generate_InterpreterPushArgs(masm, a3, a2, a4, t0, &stack_overflow); | 1165 Generate_InterpreterPushArgs(masm, a3, a2, a4, t0, &stack_overflow); |
1166 | 1166 |
1167 // Call the target. | 1167 // Call the target. |
1168 if (function_type == CallableType::kJSFunction) { | 1168 if (mode == InterpreterPushArgsMode::kJSFunction) { |
1169 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, | 1169 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, |
1170 tail_call_mode), | 1170 tail_call_mode), |
1171 RelocInfo::CODE_TARGET); | 1171 RelocInfo::CODE_TARGET); |
| 1172 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { |
| 1173 __ Jump(masm->isolate()->builtins()->CallWithSpread(), |
| 1174 RelocInfo::CODE_TARGET); |
1172 } else { | 1175 } else { |
1173 DCHECK_EQ(function_type, CallableType::kAny); | |
1174 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, | 1176 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
1175 tail_call_mode), | 1177 tail_call_mode), |
1176 RelocInfo::CODE_TARGET); | 1178 RelocInfo::CODE_TARGET); |
1177 } | 1179 } |
1178 | 1180 |
1179 __ bind(&stack_overflow); | 1181 __ bind(&stack_overflow); |
1180 { | 1182 { |
1181 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1183 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1182 // Unreachable code. | 1184 // Unreachable code. |
1183 __ break_(0xCC); | 1185 __ break_(0xCC); |
1184 } | 1186 } |
1185 } | 1187 } |
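Note: this hunk swaps the two-way CallableType dispatch for the three-way InterpreterPushArgsMode, adding a dedicated path for calls whose final argument is a spread. A compilable sketch of the new selection logic (standalone enums mirroring the diff; the real code tail-jumps to builtin code objects):

    enum class InterpreterPushArgsMode { kJSFunction, kWithFinalSpread, kOther };
    enum class CallTarget { kCallFunction, kCallWithSpread, kCall };

    // Mirrors the if/else chain above: a known JSFunction takes the direct
    // CallFunction path, a trailing spread goes through CallWithSpread, and
    // everything else falls back to the generic Call builtin.
    CallTarget Select(InterpreterPushArgsMode mode) {
      if (mode == InterpreterPushArgsMode::kJSFunction)
        return CallTarget::kCallFunction;
      if (mode == InterpreterPushArgsMode::kWithFinalSpread)
        return CallTarget::kCallWithSpread;
      return CallTarget::kCall;
    }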
1186 | 1188 |
1187 // static | 1189 // static |
1188 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( | 1190 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( |
1189 MacroAssembler* masm, PushArgsConstructMode mode) { | 1191 MacroAssembler* masm, InterpreterPushArgsMode mode) { |
1190 // ----------- S t a t e ------------- | 1192 // ----------- S t a t e ------------- |
1191 // -- a0 : argument count (not including receiver) | 1193 // -- a0 : argument count (not including receiver) |
1192 // -- a3 : new target | 1194 // -- a3 : new target |
1193 // -- a1 : constructor to call | 1195 // -- a1 : constructor to call |
1194 // -- a2 : allocation site feedback if available, undefined otherwise. | 1196 // -- a2 : allocation site feedback if available, undefined otherwise. |
1195 // -- a4 : address of the first argument | 1197 // -- a4 : address of the first argument |
1196 // ----------------------------------- | 1198 // ----------------------------------- |
1197 Label stack_overflow; | 1199 Label stack_overflow; |
1198 | 1200 |
1199 // Push a slot for the receiver. | 1201 // Push a slot for the receiver. |
1200 __ push(zero_reg); | 1202 __ push(zero_reg); |
1201 | 1203 |
1202 // This function modifies t0, a4 and a5. | 1204 // This function modifies t0, a4 and a5. |
1203 Generate_InterpreterPushArgs(masm, a0, a4, a5, t0, &stack_overflow); | 1205 Generate_InterpreterPushArgs(masm, a0, a4, a5, t0, &stack_overflow); |
1204 | 1206 |
1205 __ AssertUndefinedOrAllocationSite(a2, t0); | 1207 __ AssertUndefinedOrAllocationSite(a2, t0); |
1206 if (mode == PushArgsConstructMode::kJSFunction) { | 1208 if (mode == InterpreterPushArgsMode::kJSFunction) { |
1207 __ AssertFunction(a1); | 1209 __ AssertFunction(a1); |
1208 | 1210 |
1209 // Tail call to the function-specific construct stub (still in the caller | 1211 // Tail call to the function-specific construct stub (still in the caller |
1210 // context at this point). | 1212 // context at this point). |
1211 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 1213 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
1212 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); | 1214 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); |
1213 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1215 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); |
1214 __ Jump(at); | 1216 __ Jump(at); |
1215 } else if (mode == PushArgsConstructMode::kWithFinalSpread) { | 1217 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { |
1216 // Call the constructor with a0, a1, and a3 unmodified. | 1218 // Call the constructor with a0, a1, and a3 unmodified. |
1217 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), | 1219 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), |
1218 RelocInfo::CODE_TARGET); | 1220 RelocInfo::CODE_TARGET); |
1219 } else { | 1221 } else { |
1220 DCHECK_EQ(PushArgsConstructMode::kOther, mode); | 1222 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode); |
1221 // Call the constructor with a0, a1, and a3 unmodified. | 1223 // Call the constructor with a0, a1, and a3 unmodified. |
1222 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1224 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
1223 } | 1225 } |
1224 | 1226 |
1225 __ bind(&stack_overflow); | 1227 __ bind(&stack_overflow); |
1226 { | 1228 { |
1227 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1229 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1228 // Unreachable code. | 1230 // Unreachable code. |
1229 __ break_(0xCC); | 1231 __ break_(0xCC); |
1230 } | 1232 } |
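Note: for construction, a zero word is pushed first to reserve the receiver slot (the actual receiver is allocated later on the construct path), and the arguments are copied after it. A rough picture of the resulting stack for n arguments, assuming the first argument sits at the highest address and kPointerSize == 8 on this target:

    // sp[0]            : argument n-1   (pushed last)
    // ...
    // sp[8 * (n - 1)]  : argument 0     (pushed first after the hole)
    // sp[8 * n]        : receiver hole  (zero_reg)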
(...skipping 1414 matching lines...)
2645 | 2647 |
2646 // 3. Call to something that is not callable. | 2648 // 3. Call to something that is not callable. |
2647 __ bind(&non_callable); | 2649 __ bind(&non_callable); |
2648 { | 2650 { |
2649 FrameScope scope(masm, StackFrame::INTERNAL); | 2651 FrameScope scope(masm, StackFrame::INTERNAL); |
2650 __ Push(a1); | 2652 __ Push(a1); |
2651 __ CallRuntime(Runtime::kThrowCalledNonCallable); | 2653 __ CallRuntime(Runtime::kThrowCalledNonCallable); |
2652 } | 2654 } |
2653 } | 2655 } |
2654 | 2656 |
| 2657 static void CheckSpreadAndPushToStack(MacroAssembler* masm) { |
| 2658 Register argc = a0; |
| 2659 Register constructor = a1; |
| 2660 Register new_target = a3; |
| 2661 |
| 2662 Register scratch = t0; |
| 2663 Register scratch2 = t1; |
| 2664 |
| 2665 Register spread = a2; |
| 2666 Register spread_map = a4; |
| 2667 |
| 2668 Register spread_len = a4; |
| 2669 |
| 2670 Register native_context = a5; |
| 2671 |
| 2672 __ ld(spread, MemOperand(sp, 0)); |
| 2673 __ ld(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); |
| 2674 __ ld(native_context, NativeContextMemOperand()); |
| 2675 |
| 2676 Label runtime_call, push_args; |
| 2677 // Check that the spread is an array. |
| 2678 __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset)); |
| 2679 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE)); |
| 2680 |
| 2681 // Check that we have the original ArrayPrototype. |
| 2682 __ ld(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); |
| 2683 __ ld(scratch2, ContextMemOperand(native_context, |
| 2684 Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); |
| 2685 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); |
| 2686 |
| 2687 // Check that the ArrayPrototype hasn't been modified in a way that would |
| 2688 // affect iteration. |
| 2689 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); |
| 2690 __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); |
| 2691 __ Branch(&runtime_call, ne, scratch, |
| 2692 Operand(Smi::FromInt(Isolate::kProtectorValid))); |
| 2693 |
| 2694 // Check that the map of the initial array iterator hasn't changed. |
| 2695 __ ld(scratch, |
| 2696 ContextMemOperand(native_context, |
| 2697 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); |
| 2698 __ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 2699 __ ld(scratch2, |
| 2700 ContextMemOperand(native_context, |
| 2701 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); |
| 2702 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); |
| 2703 |
| 2704 // For FastPacked kinds, iteration will have the same effect as simply |
| 2705 // accessing each property in order. |
| 2706 Label no_protector_check; |
| 2707 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); |
| 2708 __ DecodeField<Map::ElementsKindBits>(scratch); |
| 2709 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS)); |
| 2710 // For non-FastHoley kinds, we can skip the protector check. |
| 2711 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); |
| 2712 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); |
| 2713 // Check the ArrayProtector cell. |
| 2714 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); |
| 2715 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); |
| 2716 __ Branch(&runtime_call, ne, scratch, |
| 2717 Operand(Smi::FromInt(Isolate::kProtectorValid))); |
| 2718 |
| 2719 __ bind(&no_protector_check); |
| 2720 // Load the FixedArray backing store, but use the length from the array. |
| 2721 __ lw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset)); |
| 2722 __ ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); |
| 2723 __ Branch(&push_args); |
| 2724 |
| 2725 __ bind(&runtime_call); |
| 2726 { |
| 2727 // Call the builtin for the result of the spread. |
| 2728 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2729 __ SmiTag(argc); |
| 2730 __ Push(constructor, new_target, argc, spread); |
| 2731 __ CallRuntime(Runtime::kSpreadIterableFixed); |
| 2732 __ mov(spread, v0); |
| 2733 __ Pop(constructor, new_target, argc); |
| 2734 __ SmiUntag(argc); |
| 2735 } |
| 2736 |
| 2737 { |
| 2738 // Calculate the new nargs including the result of the spread. |
| 2739 __ lw(spread_len, |
| 2740 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset)); |
| 2741 |
| 2742 __ bind(&push_args); |
| 2743 // argc += spread_len - 1. Subtract 1 for the spread itself. |
| 2744 __ Daddu(argc, argc, spread_len); |
| 2745 __ Dsubu(argc, argc, Operand(1)); |
| 2746 |
| 2747 // Pop the spread argument off the stack. |
| 2748 __ Pop(scratch); |
| 2749 } |
| 2750 |
| 2751 // Check for stack overflow. |
| 2752 { |
| 2753 // Check the stack for overflow. We are not trying to catch interruptions |
| 2754 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 2755 Label done; |
| 2756 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); |
| 2757 // Make scratch the space we have left. The stack might already be |
| 2758 // overflowed here, which will cause scratch to become negative.
| 2759 __ Dsubu(scratch, sp, scratch); |
| 2760 // Check if the arguments will overflow the stack. |
| 2761 __ dsll(at, spread_len, kPointerSizeLog2); |
| 2762 __ Branch(&done, gt, scratch, Operand(at)); // Signed comparison. |
| 2763 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
| 2764 __ bind(&done); |
| 2765 } |
| 2766 |
| 2767 // Put the evaluated spread onto the stack as additional arguments. |
| 2768 { |
| 2769 __ mov(scratch, zero_reg); |
| 2770 Label done, loop; |
| 2771 __ bind(&loop); |
| 2772 __ Branch(&done, eq, scratch, Operand(spread_len)); |
| 2773 __ Dlsa(scratch2, spread, scratch, kPointerSizeLog2); |
| 2774 __ ld(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); |
| 2775 __ Push(scratch2); |
| 2776 __ Daddu(scratch, scratch, Operand(1)); |
| 2777 __ Branch(&loop); |
| 2778 __ bind(&done); |
| 2779 } |
| 2780 } |
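Note: CheckSpreadAndPushToStack reads the spread's FixedArray backing store directly only when iterating the array is observably equivalent to indexing its elements in order; any doubt routes through Runtime::kSpreadIterableFixed. A condensed C++ sketch of the fast-path decision, with hypothetical predicate fields standing in for the map, protector-cell, and elements-kind loads above:

    // Elements kinds in V8's fast range, in their actual relative order.
    enum ElementsKind {
      FAST_SMI_ELEMENTS,
      FAST_HOLEY_SMI_ELEMENTS,
      FAST_ELEMENTS,
      FAST_HOLEY_ELEMENTS,
    };

    struct SpreadChecks {                // each field models one branch above
      bool is_js_array;                  // instance type == JS_ARRAY_TYPE
      bool original_array_prototype;     // map prototype is the initial one
      bool iterator_protector_valid;     // ArrayIterator protector intact
      bool iterator_map_unchanged;       // initial array iterator map intact
      ElementsKind kind;
      bool array_protector_valid;        // hole reads can't reach the prototype
    };

    bool CanTakeFastPath(const SpreadChecks& s) {
      if (!s.is_js_array || !s.original_array_prototype ||
          !s.iterator_protector_valid || !s.iterator_map_unchanged)
        return false;
      if (s.kind > FAST_HOLEY_ELEMENTS) return false;  // not a fast kind
      // Packed kinds never load holes, so only the holey kinds need the
      // ArrayProtector cell to still be valid.
      bool packed = s.kind == FAST_SMI_ELEMENTS || s.kind == FAST_ELEMENTS;
      return packed || s.array_protector_valid;
    }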
| 2781 |
| 2782 // static |
| 2783 void Builtins::Generate_CallWithSpread(MacroAssembler* masm) { |
| 2784 // ----------- S t a t e ------------- |
| 2785 // -- a0 : the number of arguments (not including the receiver) |
| 2786 // -- a1 : the target to call (can be any Object). |
| 2787 // ----------------------------------- |
| 2788 |
| 2789 // CheckSpreadAndPushToStack will push a3 to save it. |
| 2790 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); |
| 2791 CheckSpreadAndPushToStack(masm); |
| 2792 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
| 2793 TailCallMode::kDisallow), |
| 2794 RelocInfo::CODE_TARGET); |
| 2795 } |
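Note: Generate_CallWithSpread loads undefined into a3 only because CheckSpreadAndPushToStack unconditionally saves and restores new_target (a3) around its runtime call; a plain call has no new target, so any valid value satisfies the push/pop. Generate_ConstructWithSpread below reuses the same helper with a real new target and then jumps to the generic Construct builtin, which picks up the widened argument count from a0.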
| 2796 |
2655 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | 2797 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { |
2656 // ----------- S t a t e ------------- | 2798 // ----------- S t a t e ------------- |
2657 // -- a0 : the number of arguments (not including the receiver) | 2799 // -- a0 : the number of arguments (not including the receiver) |
2658 // -- a1 : the constructor to call (checked to be a JSFunction) | 2800 // -- a1 : the constructor to call (checked to be a JSFunction) |
2659 // -- a3 : the new target (checked to be a constructor) | 2801 // -- a3 : the new target (checked to be a constructor) |
2660 // ----------------------------------- | 2802 // ----------------------------------- |
2661 __ AssertFunction(a1); | 2803 __ AssertFunction(a1); |
2662 | 2804 |
2663 // The calling convention for function-specific ConstructStubs requires | 2805 // The calling convention for function-specific ConstructStubs requires |
2664 // a2 to contain either an AllocationSite or undefined. | 2806 // a2 to contain either an AllocationSite or undefined. |
(...skipping 167 matching lines...)
2832 | 2974 |
2833 // static | 2975 // static |
2834 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { | 2976 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { |
2835 // ----------- S t a t e ------------- | 2977 // ----------- S t a t e ------------- |
2836 // -- a0 : the number of arguments (not including the receiver) | 2978 // -- a0 : the number of arguments (not including the receiver) |
2837 // -- a1 : the constructor to call (can be any Object) | 2979 // -- a1 : the constructor to call (can be any Object) |
2838 // -- a3 : the new target (either the same as the constructor or | 2980 // -- a3 : the new target (either the same as the constructor or |
2839 // the JSFunction on which new was invoked initially) | 2981 // the JSFunction on which new was invoked initially) |
2840 // ----------------------------------- | 2982 // ----------------------------------- |
2841 | 2983 |
2842 Register argc = a0; | 2984 CheckSpreadAndPushToStack(masm); |
2843 Register constructor = a1; | |
2844 Register new_target = a3; | |
2845 | |
2846 Register scratch = t0; | |
2847 Register scratch2 = t1; | |
2848 | |
2849 Register spread = a2; | |
2850 Register spread_map = a4; | |
2851 | |
2852 Register native_context = a5; | |
2853 | |
2854 __ ld(spread, MemOperand(sp, 0)); | |
2855 __ ld(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); | |
2856 __ ld(native_context, NativeContextMemOperand()); | |
2857 | |
2858 Label runtime_call, push_args; | |
2859 // Check that the spread is an array. | |
2860 __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset)); | |
2861 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE)); | |
2862 | |
2863 // Check that we have the original ArrayPrototype. | |
2864 __ ld(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); | |
2865 __ ld(scratch2, ContextMemOperand(native_context, | |
2866 Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); | |
2867 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); | |
2868 | |
2869 // Check that the ArrayPrototype hasn't been modified in a way that would | |
2870 // affect iteration. | |
2871 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); | |
2872 __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); | |
2873 __ Branch(&runtime_call, ne, scratch, | |
2874 Operand(Smi::FromInt(Isolate::kProtectorValid))); | |
2875 | |
2876 // Check that the map of the initial array iterator hasn't changed. | |
2877 __ ld(scratch, | |
2878 ContextMemOperand(native_context, | |
2879 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); | |
2880 __ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
2881 __ ld(scratch2, | |
2882 ContextMemOperand(native_context, | |
2883 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); | |
2884 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); | |
2885 | |
2886 // For FastPacked kinds, iteration will have the same effect as simply | |
2887 // accessing each property in order. | |
2888 Label no_protector_check; | |
2889 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); | |
2890 __ DecodeField<Map::ElementsKindBits>(scratch); | |
2891 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS)); | |
2892 // For non-FastHoley kinds, we can skip the protector check. | |
2893 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); | |
2894 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); | |
2895 // Check the ArrayProtector cell. | |
2896 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); | |
2897 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); | |
2898 __ Branch(&runtime_call, ne, scratch, | |
2899 Operand(Smi::FromInt(Isolate::kProtectorValid))); | |
2900 | |
2901 __ bind(&no_protector_check); | |
2902 // Load the FixedArray backing store. | |
2903 __ ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); | |
2904 __ Branch(&push_args); | |
2905 | |
2906 __ bind(&runtime_call); | |
2907 { | |
2908 // Call the builtin for the result of the spread. | |
2909 FrameScope scope(masm, StackFrame::INTERNAL); | |
2910 __ SmiTag(argc); | |
2911 __ Push(constructor, new_target, argc, spread); | |
2912 __ CallRuntime(Runtime::kSpreadIterableFixed); | |
2913 __ mov(spread, v0); | |
2914 __ Pop(constructor, new_target, argc); | |
2915 __ SmiUntag(argc); | |
2916 } | |
2917 | |
2918 Register spread_len = a4; | |
2919 __ bind(&push_args); | |
2920 { | |
2921 // Pop the spread argument off the stack. | |
2922 __ Pop(scratch); | |
2923 // Calculate the new nargs including the result of the spread. | |
2924 __ lw(spread_len, | |
2925 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset)); | |
2926 // argc += spread_len - 1. Subtract 1 for the spread itself. | |
2927 __ Daddu(argc, argc, spread_len); | |
2928 __ Dsubu(argc, argc, Operand(1)); | |
2929 } | |
2930 | |
2931 // Check for stack overflow. | |
2932 { | |
2933 // Check the stack for overflow. We are not trying to catch interruptions | |
2934 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2935 Label done; | |
2936 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); | |
2937 // Make scratch the space we have left. The stack might already be | |
2938 // overflowed here, which will cause scratch to become negative. | 
2939 __ Dsubu(scratch, sp, scratch); | |
2940 // Check if the arguments will overflow the stack. | |
2941 __ dsll(at, spread_len, kPointerSizeLog2); | |
2942 __ Branch(&done, gt, scratch, Operand(at)); // Signed comparison. | |
2943 __ TailCallRuntime(Runtime::kThrowStackOverflow); | |
2944 __ bind(&done); | |
2945 } | |
2946 | |
2947 // Put the evaluated spread onto the stack as additional arguments. | |
2948 { | |
2949 __ mov(scratch, zero_reg); | |
2950 Label done, loop; | |
2951 __ bind(&loop); | |
2952 __ Branch(&done, eq, scratch, Operand(spread_len)); | |
2953 __ Dlsa(scratch2, spread, scratch, kPointerSizeLog2); | |
2954 __ ld(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); | |
2955 __ Push(scratch2); | |
2956 __ Daddu(scratch, scratch, Operand(1)); | |
2957 __ Branch(&loop); | |
2958 __ bind(&done); | |
2959 } | |
2960 | |
2961 // Dispatch. | |
2962 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 2985 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
2963 } | 2986 } |
2964 | 2987 |
2965 // static | 2988 // static |
2966 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { | 2989 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { |
2967 // ----------- S t a t e ------------- | 2990 // ----------- S t a t e ------------- |
2968 // -- a0 : requested object size (untagged) | 2991 // -- a0 : requested object size (untagged) |
2969 // -- ra : return address | 2992 // -- ra : return address |
2970 // ----------------------------------- | 2993 // ----------------------------------- |
2971 __ SmiTag(a0); | 2994 __ SmiTag(a0); |
(...skipping 157 matching lines...)
3129 __ break_(0xCC); | 3152 __ break_(0xCC); |
3130 } | 3153 } |
3131 } | 3154 } |
3132 | 3155 |
3133 #undef __ | 3156 #undef __ |
3134 | 3157 |
3135 } // namespace internal | 3158 } // namespace internal |
3136 } // namespace v8 | 3159 } // namespace v8 |
3137 | 3160 |
3138 #endif // V8_TARGET_ARCH_MIPS64 | 3161 #endif // V8_TARGET_ARCH_MIPS64 |