OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 1140 matching lines...)
1151 __ lw(scratch, MemOperand(index)); | 1151 __ lw(scratch, MemOperand(index)); |
1152 __ Addu(index, index, Operand(-kPointerSize)); | 1152 __ Addu(index, index, Operand(-kPointerSize)); |
1153 __ push(scratch); | 1153 __ push(scratch); |
1154 __ bind(&loop_check); | 1154 __ bind(&loop_check); |
1155 __ Branch(&loop_header, gt, index, Operand(scratch2)); | 1155 __ Branch(&loop_header, gt, index, Operand(scratch2)); |
1156 } | 1156 } |
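
The loop in this hunk copies arguments from a downward-walking pointer onto the stack; the entry point (outside the hunk) branches to loop_check first, so an empty range pushes nothing. A stand-alone C++ sketch of the same logic, with Push and the register names replaced by illustrative stand-ins:

    #include <cstdint>
    #include <vector>

    constexpr intptr_t kPointerSize = 4;  // 32-bit MIPS

    // 'index' walks down to 'limit' (scratch2 above); one slot per iteration.
    void PushArgs(intptr_t index, intptr_t limit, std::vector<intptr_t>* stack) {
      while (index > limit) {                                  // bind(&loop_check); Branch gt
        intptr_t value = *reinterpret_cast<intptr_t*>(index);  // lw scratch, MemOperand(index)
        index -= kPointerSize;                                 // Addu(index, index, -kPointerSize)
        stack->push_back(value);                               // push(scratch)
      }
    }
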
1157 | 1157 |
1158 // static | 1158 // static |
1159 void Builtins::Generate_InterpreterPushArgsAndCallImpl( | 1159 void Builtins::Generate_InterpreterPushArgsAndCallImpl( |
1160 MacroAssembler* masm, TailCallMode tail_call_mode, | 1160 MacroAssembler* masm, TailCallMode tail_call_mode, |
1161 CallableType function_type) { | 1161 InterpreterPushArgsMode mode) { |
1162 // ----------- S t a t e ------------- | 1162 // ----------- S t a t e ------------- |
1163 // -- a0 : the number of arguments (not including the receiver) | 1163 // -- a0 : the number of arguments (not including the receiver) |
1164 // -- a2 : the address of the first argument to be pushed. Subsequent | 1164 // -- a2 : the address of the first argument to be pushed. Subsequent |
1165 // arguments should be consecutive above this, in the same order as | 1165 // arguments should be consecutive above this, in the same order as |
1166 // they are to be pushed onto the stack. | 1166 // they are to be pushed onto the stack. |
1167 // -- a1 : the target to call (can be any Object). | 1167 // -- a1 : the target to call (can be any Object). |
1168 // ----------------------------------- | 1168 // ----------------------------------- |
1169 Label stack_overflow; | 1169 Label stack_overflow; |
1170 | 1170 |
1171 __ Addu(t0, a0, Operand(1)); // Add one for receiver. | 1171 __ Addu(t0, a0, Operand(1)); // Add one for receiver. |
1172 | 1172 |
1173 // This function modifies a2, t4 and t1. | 1173 // This function modifies a2, t4 and t1. |
1174 Generate_InterpreterPushArgs(masm, t0, a2, t4, t1, &stack_overflow); | 1174 Generate_InterpreterPushArgs(masm, t0, a2, t4, t1, &stack_overflow); |
1175 | 1175 |
1176 // Call the target. | 1176 // Call the target. |
1177 if (function_type == CallableType::kJSFunction) { | 1177 if (mode == InterpreterPushArgsMode::kJSFunction) { |
1178 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, | 1178 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, |
1179 tail_call_mode), | 1179 tail_call_mode), |
1180 RelocInfo::CODE_TARGET); | 1180 RelocInfo::CODE_TARGET); |
| 1181 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { |
| 1182 __ Jump(masm->isolate()->builtins()->CallWithSpread(), |
| 1183 RelocInfo::CODE_TARGET); |
1181 } else { | 1184 } else { |
1182 DCHECK_EQ(function_type, CallableType::kAny); | |
1183 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, | 1185 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
1184 tail_call_mode), | 1186 tail_call_mode), |
1185 RelocInfo::CODE_TARGET); | 1187 RelocInfo::CODE_TARGET); |
1186 } | 1188 } |
1187 | 1189 |
1188 __ bind(&stack_overflow); | 1190 __ bind(&stack_overflow); |
1189 { | 1191 { |
1190 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1192 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1191 // Unreachable code. | 1193 // Unreachable code. |
1192 __ break_(0xCC); | 1194 __ break_(0xCC); |
1193 } | 1195 } |
1194 } | 1196 } |
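
The substance of this hunk is the dispatch widening from the two-valued CallableType to the three-valued InterpreterPushArgsMode, with the new kWithFinalSpread arm routing through the CallWithSpread builtin added further down. A minimal model of the selection logic (the enum mirrors the real one; Builtin and the function are illustrative stand-ins for the tail jumps):

    enum class InterpreterPushArgsMode { kJSFunction, kWithFinalSpread, kOther };
    enum class Builtin { kCallFunction, kCallWithSpread, kCall };

    Builtin SelectCallTarget(InterpreterPushArgsMode mode) {
      if (mode == InterpreterPushArgsMode::kJSFunction) {
        return Builtin::kCallFunction;    // target statically known to be a JSFunction
      } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
        return Builtin::kCallWithSpread;  // new arm: expand the trailing spread first
      } else {
        return Builtin::kCall;            // generic: target may be any object
      }
    }
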
1195 | 1197 |
1196 // static | 1198 // static |
1197 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( | 1199 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( |
1198 MacroAssembler* masm, PushArgsConstructMode mode) { | 1200 MacroAssembler* masm, InterpreterPushArgsMode mode) { |
1199 // ----------- S t a t e ------------- | 1201 // ----------- S t a t e ------------- |
1200 // -- a0 : argument count (not including receiver) | 1202 // -- a0 : argument count (not including receiver) |
1201 // -- a3 : new target | 1203 // -- a3 : new target |
1202 // -- a1 : constructor to call | 1204 // -- a1 : constructor to call |
1203 // -- a2 : allocation site feedback if available, undefined otherwise. | 1205 // -- a2 : allocation site feedback if available, undefined otherwise. |
1204 // -- t4 : address of the first argument | 1206 // -- t4 : address of the first argument |
1205 // ----------------------------------- | 1207 // ----------------------------------- |
1206 Label stack_overflow; | 1208 Label stack_overflow; |
1207 | 1209 |
1208 // Push a slot for the receiver. | 1210 // Push a slot for the receiver. |
1209 __ push(zero_reg); | 1211 __ push(zero_reg); |
1210 | 1212 |
1211 // This function modifies t4, t1 and t0. | 1213 // This function modifies t4, t1 and t0. |
1212 Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow); | 1214 Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow); |
1213 | 1215 |
1214 __ AssertUndefinedOrAllocationSite(a2, t0); | 1216 __ AssertUndefinedOrAllocationSite(a2, t0); |
1215 if (mode == PushArgsConstructMode::kJSFunction) { | 1217 if (mode == InterpreterPushArgsMode::kJSFunction) { |
1216 __ AssertFunction(a1); | 1218 __ AssertFunction(a1); |
1217 | 1219 |
1218 // Tail call to the function-specific construct stub (still in the caller | 1220 // Tail call to the function-specific construct stub (still in the caller |
1219 // context at this point). | 1221 // context at this point). |
1220 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 1222 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
1221 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); | 1223 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); |
1222 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1224 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
1223 __ Jump(at); | 1225 __ Jump(at); |
1224 } else if (mode == PushArgsConstructMode::kWithFinalSpread) { | 1226 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { |
1225 // Call the constructor with a0, a1, and a3 unmodified. | 1227 // Call the constructor with a0, a1, and a3 unmodified. |
1226 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), | 1228 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), |
1227 RelocInfo::CODE_TARGET); | 1229 RelocInfo::CODE_TARGET); |
1228 } else { | 1230 } else { |
1229 DCHECK_EQ(PushArgsConstructMode::kOther, mode); | 1231 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode); |
1230 // Call the constructor with a0, a1, and a3 unmodified. | 1232 // Call the constructor with a0, a1, and a3 unmodified. |
1231 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1233 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
1232 } | 1234 } |
1233 | 1235 |
1234 __ bind(&stack_overflow); | 1236 __ bind(&stack_overflow); |
1235 { | 1237 { |
1236 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1238 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1237 // Unreachable code. | 1239 // Unreachable code. |
1238 __ break_(0xCC); | 1240 __ break_(0xCC); |
1239 } | 1241 } |
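
In the kJSFunction arm above, the two dependent loads chase from the function to its SharedFunctionInfo and on to the function-specific construct stub, then jump past the Code object header. Spelled out with stand-in struct layouts (the offsets and tag follow the assembly; the types are illustrative, not V8's):

    using Address = unsigned char*;

    struct Code               { /* kHeaderSize bytes of header, then code */ };
    struct SharedFunctionInfo { Code* construct_stub; };
    struct JSFunction         { SharedFunctionInfo* shared; };

    constexpr int kHeapObjectTag  = 1;   // V8 heap pointers carry tag bit 1
    constexpr int kCodeHeaderSize = 32;  // stand-in for Code::kHeaderSize

    Address ConstructStubEntry(JSFunction* f) {
      Code* stub = f->shared->construct_stub;  // the two lw loads above
      // Jump target: the first instruction, i.e. past the header, with the
      // heap-object tag subtracted out.
      return reinterpret_cast<Address>(stub) + kCodeHeaderSize - kHeapObjectTag;
    }
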
(...skipping 1379 matching lines...)
2619 | 2621 |
2620 // 3. Call to something that is not callable. | 2622 // 3. Call to something that is not callable. |
2621 __ bind(&non_callable); | 2623 __ bind(&non_callable); |
2622 { | 2624 { |
2623 FrameScope scope(masm, StackFrame::INTERNAL); | 2625 FrameScope scope(masm, StackFrame::INTERNAL); |
2624 __ Push(a1); | 2626 __ Push(a1); |
2625 __ CallRuntime(Runtime::kThrowCalledNonCallable); | 2627 __ CallRuntime(Runtime::kThrowCalledNonCallable); |
2626 } | 2628 } |
2627 } | 2629 } |
2628 | 2630 |
| 2631 static void CheckSpreadAndPushToStack(MacroAssembler* masm) { |
| 2632 Register argc = a0; |
| 2633 Register constructor = a1; |
| 2634 Register new_target = a3; |
| 2635 |
| 2636 Register scratch = t0; |
| 2637 Register scratch2 = t1; |
| 2638 |
| 2639 Register spread = a2; |
| 2640 Register spread_map = t3; |
| 2641 |
| 2642 Register spread_len = t3; |
| 2643 |
| 2644 Register native_context = t4; |
| 2645 |
| 2646 __ lw(spread, MemOperand(sp, 0)); |
| 2647 __ lw(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); |
| 2648 __ lw(native_context, NativeContextMemOperand()); |
| 2649 |
| 2650 Label runtime_call, push_args; |
| 2651 // Check that the spread is an array. |
| 2652 __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset)); |
| 2653 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE)); |
| 2654 |
| 2655 // Check that we have the original ArrayPrototype. |
| 2656 __ lw(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); |
| 2657 __ lw(scratch2, ContextMemOperand(native_context, |
| 2658 Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); |
| 2659 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); |
| 2660 |
| 2661 // Check that the ArrayPrototype hasn't been modified in a way that would |
| 2662 // affect iteration. |
| 2663 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); |
| 2664 __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); |
| 2665 __ Branch(&runtime_call, ne, scratch, |
| 2666 Operand(Smi::FromInt(Isolate::kProtectorValid))); |
| 2667 |
| 2668 // Check that the map of the initial array iterator hasn't changed. |
| 2669 __ lw(scratch, |
| 2670 ContextMemOperand(native_context, |
| 2671 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); |
| 2672 __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 2673 __ lw(scratch2, |
| 2674 ContextMemOperand(native_context, |
| 2675 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); |
| 2676 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); |
| 2677 |
| 2678 // For FastPacked kinds, iteration will have the same effect as simply |
| 2679 // accessing each property in order. |
| 2680 Label no_protector_check; |
| 2681 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); |
| 2682 __ DecodeField<Map::ElementsKindBits>(scratch); |
| 2683 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS)); |
| 2684 // For non-FastHoley kinds, we can skip the protector check. |
| 2685 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); |
| 2686 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); |
| 2687 // Check the ArrayProtector cell. |
| 2688 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); |
| 2689 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); |
| 2690 __ Branch(&runtime_call, ne, scratch, |
| 2691 Operand(Smi::FromInt(Isolate::kProtectorValid))); |
| 2692 |
| 2693 __ bind(&no_protector_check); |
| 2694 // Load the FixedArray backing store, but use the length from the array. |
| 2695 __ lw(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset)); |
| 2696 __ SmiUntag(spread_len); |
| 2697 __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); |
| 2698 __ Branch(&push_args); |
| 2699 |
| 2700 __ bind(&runtime_call); |
| 2701 { |
| 2702 // Call the builtin for the result of the spread. |
| 2703 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2704 __ SmiTag(argc); |
| 2705 __ Push(constructor, new_target, argc, spread); |
| 2706 __ CallRuntime(Runtime::kSpreadIterableFixed); |
| 2707 __ mov(spread, v0); |
| 2708 __ Pop(constructor, new_target, argc); |
| 2709 __ SmiUntag(argc); |
| 2710 } |
| 2711 |
| 2712 { |
| 2713 // Calculate the new nargs including the result of the spread. |
| 2714 __ lw(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset)); |
| 2715 __ SmiUntag(spread_len); |
| 2716 |
| 2717 __ bind(&push_args); |
| 2718 // argc += spread_len - 1. Subtract 1 for the spread itself. |
| 2719 __ Addu(argc, argc, spread_len); |
| 2720 __ Subu(argc, argc, Operand(1)); |
| 2721 |
| 2722 // Pop the spread argument off the stack. |
| 2723 __ Pop(scratch); |
| 2724 } |
| 2725 |
| 2726 // Check for stack overflow. |
| 2727 { |
| 2728 // Check the stack for overflow. We are not trying to catch interruptions |
| 2729 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 2730 Label done; |
| 2731 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); |
| 2732 // Make scratch the space we have left. The stack might already be |
| 2733 // overflowed here, which will cause scratch to become negative. |
| 2734 __ Subu(scratch, sp, scratch); |
| 2735 // Check if the arguments will overflow the stack. |
| 2736 __ sll(at, spread_len, kPointerSizeLog2); |
| 2737 __ Branch(&done, gt, scratch, Operand(at)); // Signed comparison. |
| 2738 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
| 2739 __ bind(&done); |
| 2740 } |
| 2741 |
| 2742 // Put the evaluated spread onto the stack as additional arguments. |
| 2743 { |
| 2744 __ mov(scratch, zero_reg); |
| 2745 Label done, loop; |
| 2746 __ bind(&loop); |
| 2747 __ Branch(&done, eq, scratch, Operand(spread_len)); |
| 2748 __ Lsa(scratch2, spread, scratch, kPointerSizeLog2); |
| 2749 __ lw(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); |
| 2750 __ Push(scratch2); |
| 2751 __ Addu(scratch, scratch, Operand(1)); |
| 2752 __ Branch(&loop); |
| 2753 __ bind(&done); |
| 2754 } |
| 2755 } |
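
The helper's fast path is a chain of guards; if any fails, Runtime::kSpreadIterableFixed materializes the spread into a fresh FixedArray and the same push path is reused. A condensed, compilable model of the guards and the stack check (every field and predicate is a stand-in for the corresponding map, protector-cell, or limit check above):

    #include <cstdint>

    enum ElementsKind {                 // order matches V8's fast kinds
      FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS,
      FAST_ELEMENTS, FAST_HOLEY_ELEMENTS,
      FAST_DOUBLE_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS
    };

    struct SpreadInfo {
      bool is_js_array;                  // JS_ARRAY_TYPE check
      bool has_initial_array_prototype;  // original Array.prototype
      bool iterator_protector_valid;     // ArrayIteratorProtector cell
      bool iterator_map_unchanged;       // initial array iterator map
      bool array_protector_valid;        // ArrayProtector cell
      ElementsKind kind;
    };

    bool CanSpreadFast(const SpreadInfo& s) {
      if (!s.is_js_array) return false;
      if (!s.has_initial_array_prototype) return false;
      if (!s.iterator_protector_valid) return false;
      if (!s.iterator_map_unchanged) return false;
      if (s.kind > FAST_HOLEY_ELEMENTS) return false;  // double kinds -> runtime
      bool packed = s.kind == FAST_SMI_ELEMENTS || s.kind == FAST_ELEMENTS;
      // Holey reads may consult the prototype chain, so holey kinds also
      // need the ArrayProtector to be intact.
      return packed || s.array_protector_valid;
    }

    // After either path: argc += spread_len - 1 (the spread slot itself is
    // popped), then the "real stack limit" check, signed because the stack
    // may already be past the limit:
    bool FitsOnStack(intptr_t sp, intptr_t real_stack_limit, intptr_t spread_len) {
      intptr_t space_left = sp - real_stack_limit;  // may already be negative
      return space_left > (spread_len << 2);        // kPointerSizeLog2 == 2 on MIPS32
    }
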
| 2756 |
| 2757 // static |
| 2758 void Builtins::Generate_CallWithSpread(MacroAssembler* masm) { |
| 2759 // ----------- S t a t e ------------- |
| 2760 // -- a0 : the number of arguments (not including the receiver) |
| 2761 // -- a1 : the target to call (can be any Object). |
| 2762 // ----------------------------------- |
| 2763 |
| 2764 // CheckSpreadAndPushToStack will push a3 to save it. |
| 2765 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); |
| 2766 CheckSpreadAndPushToStack(masm); |
| 2767 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
| 2768 TailCallMode::kDisallow), |
| 2769 RelocInfo::CODE_TARGET); |
| 2770 } |
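
Call ignores the new target, but the shared helper unconditionally saves and restores (constructor, new_target, argc, spread) around its runtime fallback, so a3 must hold something the GC can tolerate; undefined is the placeholder. How the two spread builtins compose over the helper, as a stand-in outline (none of these are real V8 APIs):

    enum class Builtin { kCall, kConstruct };
    void SeedNewTargetWithUndefined();  // LoadRoot(a3, Heap::kUndefinedValueRootIndex)
    void CheckSpreadAndPushToStack();   // the shared helper above
    void TailCall(Builtin b);           // Jump(..., RelocInfo::CODE_TARGET)

    void CallWithSpread() {
      SeedNewTargetWithUndefined();     // dummy new_target, saved/restored by the helper
      CheckSpreadAndPushToStack();
      TailCall(Builtin::kCall);         // with TailCallMode::kDisallow
    }

    void ConstructWithSpread() {        // the next hunk reduces the builtin to this
      CheckSpreadAndPushToStack();      // a3 already holds the real new target
      TailCall(Builtin::kConstruct);
    }
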
| 2771 |
2629 // static | 2772 // static |
2630 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | 2773 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { |
2631 // ----------- S t a t e ------------- | 2774 // ----------- S t a t e ------------- |
2632 // -- a0 : the number of arguments (not including the receiver) | 2775 // -- a0 : the number of arguments (not including the receiver) |
2633 // -- a1 : the constructor to call (checked to be a JSFunction) | 2776 // -- a1 : the constructor to call (checked to be a JSFunction) |
2634 // -- a3 : the new target (checked to be a constructor) | 2777 // -- a3 : the new target (checked to be a constructor) |
2635 // ----------------------------------- | 2778 // ----------------------------------- |
2636 __ AssertFunction(a1); | 2779 __ AssertFunction(a1); |
2637 | 2780 |
2638 // Calling convention for function-specific ConstructStubs requires | 2781 // Calling convention for function-specific ConstructStubs requires |
(...skipping 170 matching lines...)
2809 | 2952 |
2810 // static | 2953 // static |
2811 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { | 2954 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { |
2812 // ----------- S t a t e ------------- | 2955 // ----------- S t a t e ------------- |
2813 // -- a0 : the number of arguments (not including the receiver) | 2956 // -- a0 : the number of arguments (not including the receiver) |
2814 // -- a1 : the constructor to call (can be any Object) | 2957 // -- a1 : the constructor to call (can be any Object) |
2815 // -- a3 : the new target (either the same as the constructor or | 2958 // -- a3 : the new target (either the same as the constructor or |
2816 // the JSFunction on which new was invoked initially) | 2959 // the JSFunction on which new was invoked initially) |
2817 // ----------------------------------- | 2960 // ----------------------------------- |
2818 | 2961 |
2819 Register argc = a0; | 2962 CheckSpreadAndPushToStack(masm); |
2820 Register constructor = a1; | |
2821 Register new_target = a3; | |
2822 | |
2823 Register scratch = t0; | |
2824 Register scratch2 = t1; | |
2825 | |
2826 Register spread = a2; | |
2827 Register spread_map = t3; | |
2828 | |
2829 Register native_context = t4; | |
2830 | |
2831 __ lw(spread, MemOperand(sp, 0)); | |
2832 __ lw(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); | |
2833 __ lw(native_context, NativeContextMemOperand()); | |
2834 | |
2835 Label runtime_call, push_args; | |
2836 // Check that the spread is an array. | |
2837 __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset)); | |
2838 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE)); | |
2839 | |
2840 // Check that we have the original ArrayPrototype. | |
2841 __ lw(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); | |
2842 __ lw(scratch2, ContextMemOperand(native_context, | |
2843 Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); | |
2844 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); | |
2845 | |
2846 // Check that the ArrayPrototype hasn't been modified in a way that would | |
2847 // affect iteration. | |
2848 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); | |
2849 __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); | |
2850 __ Branch(&runtime_call, ne, scratch, | |
2851 Operand(Smi::FromInt(Isolate::kProtectorValid))); | |
2852 | |
2853 // Check that the map of the initial array iterator hasn't changed. | |
2854 __ lw(scratch, | |
2855 ContextMemOperand(native_context, | |
2856 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); | |
2857 __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
2858 __ lw(scratch2, | |
2859 ContextMemOperand(native_context, | |
2860 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); | |
2861 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); | |
2862 | |
2863 // For FastPacked kinds, iteration will have the same effect as simply | |
2864 // accessing each property in order. | |
2865 Label no_protector_check; | |
2866 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); | |
2867 __ DecodeField<Map::ElementsKindBits>(scratch); | |
2868 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS)); | |
2869 // For non-FastHoley kinds, we can skip the protector check. | |
2870 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); | |
2871 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); | |
2872 // Check the ArrayProtector cell. | |
2873 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); | |
2874 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); | |
2875 __ Branch(&runtime_call, ne, scratch, | |
2876 Operand(Smi::FromInt(Isolate::kProtectorValid))); | |
2877 | |
2878 __ bind(&no_protector_check); | |
2879 // Load the FixedArray backing store. | |
2880 __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); | |
2881 __ Branch(&push_args); | |
2882 | |
2883 __ bind(&runtime_call); | |
2884 { | |
2885 // Call the builtin for the result of the spread. | |
2886 FrameScope scope(masm, StackFrame::INTERNAL); | |
2887 __ SmiTag(argc); | |
2888 __ Push(constructor, new_target, argc, spread); | |
2889 __ CallRuntime(Runtime::kSpreadIterableFixed); | |
2890 __ mov(spread, v0); | |
2891 __ Pop(constructor, new_target, argc); | |
2892 __ SmiUntag(argc); | |
2893 } | |
2894 | |
2895 Register spread_len = t3; | |
2896 __ bind(&push_args); | |
2897 { | |
2898 // Pop the spread argument off the stack. | |
2899 __ Pop(scratch); | |
2900 // Calculate the new nargs including the result of the spread. | |
2901 __ lw(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset)); | |
2902 __ SmiUntag(spread_len); | |
2903 // argc += spread_len - 1. Subtract 1 for the spread itself. | |
2904 __ Addu(argc, argc, spread_len); | |
2905 __ Subu(argc, argc, Operand(1)); | |
2906 } | |
2907 | |
2908 // Check for stack overflow. | |
2909 { | |
2910 // Check the stack for overflow. We are not trying to catch interruptions | |
2911 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2912 Label done; | |
2913 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); | |
2914 // Make scratch the space we have left. The stack might already be | |
2915 // overflowed here, which will cause scratch to become negative. | 
2916 __ Subu(scratch, sp, scratch); | |
2917 // Check if the arguments will overflow the stack. | |
2918 __ sll(at, spread_len, kPointerSizeLog2); | |
2919 __ Branch(&done, gt, scratch, Operand(at)); // Signed comparison. | |
2920 __ TailCallRuntime(Runtime::kThrowStackOverflow); | |
2921 __ bind(&done); | |
2922 } | |
2923 | |
2924 // Put the evaluated spread onto the stack as additional arguments. | |
2925 { | |
2926 __ mov(scratch, zero_reg); | |
2927 Label done, loop; | |
2928 __ bind(&loop); | |
2929 __ Branch(&done, eq, scratch, Operand(spread_len)); | |
2930 __ Lsa(scratch2, spread, scratch, kPointerSizeLog2); | |
2931 __ lw(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); | |
2932 __ Push(scratch2); | |
2933 __ Addu(scratch, scratch, Operand(1)); | |
2934 __ Branch(&loop); | |
2935 __ bind(&done); | |
2936 } | |
2937 | |
2938 // Dispatch. | |
2939 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 2963 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
2940 } | 2964 } |
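
Besides deduplicating the ~120 lines on the left into CheckSpreadAndPushToStack, the move carries one visible fix, flagged by the new comment "Load the FixedArray backing store, but use the length from the array": the fast path now takes the element count from the JSArray header rather than from the backing FixedArray, whose capacity may exceed the array's length. Illustrated with stand-in layouts:

    // Stand-in layouts: in V8, a JSArray's backing store may be
    // over-allocated, so FixedArray's length is capacity, not element count.
    struct FixedArray { int length; /* 'length' slots, possibly spare */ };
    struct JSArray    { int length; FixedArray* elements; };

    int SpreadLenNew(const JSArray* a) { return a->length; }            // JSArray::kLengthOffset
    int SpreadLenOld(const JSArray* a) { return a->elements->length; }  // FixedArray::kLengthOffset
    // The runtime fallback returns a freshly allocated FixedArray, where
    // the two lengths coincide, so only the fast path needed the change.
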
2941 | 2965 |
2942 // static | 2966 // static |
2943 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { | 2967 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { |
2944 // ----------- S t a t e ------------- | 2968 // ----------- S t a t e ------------- |
2945 // -- a0 : requested object size (untagged) | 2969 // -- a0 : requested object size (untagged) |
2946 // -- ra : return address | 2970 // -- ra : return address |
2947 // ----------------------------------- | 2971 // ----------------------------------- |
2948 __ SmiTag(a0); | 2972 __ SmiTag(a0); |
(...skipping 155 matching lines...)
3104 __ break_(0xCC); | 3128 __ break_(0xCC); |
3105 } | 3129 } |
3106 } | 3130 } |
3107 | 3131 |
3108 #undef __ | 3132 #undef __ |
3109 | 3133 |
3110 } // namespace internal | 3134 } // namespace internal |
3111 } // namespace v8 | 3135 } // namespace v8 |
3112 | 3136 |
3113 #endif // V8_TARGET_ARCH_MIPS | 3137 #endif // V8_TARGET_ARCH_MIPS |