OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 1178 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1189 __ bind(&stack_overflow); | 1189 __ bind(&stack_overflow); |
1190 { | 1190 { |
1191 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1191 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1192 // Unreachable code. | 1192 // Unreachable code. |
1193 __ break_(0xCC); | 1193 __ break_(0xCC); |
1194 } | 1194 } |
1195 } | 1195 } |
1196 | 1196 |
1197 // static | 1197 // static |
1198 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( | 1198 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( |
1199 MacroAssembler* masm, CallableType construct_type) { | 1199 MacroAssembler* masm, PushArgsConstructMode mode) { |
1200 // ----------- S t a t e ------------- | 1200 // ----------- S t a t e ------------- |
1201 // -- a0 : argument count (not including receiver) | 1201 // -- a0 : argument count (not including receiver) |
1202 // -- a3 : new target | 1202 // -- a3 : new target |
1203 // -- a1 : constructor to call | 1203 // -- a1 : constructor to call |
1204 // -- a2 : allocation site feedback if available, undefined otherwise. | 1204 // -- a2 : allocation site feedback if available, undefined otherwise. |
1205 // -- t4 : address of the first argument | 1205 // -- t4 : address of the first argument |
1206 // ----------------------------------- | 1206 // ----------------------------------- |
1207 Label stack_overflow; | 1207 Label stack_overflow; |
1208 | 1208 |
1209 // Push a slot for the receiver. | 1209 // Push a slot for the receiver. |
1210 __ push(zero_reg); | 1210 __ push(zero_reg); |
1211 | 1211 |
1212 // This function modifies t4, t1 and t0. | 1212 // This function modifies t4, t1 and t0. |
1213 Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow); | 1213 Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow); |
1214 | 1214 |
1215 __ AssertUndefinedOrAllocationSite(a2, t0); | 1215 __ AssertUndefinedOrAllocationSite(a2, t0); |
1216 if (construct_type == CallableType::kJSFunction) { | 1216 if (mode == PushArgsConstructMode::kJSFunction) { |
1217 __ AssertFunction(a1); | 1217 __ AssertFunction(a1); |
1218 | 1218 |
1219 // Tail call to the function-specific construct stub (still in the caller | 1219 // Tail call to the function-specific construct stub (still in the caller |
1220 // context at this point). | 1220 // context at this point). |
1221 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 1221 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
1222 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); | 1222 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); |
1223 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1223 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
1224 __ Jump(at); | 1224 __ Jump(at); |
| 1225 } else if (mode == PushArgsConstructMode::kWithFinalSpread) { |
| 1226 // Call the constructor with a0, a1, and a3 unmodified. |
| 1227 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), |
| 1228 RelocInfo::CODE_TARGET); |
1225 } else { | 1229 } else { |
1226 DCHECK_EQ(construct_type, CallableType::kAny); | 1230 DCHECK_EQ(PushArgsConstructMode::kOther, mode); |
1227 // Call the constructor with a0, a1, and a3 unmodified. | 1231 // Call the constructor with a0, a1, and a3 unmodified. |
1228 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1232 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
1229 } | 1233 } |
1230 | 1234 |
1231 __ bind(&stack_overflow); | 1235 __ bind(&stack_overflow); |
1232 { | 1236 { |
1233 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1237 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1234 // Unreachable code. | 1238 // Unreachable code. |
1235 __ break_(0xCC); | 1239 __ break_(0xCC); |
1236 } | 1240 } |
(...skipping 1551 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2788 } | 2792 } |
2789 | 2793 |
2790 // Called Construct on an Object that doesn't have a [[Construct]] internal | 2794 // Called Construct on an Object that doesn't have a [[Construct]] internal |
2791 // method. | 2795 // method. |
2792 __ bind(&non_constructor); | 2796 __ bind(&non_constructor); |
2793 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), | 2797 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), |
2794 RelocInfo::CODE_TARGET); | 2798 RelocInfo::CODE_TARGET); |
2795 } | 2799 } |
2796 | 2800 |
2797 // static | 2801 // static |
| 2802 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { |
| 2803 // ----------- S t a t e ------------- |
| 2804 // -- a0 : the number of arguments (not including the receiver) |
| 2805 // -- a1 : the constructor to call (can be any Object) |
| 2806 // -- a3 : the new target (either the same as the constructor or |
| 2807 // the JSFunction on which new was invoked initially) |
| 2808 // ----------------------------------- |
| 2809 |
| 2810 Register argc = a0; |
| 2811 Register constructor = a1; |
| 2812 Register new_target = a3; |
| 2813 |
| 2814 Register scratch = t0; |
| 2815 Register scratch2 = t1; |
| 2816 |
| 2817 Register spread = a2; |
| 2818 Register spread_map = t3; |
| 2819 |
| 2820 Register native_context = t4; |
| 2821 |
| 2822 __ lw(spread, MemOperand(sp, 0)); |
| 2823 __ lw(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); |
| 2824 __ lw(native_context, NativeContextMemOperand()); |
| 2825 |
| 2826 Label runtime_call, push_args; |
| 2827 // Check that the spread is an array. |
| 2828 __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset)); |
| 2829 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE)); |
| 2830 |
| 2831 // Check that we have the original ArrayPrototype. |
| 2832 __ lw(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); |
| 2833 __ lw(scratch2, ContextMemOperand(native_context, |
| 2834 Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); |
| 2835 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); |
| 2836 |
| 2837 // Check that the ArrayPrototype hasn't been modified in a way that would |
| 2838 // affect iteration. |
| 2839 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); |
| 2840 __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); |
| 2841 __ Branch(&runtime_call, ne, scratch, |
| 2842 Operand(Smi::FromInt(Isolate::kProtectorValid))); |
| 2843 |
| 2844 // Check that the map of the initial array iterator hasn't changed. |
| 2845 __ lw(scratch, |
| 2846 ContextMemOperand(native_context, |
| 2847 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); |
| 2848 __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 2849 __ lw(scratch2, |
| 2850 ContextMemOperand(native_context, |
| 2851 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); |
| 2852 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); |
| 2853 |
| 2854 // For FastPacked kinds, iteration will have the same effect as simply |
| 2855 // accessing each property in order. |
| 2856 Label no_protector_check; |
| 2857 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); |
| 2858 __ DecodeField<Map::ElementsKindBits>(scratch); |
| 2859 __ Branch(&runtime_call, hi, scratch, Operand(LAST_FAST_ELEMENTS_KIND)); |
| 2860 // For non-FastHoley kinds, we can skip the protector check. |
| 2861 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); |
| 2862 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); |
| 2863 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_DOUBLE_ELEMENTS)); |
| 2864 // Check the ArrayProtector cell. |
| 2865 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); |
| 2866 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); |
| 2867 __ Branch(&runtime_call, ne, scratch, |
| 2868 Operand(Smi::FromInt(Isolate::kProtectorValid))); |
| 2869 |
| 2870 __ bind(&no_protector_check); |
| 2871 // Load the FixedArray backing store. |
| 2872 __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); |
| 2873 __ Branch(&push_args); |
| 2874 |
| 2875 __ bind(&runtime_call); |
| 2876 { |
| 2877 // Call the builtin for the result of the spread. |
| 2878 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2879 __ SmiTag(argc); |
| 2880 __ Push(constructor, new_target, argc, spread); |
| 2881 __ CallRuntime(Runtime::kSpreadIterableFixed); |
| 2882 __ mov(spread, v0); |
| 2883 __ Pop(constructor, new_target, argc); |
| 2884 __ SmiUntag(argc); |
| 2885 } |
| 2886 |
| 2887 Register spread_len = t3; |
| 2888 __ bind(&push_args); |
| 2889 { |
| 2890 // Pop the spread argument off the stack. |
| 2891 __ Pop(scratch); |
| 2892 // Calculate the new nargs including the result of the spread. |
| 2893 __ lw(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset)); |
| 2894 __ SmiUntag(spread_len); |
| 2895 // argc += spread_len - 1. Subtract 1 for the spread itself. |
| 2896 __ Addu(argc, argc, spread_len); |
| 2897 __ Subu(argc, argc, Operand(1)); |
| 2898 } |
| 2899 |
| 2900 // Check for stack overflow. |
| 2901 { |
| 2902 // Check the stack for overflow. We are not trying to catch interruptions |
| 2903 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 2904 Label done; |
| 2905 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); |
| 2906 // Make scratch the space we have left. The stack might already be |
| 2907 // overflowed here which will cause scratch to become negative. |
| 2908 __ Subu(scratch, sp, scratch); |
| 2909 // Check if the arguments will overflow the stack. |
| 2910 __ sll(at, spread_len, kPointerSizeLog2); |
| 2911 __ Branch(&done, gt, scratch, Operand(at)); // Signed comparison. |
| 2912 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
| 2913 __ bind(&done); |
| 2914 } |
| 2915 |
| 2916 // Put the evaluated spread onto the stack as additional arguments. |
| 2917 { |
| 2918 __ mov(scratch, zero_reg); |
| 2919 Label done, loop; |
| 2920 __ bind(&loop); |
| 2921 __ Branch(&done, eq, scratch, Operand(spread_len)); |
| 2922 __ Lsa(scratch2, spread, scratch, kPointerSizeLog2); |
| 2923 __ lw(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); |
| 2924 __ Push(scratch2); |
| 2925 __ Addu(scratch, scratch, Operand(1)); |
| 2926 __ Branch(&loop); |
| 2927 __ bind(&done); |
| 2928 } |
| 2929 |
| 2930 // Dispatch. |
| 2931 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 2932 } |
| 2933 |
| 2934 // static |
2798 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { | 2935 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { |
2799 // ----------- S t a t e ------------- | 2936 // ----------- S t a t e ------------- |
2800 // -- a0 : requested object size (untagged) | 2937 // -- a0 : requested object size (untagged) |
2801 // -- ra : return address | 2938 // -- ra : return address |
2802 // ----------------------------------- | 2939 // ----------------------------------- |
2803 __ SmiTag(a0); | 2940 __ SmiTag(a0); |
2804 __ Push(a0); | 2941 __ Push(a0); |
2805 __ Move(cp, Smi::kZero); | 2942 __ Move(cp, Smi::kZero); |
2806 __ TailCallRuntime(Runtime::kAllocateInNewSpace); | 2943 __ TailCallRuntime(Runtime::kAllocateInNewSpace); |
2807 } | 2944 } |
(...skipping 151 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2959 __ break_(0xCC); | 3096 __ break_(0xCC); |
2960 } | 3097 } |
2961 } | 3098 } |
2962 | 3099 |
2963 #undef __ | 3100 #undef __ |
2964 | 3101 |
2965 } // namespace internal | 3102 } // namespace internal |
2966 } // namespace v8 | 3103 } // namespace v8 |
2967 | 3104 |
2968 #endif // V8_TARGET_ARCH_MIPS | 3105 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |