OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
9 #include "src/debug/debug.h" | 9 #include "src/debug/debug.h" |
10 #include "src/deoptimizer.h" | 10 #include "src/deoptimizer.h" |
(...skipping 1144 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1155 __ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex)); | 1155 __ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex)); |
1156 __ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex)); | 1156 __ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex)); |
1157 __ Bind(&loop_check); | 1157 __ Bind(&loop_check); |
1158 __ Cmp(index, last_arg); | 1158 __ Cmp(index, last_arg); |
1159 __ B(gt, &loop_header); | 1159 __ B(gt, &loop_header); |
1160 } | 1160 } |
1161 | 1161 |
1162 // static | 1162 // static |
1163 void Builtins::Generate_InterpreterPushArgsAndCallImpl( | 1163 void Builtins::Generate_InterpreterPushArgsAndCallImpl( |
1164 MacroAssembler* masm, TailCallMode tail_call_mode, | 1164 MacroAssembler* masm, TailCallMode tail_call_mode, |
1165 CallableType function_type) { | 1165 InterpreterPushArgsMode mode) { |
1166 // ----------- S t a t e ------------- | 1166 // ----------- S t a t e ------------- |
1167 // -- x0 : the number of arguments (not including the receiver) | 1167 // -- x0 : the number of arguments (not including the receiver) |
1168 // -- x2 : the address of the first argument to be pushed. Subsequent | 1168 // -- x2 : the address of the first argument to be pushed. Subsequent |
1169 // arguments should be consecutive above this, in the same order as | 1169 // arguments should be consecutive above this, in the same order as |
1170 // they are to be pushed onto the stack. | 1170 // they are to be pushed onto the stack. |
1171 // -- x1 : the target to call (can be any Object). | 1171 // -- x1 : the target to call (can be any Object). |
1172 // ----------------------------------- | 1172 // ----------------------------------- |
1173 Label stack_overflow; | 1173 Label stack_overflow; |
1174 | 1174 |
1175 // Add one for the receiver. | 1175 // Add one for the receiver. |
1176 __ add(x3, x0, Operand(1)); | 1176 __ add(x3, x0, Operand(1)); |
1177 | 1177 |
1178 // Push the arguments. x2, x4, x5, x6 will be modified. | 1178 // Push the arguments. x2, x4, x5, x6 will be modified. |
1179 Generate_InterpreterPushArgs(masm, x3, x2, x4, x5, x6, &stack_overflow); | 1179 Generate_InterpreterPushArgs(masm, x3, x2, x4, x5, x6, &stack_overflow); |
1180 | 1180 |
1181 // Call the target. | 1181 // Call the target. |
1182 if (function_type == CallableType::kJSFunction) { | 1182 if (mode == InterpreterPushArgsMode::kJSFunction) { |
1183 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, | 1183 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, |
1184 tail_call_mode), | 1184 tail_call_mode), |
1185 RelocInfo::CODE_TARGET); | 1185 RelocInfo::CODE_TARGET); |
| 1186 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { |
| 1187 __ Jump(masm->isolate()->builtins()->CallWithSpread(), |
| 1188 RelocInfo::CODE_TARGET); |
1186 } else { | 1189 } else { |
1187 DCHECK_EQ(function_type, CallableType::kAny); | |
1188 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, | 1190 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
1189 tail_call_mode), | 1191 tail_call_mode), |
1190 RelocInfo::CODE_TARGET); | 1192 RelocInfo::CODE_TARGET); |
1191 } | 1193 } |
1192 | 1194 |
1193 __ bind(&stack_overflow); | 1195 __ bind(&stack_overflow); |
1194 { | 1196 { |
1195 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1197 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1196 __ Unreachable(); | 1198 __ Unreachable(); |
1197 } | 1199 } |
1198 } | 1200 } |
1199 | 1201 |
1200 // static | 1202 // static |
1201 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( | 1203 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( |
1202 MacroAssembler* masm, PushArgsConstructMode mode) { | 1204 MacroAssembler* masm, InterpreterPushArgsMode mode) { |
1203 // ----------- S t a t e ------------- | 1205 // ----------- S t a t e ------------- |
1204 // -- x0 : argument count (not including receiver) | 1206 // -- x0 : argument count (not including receiver) |
1205 // -- x3 : new target | 1207 // -- x3 : new target |
1206 // -- x1 : constructor to call | 1208 // -- x1 : constructor to call |
1207 // -- x2 : allocation site feedback if available, undefined otherwise | 1209 // -- x2 : allocation site feedback if available, undefined otherwise |
1208 // -- x4 : address of the first argument | 1210 // -- x4 : address of the first argument |
1209 // ----------------------------------- | 1211 // ----------------------------------- |
1210 Label stack_overflow; | 1212 Label stack_overflow; |
1211 | 1213 |
1212 // Push a slot for the receiver. | 1214 // Push a slot for the receiver. |
1213 __ Push(xzr); | 1215 __ Push(xzr); |
1214 | 1216 |
1215 // Push the arguments. x5, x4, x6, x7 will be modified. | 1217 // Push the arguments. x5, x4, x6, x7 will be modified. |
1216 Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow); | 1218 Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow); |
1217 | 1219 |
1218 __ AssertUndefinedOrAllocationSite(x2, x6); | 1220 __ AssertUndefinedOrAllocationSite(x2, x6); |
1219 if (mode == PushArgsConstructMode::kJSFunction) { | 1221 if (mode == InterpreterPushArgsMode::kJSFunction) { |
1220 __ AssertFunction(x1); | 1222 __ AssertFunction(x1); |
1221 | 1223 |
1222 // Tail call to the function-specific construct stub (still in the caller | 1224 // Tail call to the function-specific construct stub (still in the caller |
1223 // context at this point). | 1225 // context at this point). |
1224 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); | 1226 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); |
1225 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); | 1227 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); |
1226 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); | 1228 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); |
1227 __ Br(x4); | 1229 __ Br(x4); |
1228 } else if (mode == PushArgsConstructMode::kWithFinalSpread) { | 1230 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { |
1229 // Call the constructor with x0, x1, and x3 unmodified. | 1231 // Call the constructor with x0, x1, and x3 unmodified. |
1230 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), | 1232 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), |
1231 RelocInfo::CODE_TARGET); | 1233 RelocInfo::CODE_TARGET); |
1232 } else { | 1234 } else { |
1233 DCHECK_EQ(PushArgsConstructMode::kOther, mode); | 1235 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode); |
1234 // Call the constructor with x0, x1, and x3 unmodified. | 1236 // Call the constructor with x0, x1, and x3 unmodified. |
1235 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1237 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
1236 } | 1238 } |
1237 | 1239 |
1238 __ bind(&stack_overflow); | 1240 __ bind(&stack_overflow); |
1239 { | 1241 { |
1240 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1242 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1241 __ Unreachable(); | 1243 __ Unreachable(); |
1242 } | 1244 } |
1243 } | 1245 } |
(...skipping 1445 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2689 | 2691 |
2690 // 3. Call to something that is not callable. | 2692 // 3. Call to something that is not callable. |
2691 __ bind(&non_callable); | 2693 __ bind(&non_callable); |
2692 { | 2694 { |
2693 FrameScope scope(masm, StackFrame::INTERNAL); | 2695 FrameScope scope(masm, StackFrame::INTERNAL); |
2694 __ Push(x1); | 2696 __ Push(x1); |
2695 __ CallRuntime(Runtime::kThrowCalledNonCallable); | 2697 __ CallRuntime(Runtime::kThrowCalledNonCallable); |
2696 } | 2698 } |
2697 } | 2699 } |
2698 | 2700 |
| 2701 static void CheckSpreadAndPushToStack(MacroAssembler* masm) { |
| 2702 Register argc = x0; |
| 2703 Register constructor = x1; |
| 2704 Register new_target = x3; |
| 2705 |
| 2706 Register scratch = x2; |
| 2707 Register scratch2 = x6; |
| 2708 |
| 2709 Register spread = x4; |
| 2710 Register spread_map = x5; |
| 2711 |
| 2712 Register spread_len = x5; |
| 2713 |
| 2714 __ Peek(spread, 0); |
| 2715 __ Ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); |
| 2716 |
| 2717 Label runtime_call, push_args; |
| 2718 // Check that the spread is an array. |
| 2719 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE); |
| 2720 __ B(ne, &runtime_call); |
| 2721 |
| 2722 // Check that we have the original ArrayPrototype. |
| 2723 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); |
| 2724 __ Ldr(scratch2, NativeContextMemOperand()); |
| 2725 __ Ldr(scratch2, |
| 2726 ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); |
| 2727 __ Cmp(scratch, scratch2); |
| 2728 __ B(ne, &runtime_call); |
| 2729 |
| 2730 // Check that the ArrayPrototype hasn't been modified in a way that would |
| 2731 // affect iteration. |
| 2732 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); |
| 2733 __ Ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); |
| 2734 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid)); |
| 2735 __ B(ne, &runtime_call); |
| 2736 |
| 2737 // Check that the map of the initial array iterator hasn't changed. |
| 2738 __ Ldr(scratch2, NativeContextMemOperand()); |
| 2739 __ Ldr(scratch, |
| 2740 ContextMemOperand(scratch2, |
| 2741 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); |
| 2742 __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 2743 __ Ldr(scratch2, |
| 2744 ContextMemOperand( |
| 2745 scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); |
| 2746 __ Cmp(scratch, scratch2); |
| 2747 __ B(ne, &runtime_call); |
| 2748 |
| 2749 // For FastPacked kinds, iteration will have the same effect as simply |
| 2750 // accessing each property in order. |
| 2751 Label no_protector_check; |
| 2752 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); |
| 2753 __ DecodeField<Map::ElementsKindBits>(scratch); |
| 2754 __ Cmp(scratch, FAST_HOLEY_ELEMENTS); |
| 2755 __ B(hi, &runtime_call); |
| 2756 // For non-FastHoley kinds, we can skip the protector check. |
| 2757 __ Cmp(scratch, FAST_SMI_ELEMENTS); |
| 2758 __ B(eq, &no_protector_check); |
| 2759 __ Cmp(scratch, FAST_ELEMENTS); |
| 2760 __ B(eq, &no_protector_check); |
| 2761 // Check the ArrayProtector cell. |
| 2762 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); |
| 2763 __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); |
| 2764 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid)); |
| 2765 __ B(ne, &runtime_call); |
| 2766 |
| 2767 __ Bind(&no_protector_check); |
| 2768 // Load the FixedArray backing store, but use the length from the array. |
| 2769 __ Ldrsw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset)); |
| 2770 __ Ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); |
| 2771 __ B(&push_args); |
| 2772 |
| 2773 __ Bind(&runtime_call); |
| 2774 { |
| 2775 // Call the builtin for the result of the spread. |
| 2776 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2777 __ SmiTag(argc); |
| 2778 __ Push(constructor, new_target, argc, spread); |
| 2779 __ CallRuntime(Runtime::kSpreadIterableFixed); |
| 2780 __ Mov(spread, x0); |
| 2781 __ Pop(argc, new_target, constructor); |
| 2782 __ SmiUntag(argc); |
| 2783 } |
| 2784 |
| 2785 { |
| 2786 // Calculate the new nargs including the result of the spread. |
| 2787 __ Ldrsw(spread_len, |
| 2788 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset)); |
| 2789 |
| 2790 __ Bind(&push_args); |
| 2791 // argc += spread_len - 1. Subtract 1 for the spread itself. |
| 2792 __ Add(argc, argc, spread_len); |
| 2793 __ Sub(argc, argc, 1); |
| 2794 |
| 2795 // Pop the spread argument off the stack. |
| 2796 __ Pop(scratch); |
| 2797 } |
| 2798 |
| 2799 // Check for stack overflow. |
| 2800 { |
| 2801 // Check the stack for overflow. We are not trying to catch interruptions |
| 2802 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 2803 Label done; |
| 2804 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); |
| 2805 // Make scratch the space we have left. The stack might already be |
| 2806 // overflowed here which will cause scratch to become negative. |
| 2807 __ Sub(scratch, masm->StackPointer(), scratch); |
| 2808 // Check if the arguments will overflow the stack. |
| 2809 __ Cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2)); |
| 2810 __ B(gt, &done); // Signed comparison. |
| 2811 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
| 2812 __ Bind(&done); |
| 2813 } |
| 2814 |
| 2815 // Put the evaluated spread onto the stack as additional arguments. |
| 2816 { |
| 2817 __ Mov(scratch, 0); |
| 2818 Label done, loop; |
| 2819 __ Bind(&loop); |
| 2820 __ Cmp(scratch, spread_len); |
| 2821 __ B(eq, &done); |
| 2822 __ Add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2)); |
| 2823 __ Ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); |
| 2824 __ Push(scratch2); |
| 2825 __ Add(scratch, scratch, Operand(1)); |
| 2826 __ B(&loop); |
| 2827 __ Bind(&done); |
| 2828 } |
| 2829 } |
| 2830 |
| 2831 // static |
| 2832 void Builtins::Generate_CallWithSpread(MacroAssembler* masm) { |
| 2833 // ----------- S t a t e ------------- |
| 2834 // -- x0 : the number of arguments (not including the receiver) |
| 2835 // -- x1 : the constructor to call (can be any Object) |
| 2836 // ----------------------------------- |
| 2837 |
| 2838 // CheckSpreadAndPushToStack will push x3 to save it. |
| 2839 __ LoadRoot(x3, Heap::kUndefinedValueRootIndex); |
| 2840 CheckSpreadAndPushToStack(masm); |
| 2841 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
| 2842 TailCallMode::kDisallow), |
| 2843 RelocInfo::CODE_TARGET); |
| 2844 } |
| 2845 |
2699 // static | 2846 // static |
2700 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | 2847 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { |
2701 // ----------- S t a t e ------------- | 2848 // ----------- S t a t e ------------- |
2702 // -- x0 : the number of arguments (not including the receiver) | 2849 // -- x0 : the number of arguments (not including the receiver) |
2703 // -- x1 : the constructor to call (checked to be a JSFunction) | 2850 // -- x1 : the constructor to call (checked to be a JSFunction) |
2704 // -- x3 : the new target (checked to be a constructor) | 2851 // -- x3 : the new target (checked to be a constructor) |
2705 // ----------------------------------- | 2852 // ----------------------------------- |
2706 __ AssertFunction(x1); | 2853 __ AssertFunction(x1); |
2707 | 2854 |
2708 // Calling convention for function specific ConstructStubs require | 2855 // Calling convention for function specific ConstructStubs require |
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2818 | 2965 |
2819 // static | 2966 // static |
2820 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { | 2967 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { |
2821 // ----------- S t a t e ------------- | 2968 // ----------- S t a t e ------------- |
2822 // -- x0 : the number of arguments (not including the receiver) | 2969 // -- x0 : the number of arguments (not including the receiver) |
2823 // -- x1 : the constructor to call (can be any Object) | 2970 // -- x1 : the constructor to call (can be any Object) |
2824 // -- x3 : the new target (either the same as the constructor or | 2971 // -- x3 : the new target (either the same as the constructor or |
2825 // the JSFunction on which new was invoked initially) | 2972 // the JSFunction on which new was invoked initially) |
2826 // ----------------------------------- | 2973 // ----------------------------------- |
2827 | 2974 |
2828 Register argc = x0; | 2975 CheckSpreadAndPushToStack(masm); |
2829 Register constructor = x1; | |
2830 Register new_target = x3; | |
2831 | |
2832 Register scratch = x2; | |
2833 Register scratch2 = x6; | |
2834 | |
2835 Register spread = x4; | |
2836 Register spread_map = x5; | |
2837 __ Peek(spread, 0); | |
2838 __ Ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); | |
2839 | |
2840 Label runtime_call, push_args; | |
2841 // Check that the spread is an array. | |
2842 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE); | |
2843 __ B(ne, &runtime_call); | |
2844 | |
2845 // Check that we have the original ArrayPrototype. | |
2846 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); | |
2847 __ Ldr(scratch2, NativeContextMemOperand()); | |
2848 __ Ldr(scratch2, | |
2849 ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); | |
2850 __ Cmp(scratch, scratch2); | |
2851 __ B(ne, &runtime_call); | |
2852 | |
2853 // Check that the ArrayPrototype hasn't been modified in a way that would | |
2854 // affect iteration. | |
2855 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); | |
2856 __ Ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); | |
2857 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid)); | |
2858 __ B(ne, &runtime_call); | |
2859 | |
2860 // Check that the map of the initial array iterator hasn't changed. | |
2861 __ Ldr(scratch2, NativeContextMemOperand()); | |
2862 __ Ldr(scratch, | |
2863 ContextMemOperand(scratch2, | |
2864 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); | |
2865 __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
2866 __ Ldr(scratch2, | |
2867 ContextMemOperand( | |
2868 scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); | |
2869 __ Cmp(scratch, scratch2); | |
2870 __ B(ne, &runtime_call); | |
2871 | |
2872 // For FastPacked kinds, iteration will have the same effect as simply | |
2873 // accessing each property in order. | |
2874 Label no_protector_check; | |
2875 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); | |
2876 __ DecodeField<Map::ElementsKindBits>(scratch); | |
2877 __ Cmp(scratch, FAST_HOLEY_ELEMENTS); | |
2878 __ B(hi, &runtime_call); | |
2879 // For non-FastHoley kinds, we can skip the protector check. | |
2880 __ Cmp(scratch, FAST_SMI_ELEMENTS); | |
2881 __ B(eq, &no_protector_check); | |
2882 __ Cmp(scratch, FAST_ELEMENTS); | |
2883 __ B(eq, &no_protector_check); | |
2884 // Check the ArrayProtector cell. | |
2885 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); | |
2886 __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); | |
2887 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid)); | |
2888 __ B(ne, &runtime_call); | |
2889 | |
2890 __ Bind(&no_protector_check); | |
2891 // Load the FixedArray backing store. | |
2892 __ Ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); | |
2893 __ B(&push_args); | |
2894 | |
2895 __ Bind(&runtime_call); | |
2896 { | |
2897 // Call the builtin for the result of the spread. | |
2898 FrameScope scope(masm, StackFrame::INTERNAL); | |
2899 __ SmiTag(argc); | |
2900 __ Push(constructor, new_target, argc, spread); | |
2901 __ CallRuntime(Runtime::kSpreadIterableFixed); | |
2902 __ Mov(spread, x0); | |
2903 __ Pop(argc, new_target, constructor); | |
2904 __ SmiUntag(argc); | |
2905 } | |
2906 | |
2907 Register spread_len = x5; | |
2908 __ Bind(&push_args); | |
2909 { | |
2910 // Pop the spread argument off the stack. | |
2911 __ Pop(scratch); | |
2912 // Calculate the new nargs including the result of the spread. | |
2913 __ Ldrsw(spread_len, | |
2914 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset)); | |
2915 // argc += spread_len - 1. Subtract 1 for the spread itself. | |
2916 __ Add(argc, argc, spread_len); | |
2917 __ Sub(argc, argc, 1); | |
2918 } | |
2919 | |
2920 // Check for stack overflow. | |
2921 { | |
2922 // Check the stack for overflow. We are not trying to catch interruptions | |
2923 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2924 Label done; | |
2925 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); | |
2926 // Make scratch the space we have left. The stack might already be | |
2927 // overflowed here which will cause scratch to become negative. | |
2928 __ Sub(scratch, masm->StackPointer(), scratch); | |
2929 // Check if the arguments will overflow the stack. | |
2930 __ Cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2)); | |
2931 __ B(gt, &done); // Signed comparison. | |
2932 __ TailCallRuntime(Runtime::kThrowStackOverflow); | |
2933 __ Bind(&done); | |
2934 } | |
2935 | |
2936 // Put the evaluated spread onto the stack as additional arguments. | |
2937 { | |
2938 __ Mov(scratch, 0); | |
2939 Label done, loop; | |
2940 __ Bind(&loop); | |
2941 __ Cmp(scratch, spread_len); | |
2942 __ B(eq, &done); | |
2943 __ Add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2)); | |
2944 __ Ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); | |
2945 __ Push(scratch2); | |
2946 __ Add(scratch, scratch, Operand(1)); | |
2947 __ B(&loop); | |
2948 __ Bind(&done); | |
2949 } | |
2950 | |
2951 // Dispatch. | |
2952 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 2976 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
2953 } | 2977 } |
2954 | 2978 |
2955 // static | 2979 // static |
2956 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { | 2980 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { |
2957 ASM_LOCATION("Builtins::Generate_AllocateInNewSpace"); | 2981 ASM_LOCATION("Builtins::Generate_AllocateInNewSpace"); |
2958 // ----------- S t a t e ------------- | 2982 // ----------- S t a t e ------------- |
2959 // -- x1 : requested object size (untagged) | 2983 // -- x1 : requested object size (untagged) |
2960 // -- lr : return address | 2984 // -- lr : return address |
2961 // ----------------------------------- | 2985 // ----------------------------------- |
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3136 __ Unreachable(); | 3160 __ Unreachable(); |
3137 } | 3161 } |
3138 } | 3162 } |
3139 | 3163 |
3140 #undef __ | 3164 #undef __ |
3141 | 3165 |
3142 } // namespace internal | 3166 } // namespace internal |
3143 } // namespace v8 | 3167 } // namespace v8 |
3144 | 3168 |
3145 #endif // V8_TARGET_ARCH_ARM | 3169 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |