Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(234)

Side by Side Diff: src/builtins/arm64/builtins-arm64.cc

Issue 2649143002: [Turbofan] Implement call with spread bytecode in assembly code. (Closed)
Patch Set: Mips ports Created 3 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_ARM64 5 #if V8_TARGET_ARCH_ARM64
6 6
7 #include "src/arm64/frames-arm64.h" 7 #include "src/arm64/frames-arm64.h"
8 #include "src/codegen.h" 8 #include "src/codegen.h"
9 #include "src/debug/debug.h" 9 #include "src/debug/debug.h"
10 #include "src/deoptimizer.h" 10 #include "src/deoptimizer.h"
(...skipping 1143 matching lines...) Expand 10 before | Expand all | Expand 10 after
1154 // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned. 1154 // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
1155 __ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex)); 1155 __ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
1156 __ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex)); 1156 __ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex));
1157 __ Bind(&loop_check); 1157 __ Bind(&loop_check);
1158 __ Cmp(index, last_arg); 1158 __ Cmp(index, last_arg);
1159 __ B(gt, &loop_header); 1159 __ B(gt, &loop_header);
1160 } 1160 }
1161 1161
1162 // static 1162 // static
1163 void Builtins::Generate_InterpreterPushArgsAndCallImpl( 1163 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
1164 MacroAssembler* masm, TailCallMode tail_call_mode, 1164 MacroAssembler* masm, TailCallMode tail_call_mode, PushArgsMode mode) {
1165 CallableType function_type) {
1166 // ----------- S t a t e ------------- 1165 // ----------- S t a t e -------------
1167 // -- x0 : the number of arguments (not including the receiver) 1166 // -- x0 : the number of arguments (not including the receiver)
1168 // -- x2 : the address of the first argument to be pushed. Subsequent 1167 // -- x2 : the address of the first argument to be pushed. Subsequent
1169 // arguments should be consecutive above this, in the same order as 1168 // arguments should be consecutive above this, in the same order as
1170 // they are to be pushed onto the stack. 1169 // they are to be pushed onto the stack.
1171 // -- x1 : the target to call (can be any Object). 1170 // -- x1 : the target to call (can be any Object).
1172 // ----------------------------------- 1171 // -----------------------------------
1173 Label stack_overflow; 1172 Label stack_overflow;
1174 1173
1175 // Add one for the receiver. 1174 // Add one for the receiver.
1176 __ add(x3, x0, Operand(1)); 1175 __ add(x3, x0, Operand(1));
1177 1176
1178 // Push the arguments. x2, x4, x5, x6 will be modified. 1177 // Push the arguments. x2, x4, x5, x6 will be modified.
1179 Generate_InterpreterPushArgs(masm, x3, x2, x4, x5, x6, &stack_overflow); 1178 Generate_InterpreterPushArgs(masm, x3, x2, x4, x5, x6, &stack_overflow);
1180 1179
1181 // Call the target. 1180 // Call the target.
1182 if (function_type == CallableType::kJSFunction) { 1181 if (mode == PushArgsMode::kJSFunction) {
1183 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, 1182 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
1184 tail_call_mode), 1183 tail_call_mode),
1185 RelocInfo::CODE_TARGET); 1184 RelocInfo::CODE_TARGET);
1185 } else if (mode == PushArgsMode::kWithFinalSpread) {
1186 __ Jump(masm->isolate()->builtins()->CallWithSpread(),
1187 RelocInfo::CODE_TARGET);
1186 } else { 1188 } else {
1187 DCHECK_EQ(function_type, CallableType::kAny);
1188 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, 1189 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
1189 tail_call_mode), 1190 tail_call_mode),
1190 RelocInfo::CODE_TARGET); 1191 RelocInfo::CODE_TARGET);
1191 } 1192 }
1192 1193
1193 __ bind(&stack_overflow); 1194 __ bind(&stack_overflow);
1194 { 1195 {
1195 __ TailCallRuntime(Runtime::kThrowStackOverflow); 1196 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1196 __ Unreachable(); 1197 __ Unreachable();
1197 } 1198 }
1198 } 1199 }
1199 1200
1200 // static 1201 // static
1201 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( 1202 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
1202 MacroAssembler* masm, PushArgsConstructMode mode) { 1203 MacroAssembler* masm, PushArgsMode mode) {
1203 // ----------- S t a t e ------------- 1204 // ----------- S t a t e -------------
1204 // -- x0 : argument count (not including receiver) 1205 // -- x0 : argument count (not including receiver)
1205 // -- x3 : new target 1206 // -- x3 : new target
1206 // -- x1 : constructor to call 1207 // -- x1 : constructor to call
1207 // -- x2 : allocation site feedback if available, undefined otherwise 1208 // -- x2 : allocation site feedback if available, undefined otherwise
1208 // -- x4 : address of the first argument 1209 // -- x4 : address of the first argument
1209 // ----------------------------------- 1210 // -----------------------------------
1210 Label stack_overflow; 1211 Label stack_overflow;
1211 1212
1212 // Push a slot for the receiver. 1213 // Push a slot for the receiver.
1213 __ Push(xzr); 1214 __ Push(xzr);
1214 1215
1215 // Push the arguments. x5, x4, x6, x7 will be modified. 1216 // Push the arguments. x5, x4, x6, x7 will be modified.
1216 Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow); 1217 Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow);
1217 1218
1218 __ AssertUndefinedOrAllocationSite(x2, x6); 1219 __ AssertUndefinedOrAllocationSite(x2, x6);
1219 if (mode == PushArgsConstructMode::kJSFunction) { 1220 if (mode == PushArgsMode::kJSFunction) {
1220 __ AssertFunction(x1); 1221 __ AssertFunction(x1);
1221 1222
1222 // Tail call to the function-specific construct stub (still in the caller 1223 // Tail call to the function-specific construct stub (still in the caller
1223 // context at this point). 1224 // context at this point).
1224 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); 1225 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1225 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); 1226 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
1226 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); 1227 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
1227 __ Br(x4); 1228 __ Br(x4);
1228 } else if (mode == PushArgsConstructMode::kWithFinalSpread) { 1229 } else if (mode == PushArgsMode::kWithFinalSpread) {
1229 // Call the constructor with x0, x1, and x3 unmodified. 1230 // Call the constructor with x0, x1, and x3 unmodified.
1230 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), 1231 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
1231 RelocInfo::CODE_TARGET); 1232 RelocInfo::CODE_TARGET);
1232 } else { 1233 } else {
1233 DCHECK_EQ(PushArgsConstructMode::kOther, mode); 1234 DCHECK_EQ(PushArgsMode::kOther, mode);
1234 // Call the constructor with x0, x1, and x3 unmodified. 1235 // Call the constructor with x0, x1, and x3 unmodified.
1235 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 1236 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1236 } 1237 }
1237 1238
1238 __ bind(&stack_overflow); 1239 __ bind(&stack_overflow);
1239 { 1240 {
1240 __ TailCallRuntime(Runtime::kThrowStackOverflow); 1241 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1241 __ Unreachable(); 1242 __ Unreachable();
1242 } 1243 }
1243 } 1244 }
(...skipping 1445 matching lines...) Expand 10 before | Expand all | Expand 10 after
2689 2690
2690 // 3. Call to something that is not callable. 2691 // 3. Call to something that is not callable.
2691 __ bind(&non_callable); 2692 __ bind(&non_callable);
2692 { 2693 {
2693 FrameScope scope(masm, StackFrame::INTERNAL); 2694 FrameScope scope(masm, StackFrame::INTERNAL);
2694 __ Push(x1); 2695 __ Push(x1);
2695 __ CallRuntime(Runtime::kThrowCalledNonCallable); 2696 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2696 } 2697 }
2697 } 2698 }
2698 2699
2700 static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
2701 Register argc = x0;
2702 Register constructor = x1;
2703 Register new_target = x3;
2704
2705 Register scratch = x2;
2706 Register scratch2 = x6;
2707
2708 Register spread = x4;
2709 Register spread_map = x5;
2710
2711 Register spread_len = x5;
2712
2713 __ Peek(spread, 0);
2714 __ Ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
2715
2716 Label runtime_call, push_args;
2717 // Check that the spread is an array.
2718 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
2719 __ B(ne, &runtime_call);
2720
2721 // Check that we have the original ArrayPrototype.
2722 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
2723 __ Ldr(scratch2, NativeContextMemOperand());
2724 __ Ldr(scratch2,
2725 ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2726 __ Cmp(scratch, scratch2);
2727 __ B(ne, &runtime_call);
2728
2729 // Check that the ArrayPrototype hasn't been modified in a way that would
2730 // affect iteration.
2731 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
2732 __ Ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
2733 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
2734 __ B(ne, &runtime_call);
2735
2736 // Check that the map of the initial array iterator hasn't changed.
2737 __ Ldr(scratch2, NativeContextMemOperand());
2738 __ Ldr(scratch,
2739 ContextMemOperand(scratch2,
2740 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
2741 __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
2742 __ Ldr(scratch2,
2743 ContextMemOperand(
2744 scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
2745 __ Cmp(scratch, scratch2);
2746 __ B(ne, &runtime_call);
2747
2748 // For FastPacked kinds, iteration will have the same effect as simply
2749 // accessing each property in order.
2750 Label no_protector_check;
2751 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
2752 __ DecodeField<Map::ElementsKindBits>(scratch);
2753 __ Cmp(scratch, FAST_HOLEY_ELEMENTS);
2754 __ B(hi, &runtime_call);
2755 // For non-FastHoley kinds, we can skip the protector check.
2756 __ Cmp(scratch, FAST_SMI_ELEMENTS);
2757 __ B(eq, &no_protector_check);
2758 __ Cmp(scratch, FAST_ELEMENTS);
2759 __ B(eq, &no_protector_check);
2760 // Check the ArrayProtector cell.
2761 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
2762 __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2763 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
2764 __ B(ne, &runtime_call);
2765
2766 __ Bind(&no_protector_check);
2767 // Load the FixedArray backing store, but use the length from the array.
2768 __ Ldrsw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset));
2769 __ Ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
2770 __ B(&push_args);
2771
2772 __ Bind(&runtime_call);
2773 {
2774 // Call the builtin for the result of the spread.
2775 FrameScope scope(masm, StackFrame::INTERNAL);
2776 __ SmiTag(argc);
2777 __ Push(constructor, new_target, argc, spread);
2778 __ CallRuntime(Runtime::kSpreadIterableFixed);
2779 __ Mov(spread, x0);
2780 __ Pop(argc, new_target, constructor);
2781 __ SmiUntag(argc);
2782 }
2783
2784 {
2785 // Calculate the new nargs including the result of the spread.
2786 __ Ldrsw(spread_len,
2787 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset));
2788
2789 __ Bind(&push_args);
2790 // argc += spread_len - 1. Subtract 1 for the spread itself.
2791 __ Add(argc, argc, spread_len);
2792 __ Sub(argc, argc, 1);
2793
2794 // Pop the spread argument off the stack.
2795 __ Pop(scratch);
2796 }
2797
2798 // Check for stack overflow.
2799 {
2800 // Check the stack for overflow. We are not trying to catch interruptions
2801 // (i.e. debug break and preemption) here, so check the "real stack limit".
2802 Label done;
2803 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
2804 // Make scratch the space we have left. The stack might already be
2805 // overflowed here which will cause scratch to become negative.
2806 __ Sub(scratch, masm->StackPointer(), scratch);
2807 // Check if the arguments will overflow the stack.
2808 __ Cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2));
2809 __ B(gt, &done); // Signed comparison.
2810 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2811 __ Bind(&done);
2812 }
2813
2814 // Put the evaluated spread onto the stack as additional arguments.
2815 {
2816 __ Mov(scratch, 0);
2817 Label done, loop;
2818 __ Bind(&loop);
2819 __ Cmp(scratch, spread_len);
2820 __ B(eq, &done);
2821 __ Add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2));
2822 __ Ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
2823 __ Push(scratch2);
2824 __ Add(scratch, scratch, Operand(1));
2825 __ B(&loop);
2826 __ Bind(&done);
2827 }
2828 }
2829
2830 // static
2831 void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
2832 // ----------- S t a t e -------------
2833 // -- x0 : the number of arguments (not including the receiver)
2834 // -- x1 : the constructor to call (can be any Object)
2835 // -----------------------------------
2836
2837 // CheckSpreadAndPushToStack will push x3 to save it.
2838 __ LoadRoot(x3, Heap::kUndefinedValueRootIndex);
2839 CheckSpreadAndPushToStack(masm);
2840 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2841 TailCallMode::kDisallow),
2842 RelocInfo::CODE_TARGET);
2843 }
2844
2699 // static 2845 // static
2700 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { 2846 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2701 // ----------- S t a t e ------------- 2847 // ----------- S t a t e -------------
2702 // -- x0 : the number of arguments (not including the receiver) 2848 // -- x0 : the number of arguments (not including the receiver)
2703 // -- x1 : the constructor to call (checked to be a JSFunction) 2849 // -- x1 : the constructor to call (checked to be a JSFunction)
2704 // -- x3 : the new target (checked to be a constructor) 2850 // -- x3 : the new target (checked to be a constructor)
2705 // ----------------------------------- 2851 // -----------------------------------
2706 __ AssertFunction(x1); 2852 __ AssertFunction(x1);
2707 2853
2708 // Calling convention for function specific ConstructStubs require 2854 // Calling convention for function specific ConstructStubs require
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after
2818 2964
2819 // static 2965 // static
2820 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { 2966 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
2821 // ----------- S t a t e ------------- 2967 // ----------- S t a t e -------------
2822 // -- x0 : the number of arguments (not including the receiver) 2968 // -- x0 : the number of arguments (not including the receiver)
2823 // -- x1 : the constructor to call (can be any Object) 2969 // -- x1 : the constructor to call (can be any Object)
2824 // -- x3 : the new target (either the same as the constructor or 2970 // -- x3 : the new target (either the same as the constructor or
2825 // the JSFunction on which new was invoked initially) 2971 // the JSFunction on which new was invoked initially)
2826 // ----------------------------------- 2972 // -----------------------------------
2827 2973
2828 Register argc = x0; 2974 CheckSpreadAndPushToStack(masm);
2829 Register constructor = x1;
2830 Register new_target = x3;
2831
2832 Register scratch = x2;
2833 Register scratch2 = x6;
2834
2835 Register spread = x4;
2836 Register spread_map = x5;
2837 __ Peek(spread, 0);
2838 __ Ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
2839
2840 Label runtime_call, push_args;
2841 // Check that the spread is an array.
2842 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
2843 __ B(ne, &runtime_call);
2844
2845 // Check that we have the original ArrayPrototype.
2846 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
2847 __ Ldr(scratch2, NativeContextMemOperand());
2848 __ Ldr(scratch2,
2849 ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2850 __ Cmp(scratch, scratch2);
2851 __ B(ne, &runtime_call);
2852
2853 // Check that the ArrayPrototype hasn't been modified in a way that would
2854 // affect iteration.
2855 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
2856 __ Ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
2857 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
2858 __ B(ne, &runtime_call);
2859
2860 // Check that the map of the initial array iterator hasn't changed.
2861 __ Ldr(scratch2, NativeContextMemOperand());
2862 __ Ldr(scratch,
2863 ContextMemOperand(scratch2,
2864 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
2865 __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
2866 __ Ldr(scratch2,
2867 ContextMemOperand(
2868 scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
2869 __ Cmp(scratch, scratch2);
2870 __ B(ne, &runtime_call);
2871
2872 // For FastPacked kinds, iteration will have the same effect as simply
2873 // accessing each property in order.
2874 Label no_protector_check;
2875 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
2876 __ DecodeField<Map::ElementsKindBits>(scratch);
2877 __ Cmp(scratch, FAST_HOLEY_ELEMENTS);
2878 __ B(hi, &runtime_call);
2879 // For non-FastHoley kinds, we can skip the protector check.
2880 __ Cmp(scratch, FAST_SMI_ELEMENTS);
2881 __ B(eq, &no_protector_check);
2882 __ Cmp(scratch, FAST_ELEMENTS);
2883 __ B(eq, &no_protector_check);
2884 // Check the ArrayProtector cell.
2885 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
2886 __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2887 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
2888 __ B(ne, &runtime_call);
2889
2890 __ Bind(&no_protector_check);
2891 // Load the FixedArray backing store.
2892 __ Ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
2893 __ B(&push_args);
2894
2895 __ Bind(&runtime_call);
2896 {
2897 // Call the builtin for the result of the spread.
2898 FrameScope scope(masm, StackFrame::INTERNAL);
2899 __ SmiTag(argc);
2900 __ Push(constructor, new_target, argc, spread);
2901 __ CallRuntime(Runtime::kSpreadIterableFixed);
2902 __ Mov(spread, x0);
2903 __ Pop(argc, new_target, constructor);
2904 __ SmiUntag(argc);
2905 }
2906
2907 Register spread_len = x5;
2908 __ Bind(&push_args);
2909 {
2910 // Pop the spread argument off the stack.
2911 __ Pop(scratch);
2912 // Calculate the new nargs including the result of the spread.
2913 __ Ldrsw(spread_len,
2914 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset));
2915 // argc += spread_len - 1. Subtract 1 for the spread itself.
2916 __ Add(argc, argc, spread_len);
2917 __ Sub(argc, argc, 1);
2918 }
2919
2920 // Check for stack overflow.
2921 {
2922 // Check the stack for overflow. We are not trying to catch interruptions
2923 // (i.e. debug break and preemption) here, so check the "real stack limit".
2924 Label done;
2925 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
2926 // Make scratch the space we have left. The stack might already be
2927 // overflowed here which will cause scratch to become negative.
2928 __ Sub(scratch, masm->StackPointer(), scratch);
2929 // Check if the arguments will overflow the stack.
2930 __ Cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2));
2931 __ B(gt, &done); // Signed comparison.
2932 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2933 __ Bind(&done);
2934 }
2935
2936 // Put the evaluated spread onto the stack as additional arguments.
2937 {
2938 __ Mov(scratch, 0);
2939 Label done, loop;
2940 __ Bind(&loop);
2941 __ Cmp(scratch, spread_len);
2942 __ B(eq, &done);
2943 __ Add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2));
2944 __ Ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
2945 __ Push(scratch2);
2946 __ Add(scratch, scratch, Operand(1));
2947 __ B(&loop);
2948 __ Bind(&done);
2949 }
2950
2951 // Dispatch.
2952 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 2975 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2953 } 2976 }
2954 2977
2955 // static 2978 // static
2956 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { 2979 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2957 ASM_LOCATION("Builtins::Generate_AllocateInNewSpace"); 2980 ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
2958 // ----------- S t a t e ------------- 2981 // ----------- S t a t e -------------
2959 // -- x1 : requested object size (untagged) 2982 // -- x1 : requested object size (untagged)
2960 // -- lr : return address 2983 // -- lr : return address
2961 // ----------------------------------- 2984 // -----------------------------------
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after
3136 __ Unreachable(); 3159 __ Unreachable();
3137 } 3160 }
3138 } 3161 }
3139 3162
3140 #undef __ 3163 #undef __
3141 3164
3142 } // namespace internal 3165 } // namespace internal
3143 } // namespace v8 3166 } // namespace v8
3144 3167
3145 #endif // V8_TARGET_ARCH_ARM64 3168 #endif // V8_TARGET_ARCH_ARM64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698