Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(419)

Side by Side Diff: src/builtins/arm64/builtins-arm64.cc

Issue 2571563004: [Turbofan] Implement super calls with spread bytecode in assembly code. (Closed)
Patch Set: Update builtins for new push args modes Created 3 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_ARM64 5 #if V8_TARGET_ARCH_ARM64
6 6
7 #include "src/arm64/frames-arm64.h" 7 #include "src/arm64/frames-arm64.h"
8 #include "src/codegen.h" 8 #include "src/codegen.h"
9 #include "src/debug/debug.h" 9 #include "src/debug/debug.h"
10 #include "src/deoptimizer.h" 10 #include "src/deoptimizer.h"
(...skipping 1183 matching lines...) Expand 10 before | Expand all | Expand 10 after
1194 1194
1195 __ bind(&stack_overflow); 1195 __ bind(&stack_overflow);
1196 { 1196 {
1197 __ TailCallRuntime(Runtime::kThrowStackOverflow); 1197 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1198 __ Unreachable(); 1198 __ Unreachable();
1199 } 1199 }
1200 } 1200 }
1201 1201
1202 // static 1202 // static
1203 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( 1203 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
1204 MacroAssembler* masm, CallableType construct_type) { 1204 MacroAssembler* masm, PushArgsConstructMode mode) {
1205 // ----------- S t a t e ------------- 1205 // ----------- S t a t e -------------
1206 // -- x0 : argument count (not including receiver) 1206 // -- x0 : argument count (not including receiver)
1207 // -- x3 : new target 1207 // -- x3 : new target
1208 // -- x1 : constructor to call 1208 // -- x1 : constructor to call
1209 // -- x2 : allocation site feedback if available, undefined otherwise 1209 // -- x2 : allocation site feedback if available, undefined otherwise
1210 // -- x4 : address of the first argument 1210 // -- x4 : address of the first argument
1211 // ----------------------------------- 1211 // -----------------------------------
1212 Label stack_overflow; 1212 Label stack_overflow;
1213 1213
1214 // Push a slot for the receiver. 1214 // Push a slot for the receiver.
1215 __ Push(xzr); 1215 __ Push(xzr);
1216 1216
1217 // Push the arguments. x5, x4, x6, x7 will be modified. 1217 // Push the arguments. x5, x4, x6, x7 will be modified.
1218 Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow); 1218 Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow);
1219 1219
1220 __ AssertUndefinedOrAllocationSite(x2, x6); 1220 __ AssertUndefinedOrAllocationSite(x2, x6);
1221 if (construct_type == CallableType::kJSFunction) { 1221 if (mode == PushArgsConstructMode::kJSFunction) {
1222 __ AssertFunction(x1); 1222 __ AssertFunction(x1);
1223 1223
1224 // Tail call to the function-specific construct stub (still in the caller 1224 // Tail call to the function-specific construct stub (still in the caller
1225 // context at this point). 1225 // context at this point).
1226 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); 1226 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1227 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); 1227 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
1228 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); 1228 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
1229 __ Br(x4); 1229 __ Br(x4);
1230 } else if (mode == PushArgsConstructMode::kWithFinalSpread) {
1231 // Call the constructor with x0, x1, and x3 unmodified.
1232 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
1233 RelocInfo::CODE_TARGET);
1230 } else { 1234 } else {
1231 DCHECK_EQ(construct_type, CallableType::kAny); 1235 DCHECK_EQ(PushArgsConstructMode::kOther, mode);
1232 // Call the constructor with x0, x1, and x3 unmodified. 1236 // Call the constructor with x0, x1, and x3 unmodified.
1233 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 1237 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1234 } 1238 }
1235 1239
1236 __ bind(&stack_overflow); 1240 __ bind(&stack_overflow);
1237 { 1241 {
1238 __ TailCallRuntime(Runtime::kThrowStackOverflow); 1242 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1239 __ Unreachable(); 1243 __ Unreachable();
1240 } 1244 }
1241 } 1245 }
(...skipping 1566 matching lines...) Expand 10 before | Expand all | Expand 10 after
2808 } 2812 }
2809 2813
2810 // Called Construct on an Object that doesn't have a [[Construct]] internal 2814 // Called Construct on an Object that doesn't have a [[Construct]] internal
2811 // method. 2815 // method.
2812 __ bind(&non_constructor); 2816 __ bind(&non_constructor);
2813 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), 2817 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2814 RelocInfo::CODE_TARGET); 2818 RelocInfo::CODE_TARGET);
2815 } 2819 }
2816 2820
2817 // static 2821 // static
2822 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
2823 // ----------- S t a t e -------------
2824 // -- x0 : the number of arguments (not including the receiver)
2825 // -- x1 : the constructor to call (can be any Object)
2826 // -- x3 : the new target (either the same as the constructor or
2827 // the JSFunction on which new was invoked initially)
2828 // -----------------------------------
2829
2830 Register argc = x0;
2831 Register constructor = x1;
2832 Register new_target = x3;
2833
2834 Register scratch = x2;
2835 Register scratch2 = x6;
2836
2837 Register spread = x4;
2838 Register spread_map = x5;
2839 __ Peek(spread, 0);
2840 __ Ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
2841
2842 Label runtime_call, push_args;
2843 // Check that the spread is an array.
2844 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
2845 __ B(ne, &runtime_call);
2846
2847 // Check that we have the original ArrayPrototype.
2848 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
2849 __ Ldr(scratch2, NativeContextMemOperand());
2850 __ Ldr(scratch2,
2851 ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2852 __ Cmp(scratch, scratch2);
2853 __ B(ne, &runtime_call);
2854
2855 // Check that the ArrayPrototype hasn't been modified in a way that would
2856 // affect iteration.
2857 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
2858 __ Ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
2859 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
2860 __ B(ne, &runtime_call);
2861
2862 // Check that the map of the initial array iterator hasn't changed.
2863 __ Ldr(scratch2, NativeContextMemOperand());
2864 __ Ldr(scratch,
2865 ContextMemOperand(scratch2,
2866 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
2867 __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
2868 __ Ldr(scratch2,
2869 ContextMemOperand(
2870 scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
2871 __ Cmp(scratch, scratch2);
2872 __ B(ne, &runtime_call);
2873
2874 // For FastPacked kinds, iteration will have the same effect as simply
2875 // accessing each property in order.
2876 Label no_protector_check;
2877 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
2878 __ DecodeField<Map::ElementsKindBits>(scratch);
2879 __ Cmp(scratch, LAST_FAST_ELEMENTS_KIND);
2880 __ B(hi, &runtime_call);
2881 // For non-FastHoley kinds, we can skip the protector check.
2882 __ Cmp(scratch, FAST_SMI_ELEMENTS);
2883 __ B(eq, &no_protector_check);
2884 __ Cmp(scratch, FAST_ELEMENTS);
2885 __ B(eq, &no_protector_check);
2886 __ Cmp(scratch, FAST_DOUBLE_ELEMENTS);
2887 __ B(eq, &no_protector_check);
2888 // Check the ArrayProtector cell.
2889 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
2890 __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2891 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
2892 __ B(ne, &runtime_call);
2893
2894 __ Bind(&no_protector_check);
2895 // Load the FixedArray backing store.
2896 __ Ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
2897 __ B(&push_args);
2898
2899 __ Bind(&runtime_call);
2900 {
2901 // Call the builtin for the result of the spread.
2902 FrameScope scope(masm, StackFrame::INTERNAL);
2903 __ SmiTag(argc);
2904 __ Push(constructor, new_target, argc, spread);
2905 __ CallRuntime(Runtime::kSpreadIterableFixed);
2906 __ Mov(spread, x0);
2907 __ Pop(argc, new_target, constructor);
2908 __ SmiUntag(argc);
2909 }
2910
2911 Register spread_len = x5;
2912 __ Bind(&push_args);
2913 {
2914 // Pop the spread argument off the stack.
2915 __ Pop(scratch);
2916 // Calculate the new nargs including the result of the spread.
2917 __ Ldrsw(spread_len,
2918 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset));
2919 // argc += spread_len - 1. Subtract 1 for the spread itself.
2920 __ Add(argc, argc, spread_len);
2921 __ Sub(argc, argc, 1);
2922 }
2923
2924 // Check for stack overflow.
2925 {
2926 // Check the stack for overflow. We are not trying to catch interruptions
2927 // (i.e. debug break and preemption) here, so check the "real stack limit".
2928 Label done;
2929 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
2930 // Make scratch the space we have left. The stack might already be
2931 // overflowed here which will cause scratch to become negative.
2932 __ Sub(scratch, masm->StackPointer(), scratch);
2933 // Check if the arguments will overflow the stack.
2934 __ Cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2));
2935 __ B(gt, &done); // Signed comparison.
2936 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2937 __ Bind(&done);
2938 }
2939
2940 // Put the evaluated spread onto the stack as additional arguments.
2941 {
2942 __ Mov(scratch, 0);
2943 Label done, loop;
2944 __ Bind(&loop);
2945 __ Cmp(scratch, spread_len);
2946 __ B(eq, &done);
2947 __ Add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2));
2948 __ Ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
2949 __ Push(scratch2);
2950 __ Add(scratch, scratch, Operand(1));
2951 __ B(&loop);
2952 __ Bind(&done);
2953 }
2954
2955 // Dispatch.
2956 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2957 }
2958
2959 // static
2818 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { 2960 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2819 ASM_LOCATION("Builtins::Generate_AllocateInNewSpace"); 2961 ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
2820 // ----------- S t a t e ------------- 2962 // ----------- S t a t e -------------
2821 // -- x1 : requested object size (untagged) 2963 // -- x1 : requested object size (untagged)
2822 // -- lr : return address 2964 // -- lr : return address
2823 // ----------------------------------- 2965 // -----------------------------------
2824 __ SmiTag(x1); 2966 __ SmiTag(x1);
2825 __ Push(x1); 2967 __ Push(x1);
2826 __ Move(cp, Smi::kZero); 2968 __ Move(cp, Smi::kZero);
2827 __ TailCallRuntime(Runtime::kAllocateInNewSpace); 2969 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after
2998 __ Unreachable(); 3140 __ Unreachable();
2999 } 3141 }
3000 } 3142 }
3001 3143
3002 #undef __ 3144 #undef __
3003 3145
3004 } // namespace internal 3146 } // namespace internal
3005 } // namespace v8 3147 } // namespace v8
3006 3148
3007 #endif  // V8_TARGET_ARCH_ARM64 3149 #endif  // V8_TARGET_ARCH_ARM64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698