Chromium Code Reviews

Side by Side Diff: src/builtins/arm64/builtins-arm64.cc

Issue 2571563004: [Turbofan] Implement super calls with spread bytecode in assembly code. (Closed)
Patch Set: Change arm64 loop to be similar to the rest (created 3 years, 11 months ago)
1 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 2 // Use of this source code is governed by a BSD-style license that can be
3 3 // found in the LICENSE file.
4 4
5 5 #if V8_TARGET_ARCH_ARM64
6 6
7 7 #include "src/arm64/frames-arm64.h"
8 8 #include "src/codegen.h"
9 9 #include "src/debug/debug.h"
10 10 #include "src/deoptimizer.h"
(...skipping 1251 matching lines...)
1262 1262 ArrayConstructorStub stub(masm->isolate());
1263 1263 __ TailCallStub(&stub);
1264 1264
1265 1265 __ bind(&stack_overflow);
1266 1266 {
1267 1267 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1268 1268 __ Unreachable();
1269 1269 }
1270 1270 }
1271 1271
1272 // static
1273 void Builtins::Generate_InterpreterPushArgsAndConstructWithSpread(
1274 MacroAssembler* masm) {
1275 // ----------- S t a t e -------------
1276 // -- x0 : argument count (not including receiver)
1277 // -- x1 : constructor to call
1278 // -- x2 : allocation site feedback if available, undefined otherwise
1279 // -- x3 : new target
1280 // -- x4 : address of the first argument
1281 // -----------------------------------
1282 Label stack_overflow;
1283
1284 // Push a slot for the receiver.
1285 __ Push(xzr);
1286
1287 // Push the arguments. x5, x4, x6, x7 will be modified.
1288 Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow);
1289
1290 __ AssertUndefinedOrAllocationSite(x2, x6);
1291 // Call the constructor with x0, x1, and x3 unmodified.
1292 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
1293 RelocInfo::CODE_TARGET);
1294
1295 __ bind(&stack_overflow);
1296 {
1297 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1298 __ Unreachable();
1299 }
1300 }
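The builtin above only reserves a receiver slot, copies the argc arguments starting at the address in x4 onto the stack, and tail-calls the ConstructWithSpread builtin with x0, x1 and x3 untouched. Here is a minimal self-contained C++ model of that arrangement, for illustration only and not V8 code: the vector stands in for the machine stack, and the real argument ordering is handled inside Generate_InterpreterPushArgs.

  #include <cstddef>
  #include <vector>

  using Value = double;  // stand-in for a tagged V8 value

  // Illustrative model, not V8 code: push a hole for the receiver, then the
  // argc arguments that the bytecode handler left in a contiguous block.
  void PushReceiverAndArgs(std::vector<Value>& stack, const Value* first_arg,
                           std::size_t argc) {
    stack.push_back(0);  // receiver slot, __ Push(xzr)
    for (std::size_t i = 0; i < argc; ++i) {
      stack.push_back(first_arg[i]);
    }
  }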
1301
1272 1302 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1273 1303 // Set the return address to the correct point in the interpreter entry
1274 1304 // trampoline.
1275 1305 Smi* interpreter_entry_return_pc_offset(
1276 1306 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1277 1307 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1278 1308 __ LoadObject(x1, masm->isolate()->builtins()->InterpreterEntryTrampoline());
1279 1309 __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value() +
1280 1310 Code::kHeaderSize - kHeapObjectTag));
1281 1311
(...skipping 1526 matching lines...)
2808 2838 }
2809 2839
2810 2840 // Called Construct on an Object that doesn't have a [[Construct]] internal
2811 2841 // method.
2812 2842 __ bind(&non_constructor);
2813 2843 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2814 2844 RelocInfo::CODE_TARGET);
2815 2845 }
2816 2846
2817 2847 // static
2848 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
2849 // ----------- S t a t e -------------
2850 // -- x0 : the number of arguments (not including the receiver)
2851 // -- x1 : the constructor to call (can be any Object)
2852 // -- x3 : the new target (either the same as the constructor or
2853 // the JSFunction on which new was invoked initially)
2854 // -----------------------------------
2855
2856 Register argc = x0;
2857 Register constructor = x1;
2858 Register new_target = x3;
2859
2860 Register scratch = x2;
2861 Register scratch2 = x6;
2862
2863 Register spread = x4;
2864 Register spread_map = x5;
2865 __ Peek(spread, 0);
2866 __ Ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
2867
2868 Label runtime_call, push_args;
2869 // Check that the spread is an array.
2870 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
2871 __ B(ne, &runtime_call);
2872
2873 // Check that we have the original ArrayPrototype.
2874 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
2875 __ Ldr(scratch2, NativeContextMemOperand());
2876 __ Ldr(scratch2,
2877 ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2878 __ Cmp(scratch, scratch2);
2879 __ B(ne, &runtime_call);
2880
2881 // Check that the ArrayPrototype hasn't been modified in a way that would
2882 // affect iteration.
2883 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
2884 __ Ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
2885 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
2886 __ B(ne, &runtime_call);
2887
2888 // Check that the map of the initial array iterator hasn't changed.
2889 __ Ldr(scratch2, NativeContextMemOperand());
2890 __ Ldr(scratch,
2891 ContextMemOperand(scratch2,
2892 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
2893 __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
2894 __ Ldr(scratch2,
2895 ContextMemOperand(
2896 scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
2897 __ Cmp(scratch, scratch2);
2898 __ B(ne, &runtime_call);
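// (Explanatory note, not part of the patch: spreading an array in a call goes
// through the iterator protocol, so the fast path is only sound when the
// spread is a JSArray whose prototype is still the initial Array prototype,
// the array-iterator protector cell is intact, and the map of the initial
// array iterator prototype is unchanged. The checks above verify exactly
// that; anything else branches to &runtime_call, which iterates for real via
// Runtime::kSpreadIterableFixed.)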
2899
2900 // For FastPacked kinds, iteration will have the same effect as simply
2901 // accessing each property in order.
2902 Label no_protector_check;
2903 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
2904 __ DecodeField<Map::ElementsKindBits>(scratch);
2905 __ Cmp(scratch, LAST_FAST_ELEMENTS_KIND);
2906 __ B(hi, &runtime_call);
2907 // For non-FastHoley kinds, we can skip the protector check.
2908 __ Cmp(scratch, FAST_SMI_ELEMENTS);
2909 __ B(eq, &no_protector_check);
2910 __ Cmp(scratch, FAST_ELEMENTS);
2911 __ B(eq, &no_protector_check);
2912 __ Cmp(scratch, FAST_DOUBLE_ELEMENTS);
2913 __ B(eq, &no_protector_check);
2914 // Check the ArrayProtector cell.
2915 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
2916 __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2917 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
2918 __ B(ne, &runtime_call);
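// (Explanatory note, not part of the patch: assuming the usual ElementsKind
// ordering, anything above LAST_FAST_ELEMENTS_KIND is not a fast kind and has
// already gone to &runtime_call; the three packed kinds compared for equality
// above skip straight to &no_protector_check, while the remaining fast kinds,
// the holey ones, must additionally see a valid ArrayProtector cell, because
// reading a hole falls back to the prototype chain.)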
2919
2920 __ Bind(&no_protector_check);
2921 // Load the FixedArray backing store.
2922 __ Ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
2923 __ B(&push_args);
2924
2925 __ Bind(&runtime_call);
2926 {
2927 // Call the builtin for the result of the spread.
2928 FrameScope scope(masm, StackFrame::INTERNAL);
2929 __ SmiTag(argc);
2930 __ Push(constructor, new_target, argc, spread);
2931 __ CallRuntime(Runtime::kSpreadIterableFixed);
2932 __ Mov(spread, x0);
2933 __ Pop(argc, new_target, constructor);
2934 __ SmiUntag(argc);
2935 }
2936
2937 Register spread_len = x5;
2938 __ Bind(&push_args);
2939 {
2940 // Pop the spread argument off the stack.
2941 __ Pop(scratch);
2942 // Calculate the new nargs including the result of the spread.
2943 __ Ldrsw(spread_len,
2944 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset));
2945 // argc += spread_len - 1. Subtract 1 for the spread itself.
2946 __ Add(argc, argc, spread_len);
2947 __ Sub(argc, argc, 1);
2948 }
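// (Worked example, not part of the patch: for new C(1, ...[2, 3, 4]) the
// interpreter pushed two arguments, the literal 1 and the spread array, so
// argc is 2 on entry; spread_len is 3, and after the spread argument itself
// is popped, argc becomes 2 + 3 - 1 = 4, i.e. the spread is replaced by its
// elements.)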
2949
2950 // Check for stack overflow.
2951 {
2952 // Check the stack for overflow. We are not trying to catch interruptions
2953 // (i.e. debug break and preemption) here, so check the "real stack limit".
2954 Label done;
2955 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
2956 // Make scratch the space we have left. The stack might already be
2957 // overflowed here which will cause scratch to become negative.
2958 __ Sub(scratch, masm->StackPointer(), scratch);
2959 // Check if the arguments will overflow the stack.
2960 __ Cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2));
2961 __ B(gt, &done); // Signed comparison.
2962 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2963 __ Bind(&done);
2964 }
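// (Explanatory note, not part of the patch: scratch holds sp minus the real
// stack limit, so if the stack is already exhausted the value is negative and
// the signed "gt" comparison against spread_len << kPointerSizeLog2 bytes
// fails, falling through into the ThrowStackOverflow tail call.)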
2965
2966 // Put the evaluated spread onto the stack as additional arguments.
2967 {
2968 __ Mov(scratch, 0);
2969 Label done, loop;
2970 __ Bind(&loop);
2971 __ Cmp(scratch, spread_len);
2972 __ B(eq, &done);
2973 __ Add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2));
2974 __ Ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
2975 __ Push(scratch2);
2976 __ Add(scratch, scratch, Operand(1));
2977 __ B(&loop);
2978 __ Bind(&done);
2979 }
2980
2981 // Dispatch.
2982 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2983 }
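To make the second half of the builtin concrete, here is a small self-contained C++ model, for illustration only and not V8 code: the machine stack is modelled by a std::vector, the FixedArray of spread elements by another vector (either the array's own backing store on the fast path or the result of Runtime::kSpreadIterableFixed on the slow path), and tagged values by plain doubles.

  #include <cstddef>
  #include <stdexcept>
  #include <vector>

  using Value = double;  // stand-in for a tagged V8 value

  // Models the tail of Generate_ConstructWithSpread: pop the spread argument,
  // adjust the argument count, check for stack overflow, then push the
  // already evaluated spread elements in order.
  std::size_t PushSpreadArguments(std::vector<Value>& stack, std::size_t argc,
                                  const std::vector<Value>& spread_elements,
                                  std::size_t stack_limit) {
    stack.pop_back();                    // the spread itself was on top
    argc += spread_elements.size() - 1;  // argc += spread_len - 1
    if (stack.size() + spread_elements.size() > stack_limit) {
      throw std::runtime_error("stack overflow");  // kThrowStackOverflow
    }
    for (const Value& element : spread_elements) {
      stack.push_back(element);          // the push loop above
    }
    return argc;                         // handed to Construct in x0
  }

The generated assembly performs the same steps in the same order; what the model leaves out is the fast-path decision above, which only chooses whether the spread elements can be taken directly from the JSArray backing store or must first be materialised by the runtime.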
2984
2985 // static
2818 2986 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2819 2987 ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
2820 2988 // ----------- S t a t e -------------
2821 2989 // -- x1 : requested object size (untagged)
2822 2990 // -- lr : return address
2823 2991 // -----------------------------------
2824 2992 __ SmiTag(x1);
2825 2993 __ Push(x1);
2826 2994 __ Move(cp, Smi::kZero);
2827 2995 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
(...skipping 170 matching lines...)
2998 3166 __ Unreachable();
2999 3167 }
3000 3168 }
3001 3169
3002 3170 #undef __
3003 3171
3004 3172 } // namespace internal
3005 3173 } // namespace v8
3006 3174
3007 3175 #endif // V8_TARGET_ARCH_ARM64