Chromium Code Reviews

Side by Side Diff: src/builtins/x64/builtins-x64.cc

Issue 2571563004: [Turbofan] Implement super calls with spread bytecode in assembly code. (Closed)
Patch Set: Change arm64 loop to be similar to the rest (created 3 years, 11 months ago)
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
(...skipping 895 matching lines...)

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructWithSpread(
rmcilroy 2017/01/11 15:24:44 High level question - this seems to be functionall
petermarshall 2017/01/11 16:50:03 Yes good point they are identical. I implemented t
rmcilroy 2017/01/12 10:40:38 Right, I wasn't suggesting to modify CallableType
petermarshall 2017/01/16 16:06:06 Ah I misread it a bit. How does this look now? Can
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the allocation site feedback (always null currently)
  //  -- rcx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rax, r8, r9, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // rcx and r8 will be modified.
  Generate_InterpreterPushArgs(masm, rax, rcx, r8);

  // Push return address in preparation for the tail-call.
  __ PushReturnAddressFrom(kScratchRegister);

  __ AssertUndefinedOrAllocationSite(rbx);
  // Call the constructor (rax, rdx, rdi passed on).
  __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
          RelocInfo::CODE_TARGET);

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

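[Reviewer note] The builtin above only rearranges the stack and then tail-calls ConstructWithSpread. Below is a minimal sketch of the resulting stack layout, written as plain standalone C++ (not V8 code): the vector standing in for the machine stack, the sample argument values, and the growth direction are illustrative assumptions only.

#include <cstdint>
#include <iostream>
#include <vector>

int main() {
  // Assume the interpreter left three arguments in memory, with rcx pointing
  // at the first one and the rest at consecutive addresses above it.
  std::vector<uint64_t> args = {0x11, 0x22, 0x33};  // arbitrary tagged values
  uint64_t return_address = 0xCAFE;  // held in kScratchRegister meanwhile

  std::vector<uint64_t> stack;  // back() plays the role of the top of stack
  // __ PopReturnAddressTo(kScratchRegister): return address set aside.
  // __ Push(Immediate(0)): slot for the receiver to be constructed.
  stack.push_back(0);
  // Generate_InterpreterPushArgs: arguments pushed in the order given.
  for (uint64_t a : args) stack.push_back(a);
  // __ PushReturnAddressFrom(kScratchRegister): ready for the tail-call.
  stack.push_back(return_address);

  for (uint64_t v : stack) std::cout << std::hex << v << '\n';
  return 0;
}

ConstructWithSpread then reads the spread from the slot just above the return address (Operand(rsp, kPointerSize)), since the spread is the last argument pushed.
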
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ Move(rbx, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
  __ Push(rbx);
(...skipping 1948 matching lines...)
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  // -----------------------------------

  // Load the spread argument into rbx.
  __ movp(rbx, Operand(rsp, kPointerSize));
  // Load the map of the spread into r15.
  __ movp(r15, FieldOperand(rbx, HeapObject::kMapOffset));
  // Load native context into r14.
  __ movp(r14, NativeContextOperand());

  Label runtime_call, push_args;
  // Check that the spread is an array.
  __ CmpInstanceType(r15, JS_ARRAY_TYPE);
  __ j(not_equal, &runtime_call);

  // Check that we have the original ArrayPrototype.
  __ movp(rcx, FieldOperand(r15, Map::kPrototypeOffset));
  __ cmpp(rcx, ContextOperand(r14, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  __ j(not_equal, &runtime_call);

  // Check that the ArrayPrototype hasn't been modified in a way that would
  // affect iteration.
  __ LoadRoot(rcx, Heap::kArrayIteratorProtectorRootIndex);
  __ Cmp(FieldOperand(rcx, Cell::kValueOffset),
         Smi::FromInt(Isolate::kProtectorValid));
  __ j(not_equal, &runtime_call);

  // Check that the map of the initial array iterator hasn't changed.
  __ movp(rcx,
          ContextOperand(r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ cmpp(rcx, ContextOperand(
                   r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
  __ j(not_equal, &runtime_call);

  // For FastPacked kinds, iteration will have the same effect as simply
  // accessing each property in order.
  Label no_protector_check;
  __ movzxbp(rcx, FieldOperand(r15, Map::kBitField2Offset));
  __ DecodeField<Map::ElementsKindBits>(rcx);
  __ cmpp(rcx, Immediate(LAST_FAST_ELEMENTS_KIND));
  __ j(above, &runtime_call);
  // For non-FastHoley kinds, we can skip the protector check.
  __ cmpp(rcx, Immediate(FAST_SMI_ELEMENTS));
  __ j(equal, &no_protector_check);
  __ cmpp(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &no_protector_check);
  __ cmpp(rcx, Immediate(FAST_DOUBLE_ELEMENTS));
  __ j(equal, &no_protector_check);
  // Check the ArrayProtector cell.
  __ LoadRoot(rcx, Heap::kArrayProtectorRootIndex);
  __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset),
         Smi::FromInt(Isolate::kProtectorValid));
  __ j(not_equal, &runtime_call);

  __ bind(&no_protector_check);
  // Load the FixedArray backing store.
  __ movp(rbx, FieldOperand(rbx, JSArray::kElementsOffset));
  __ jmp(&push_args);

  __ bind(&runtime_call);
  {
    // Call the builtin for the result of the spread.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);  // target
    __ Push(rdx);  // new target
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);  // nargs
    __ Push(rbx);
    __ CallRuntime(Runtime::kSpreadIterableFixed);
    __ movp(rbx, rax);
    __ Pop(rax);  // nargs
    __ SmiToInteger32(rax, rax);
    __ Pop(rdx);  // new target
    __ Pop(rdi);  // target
  }

  __ bind(&push_args);
  {
    // Pop the return address and spread argument.
    __ PopReturnAddressTo(r8);
    __ Pop(rcx);

    // Calculate the new nargs including the result of the spread.
    __ SmiToInteger32(r9, FieldOperand(rbx, FixedArray::kLengthOffset));
    // rax += r9 - 1. Subtract 1 for the spread itself.
    __ leap(rax, Operand(rax, r9, times_1, -1));
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movp(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here which will cause rcx to become negative.
    __ subp(rcx, kScratchRegister);
    __ sarp(rcx, Immediate(kPointerSizeLog2));
    // Check if the arguments will overflow the stack.
    __ cmpp(rcx, r9);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Put the evaluated spread onto the stack as additional arguments.
  {
    __ Set(rcx, 0);
    Label done, loop;
    __ bind(&loop);
    __ cmpl(rcx, r9);
    __ j(equal, &done, Label::kNear);
    __ movp(kScratchRegister, FieldOperand(rbx, rcx, times_pointer_size,
                                           FixedArray::kHeaderSize));
    __ Push(kScratchRegister);
    __ incl(rcx);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(r8);
  }
  // Dispatch.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

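[Reviewer note] For reference, a hedged sketch of the fast-path decision and the argument-count arithmetic in plain standalone C++, not V8 code: the SpreadInfo struct, its boolean fields, and CanUseFastPath are illustrative stand-ins for the map, elements-kind, and protector-cell checks done in assembly above.

#include <cstddef>
#include <vector>

struct SpreadInfo {
  bool is_js_array;
  bool has_original_array_prototype;
  bool array_iterator_protector_valid;
  bool iterator_map_unchanged;
  bool fast_elements_kind;
  bool holey_elements_kind;
  bool array_protector_valid;
  std::vector<int> elements;  // stands in for the FixedArray backing store
};

// True when the backing store can be pushed directly (the fast path);
// otherwise the builtin falls back to Runtime::kSpreadIterableFixed.
static bool CanUseFastPath(const SpreadInfo& s) {
  if (!s.is_js_array) return false;
  if (!s.has_original_array_prototype) return false;
  if (!s.array_iterator_protector_valid) return false;
  if (!s.iterator_map_unchanged) return false;
  if (!s.fast_elements_kind) return false;
  // Packed kinds skip the ArrayProtector check; holey kinds need it, since a
  // hole would otherwise have to be looked up through the prototype chain.
  if (s.holey_elements_kind && !s.array_protector_valid) return false;
  return true;
}

int main() {
  SpreadInfo s{true, true, true, true, true, false, true, {1, 2, 3}};
  std::size_t nargs = 3;  // explicit arguments, including the spread itself

  // Fast path: use the JSArray's elements directly; otherwise the builtin
  // calls Runtime::kSpreadIterableFixed to produce a FixedArray.
  std::vector<int> fixed = CanUseFastPath(s)
                               ? s.elements
                               : std::vector<int>{/* runtime result */};

  // rax += r9 - 1: the spread argument is replaced by its elements.
  nargs = nargs + fixed.size() - 1;
  return static_cast<int>(nargs);  // 5 with the values above
}

The stack check in the builtin uses the same count: the number of free slots, (rsp - real stack limit) >> kPointerSizeLog2, must be greater than the spread length in r9, otherwise kThrowStackOverflow is raised.
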
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
(...skipping 152 matching lines...)
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64