Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(335)

Side by Side Diff: src/builtins/x64/builtins-x64.cc

Issue 2649143002: [Turbofan] Implement call with spread bytecode in assembly code. (Closed)
Patch Set: Mips ports Created 3 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_X64 5 #if V8_TARGET_ARCH_X64
6 6
7 #include "src/code-factory.h" 7 #include "src/code-factory.h"
8 #include "src/codegen.h" 8 #include "src/codegen.h"
9 #include "src/deoptimizer.h" 9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h" 10 #include "src/full-codegen/full-codegen.h"
(...skipping 746 matching lines...) Expand 10 before | Expand all | Expand 10 after
757 __ bind(&loop_header); 757 __ bind(&loop_header);
758 __ Push(Operand(start_address, 0)); 758 __ Push(Operand(start_address, 0));
759 __ subp(start_address, Immediate(kPointerSize)); 759 __ subp(start_address, Immediate(kPointerSize));
760 __ bind(&loop_check); 760 __ bind(&loop_check);
761 __ cmpp(start_address, scratch); 761 __ cmpp(start_address, scratch);
762 __ j(greater, &loop_header, Label::kNear); 762 __ j(greater, &loop_header, Label::kNear);
763 } 763 }
764 764
765 // static 765 // static
766 void Builtins::Generate_InterpreterPushArgsAndCallImpl( 766 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
767 MacroAssembler* masm, TailCallMode tail_call_mode, 767 MacroAssembler* masm, TailCallMode tail_call_mode, PushArgsMode mode) {
768 CallableType function_type) {
769 // ----------- S t a t e ------------- 768 // ----------- S t a t e -------------
770 // -- rax : the number of arguments (not including the receiver) 769 // -- rax : the number of arguments (not including the receiver)
771 // -- rbx : the address of the first argument to be pushed. Subsequent 770 // -- rbx : the address of the first argument to be pushed. Subsequent
772 // arguments should be consecutive above this, in the same order as 771 // arguments should be consecutive above this, in the same order as
773 // they are to be pushed onto the stack. 772 // they are to be pushed onto the stack.
774 // -- rdi : the target to call (can be any Object). 773 // -- rdi : the target to call (can be any Object).
775 // ----------------------------------- 774 // -----------------------------------
776 Label stack_overflow; 775 Label stack_overflow;
777 776
778 // Number of values to be pushed. 777 // Number of values to be pushed.
779 __ Move(rcx, rax); 778 __ Move(rcx, rax);
780 __ addp(rcx, Immediate(1)); // Add one for receiver. 779 __ addp(rcx, Immediate(1)); // Add one for receiver.
781 780
782 // Add a stack check before pushing arguments. 781 // Add a stack check before pushing arguments.
783 Generate_StackOverflowCheck(masm, rcx, rdx, r8, &stack_overflow); 782 Generate_StackOverflowCheck(masm, rcx, rdx, r8, &stack_overflow);
784 783
785 // Pop return address to allow tail-call after pushing arguments. 784 // Pop return address to allow tail-call after pushing arguments.
786 __ PopReturnAddressTo(kScratchRegister); 785 __ PopReturnAddressTo(kScratchRegister);
787 786
788 // rbx and rdx will be modified. 787 // rbx and rdx will be modified.
789 Generate_InterpreterPushArgs(masm, rcx, rbx, rdx); 788 Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);
790 789
791 // Call the target. 790 // Call the target.
792 __ PushReturnAddressFrom(kScratchRegister); // Re-push return address. 791 __ PushReturnAddressFrom(kScratchRegister); // Re-push return address.
793 792
794 if (function_type == CallableType::kJSFunction) { 793 if (mode == PushArgsMode::kJSFunction) {
795 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, 794 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
796 tail_call_mode), 795 tail_call_mode),
797 RelocInfo::CODE_TARGET); 796 RelocInfo::CODE_TARGET);
797 } else if (mode == PushArgsMode::kWithFinalSpread) {
798 __ Jump(masm->isolate()->builtins()->CallWithSpread(),
799 RelocInfo::CODE_TARGET);
798 } else { 800 } else {
799 DCHECK_EQ(function_type, CallableType::kAny);
800 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, 801 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
801 tail_call_mode), 802 tail_call_mode),
802 RelocInfo::CODE_TARGET); 803 RelocInfo::CODE_TARGET);
803 } 804 }
804 805
805 // Throw stack overflow exception. 806 // Throw stack overflow exception.
806 __ bind(&stack_overflow); 807 __ bind(&stack_overflow);
807 { 808 {
808 __ TailCallRuntime(Runtime::kThrowStackOverflow); 809 __ TailCallRuntime(Runtime::kThrowStackOverflow);
809 // This should be unreachable. 810 // This should be unreachable.
810 __ int3(); 811 __ int3();
811 } 812 }
812 } 813 }
813 814
814 // static 815 // static
815 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( 816 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
816 MacroAssembler* masm, PushArgsConstructMode mode) { 817 MacroAssembler* masm, PushArgsMode mode) {
817 // ----------- S t a t e ------------- 818 // ----------- S t a t e -------------
818 // -- rax : the number of arguments (not including the receiver) 819 // -- rax : the number of arguments (not including the receiver)
819 // -- rdx : the new target (either the same as the constructor or 820 // -- rdx : the new target (either the same as the constructor or
820 // the JSFunction on which new was invoked initially) 821 // the JSFunction on which new was invoked initially)
821 // -- rdi : the constructor to call (can be any Object) 822 // -- rdi : the constructor to call (can be any Object)
822 // -- rbx : the allocation site feedback if available, undefined otherwise 823 // -- rbx : the allocation site feedback if available, undefined otherwise
823 // -- rcx : the address of the first argument to be pushed. Subsequent 824 // -- rcx : the address of the first argument to be pushed. Subsequent
824 // arguments should be consecutive above this, in the same order as 825 // arguments should be consecutive above this, in the same order as
825 // they are to be pushed onto the stack. 826 // they are to be pushed onto the stack.
826 // ----------------------------------- 827 // -----------------------------------
827 Label stack_overflow; 828 Label stack_overflow;
828 829
829 // Add a stack check before pushing arguments. 830 // Add a stack check before pushing arguments.
830 Generate_StackOverflowCheck(masm, rax, r8, r9, &stack_overflow); 831 Generate_StackOverflowCheck(masm, rax, r8, r9, &stack_overflow);
831 832
832 // Pop return address to allow tail-call after pushing arguments. 833 // Pop return address to allow tail-call after pushing arguments.
833 __ PopReturnAddressTo(kScratchRegister); 834 __ PopReturnAddressTo(kScratchRegister);
834 835
835 // Push slot for the receiver to be constructed. 836 // Push slot for the receiver to be constructed.
836 __ Push(Immediate(0)); 837 __ Push(Immediate(0));
837 838
838 // rcx and r8 will be modified. 839 // rcx and r8 will be modified.
839 Generate_InterpreterPushArgs(masm, rax, rcx, r8); 840 Generate_InterpreterPushArgs(masm, rax, rcx, r8);
840 841
841 // Push return address in preparation for the tail-call. 842 // Push return address in preparation for the tail-call.
842 __ PushReturnAddressFrom(kScratchRegister); 843 __ PushReturnAddressFrom(kScratchRegister);
843 844
844 __ AssertUndefinedOrAllocationSite(rbx); 845 __ AssertUndefinedOrAllocationSite(rbx);
845 if (mode == PushArgsConstructMode::kJSFunction) { 846 if (mode == PushArgsMode::kJSFunction) {
846 // Tail call to the function-specific construct stub (still in the caller 847 // Tail call to the function-specific construct stub (still in the caller
847 // context at this point). 848 // context at this point).
848 __ AssertFunction(rdi); 849 __ AssertFunction(rdi);
849 850
850 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 851 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
851 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); 852 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
852 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); 853 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
853 // Jump to the constructor function (rax, rbx, rdx passed on). 854 // Jump to the constructor function (rax, rbx, rdx passed on).
854 __ jmp(rcx); 855 __ jmp(rcx);
855 } else if (mode == PushArgsConstructMode::kWithFinalSpread) { 856 } else if (mode == PushArgsMode::kWithFinalSpread) {
856 // Call the constructor (rax, rdx, rdi passed on). 857 // Call the constructor (rax, rdx, rdi passed on).
857 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), 858 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
858 RelocInfo::CODE_TARGET); 859 RelocInfo::CODE_TARGET);
859 } else { 860 } else {
860 DCHECK_EQ(PushArgsConstructMode::kOther, mode); 861 DCHECK_EQ(PushArgsMode::kOther, mode);
861 // Call the constructor (rax, rdx, rdi passed on). 862 // Call the constructor (rax, rdx, rdi passed on).
862 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 863 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
863 } 864 }
864 865
865 // Throw stack overflow exception. 866 // Throw stack overflow exception.
866 __ bind(&stack_overflow); 867 __ bind(&stack_overflow);
867 { 868 {
868 __ TailCallRuntime(Runtime::kThrowStackOverflow); 869 __ TailCallRuntime(Runtime::kThrowStackOverflow);
869 // This should be unreachable. 870 // This should be unreachable.
870 __ int3(); 871 __ int3();
(...skipping 1883 matching lines...) Expand 10 before | Expand all | Expand 10 after
2754 2755
2755 // 3. Call to something that is not callable. 2756 // 3. Call to something that is not callable.
2756 __ bind(&non_callable); 2757 __ bind(&non_callable);
2757 { 2758 {
2758 FrameScope scope(masm, StackFrame::INTERNAL); 2759 FrameScope scope(masm, StackFrame::INTERNAL);
2759 __ Push(rdi); 2760 __ Push(rdi);
2760 __ CallRuntime(Runtime::kThrowCalledNonCallable); 2761 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2761 } 2762 }
2762 } 2763 }
2763 2764
// Expands the spread argument — located just below the return address on the
// stack — into individual stack arguments for a call/construct, and adjusts
// rax (the argument count, excluding the receiver) accordingly.
//
// Fast path: if the spread is a JSArray whose iteration is provably
// unobservable (original ArrayPrototype, intact array-iterator protector,
// unmodified initial array-iterator map, fast elements kind, and — for holey
// kinds — an intact array protector), the elements FixedArray is pushed
// directly. Otherwise, the iterable is materialized into a FixedArray via
// Runtime::kSpreadIterableFixed.
//
// NOTE(review): clobbers rbx, rcx, r8, r9, r14, r15 and kScratchRegister;
// rdi (target), rdx (new target) and rax (nargs) are preserved across the
// runtime call by push/pop. Callers must not hold live values in the
// clobbered registers — confirm against call sites.
static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
  // Load the spread argument into rbx.
  __ movp(rbx, Operand(rsp, kPointerSize));
  // Load the map of the spread into r15.
  __ movp(r15, FieldOperand(rbx, HeapObject::kMapOffset));
  // Load native context into r14.
  __ movp(r14, NativeContextOperand());

  Label runtime_call, push_args;
  // Check that the spread is an array.
  __ CmpInstanceType(r15, JS_ARRAY_TYPE);
  __ j(not_equal, &runtime_call);

  // Check that we have the original ArrayPrototype.
  __ movp(rcx, FieldOperand(r15, Map::kPrototypeOffset));
  __ cmpp(rcx, ContextOperand(r14, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  __ j(not_equal, &runtime_call);

  // Check that the ArrayPrototype hasn't been modified in a way that would
  // affect iteration.
  __ LoadRoot(rcx, Heap::kArrayIteratorProtectorRootIndex);
  __ Cmp(FieldOperand(rcx, Cell::kValueOffset),
         Smi::FromInt(Isolate::kProtectorValid));
  __ j(not_equal, &runtime_call);

  // Check that the map of the initial array iterator hasn't changed.
  __ movp(rcx,
          ContextOperand(r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ cmpp(rcx, ContextOperand(
                   r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
  __ j(not_equal, &runtime_call);

  // For FastPacked kinds, iteration will have the same effect as simply
  // accessing each property in order.
  Label no_protector_check;
  __ movzxbp(rcx, FieldOperand(r15, Map::kBitField2Offset));
  __ DecodeField<Map::ElementsKindBits>(rcx);
  // Anything above FAST_HOLEY_ELEMENTS (dictionary, double, etc.) goes to the
  // runtime call.
  __ cmpp(rcx, Immediate(FAST_HOLEY_ELEMENTS));
  __ j(above, &runtime_call);
  // For non-FastHoley kinds, we can skip the protector check.
  __ cmpp(rcx, Immediate(FAST_SMI_ELEMENTS));
  __ j(equal, &no_protector_check);
  __ cmpp(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &no_protector_check);
  // Check the ArrayProtector cell: for holey kinds, reading a hole must still
  // yield undefined (i.e. Array.prototype hasn't grown indexed properties).
  __ LoadRoot(rcx, Heap::kArrayProtectorRootIndex);
  __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset),
         Smi::FromInt(Isolate::kProtectorValid));
  __ j(not_equal, &runtime_call);

  __ bind(&no_protector_check);
  // Load the FixedArray backing store, but use the length from the array
  // (the backing store may be longer than the array's length).
  __ SmiToInteger32(r9, FieldOperand(rbx, JSArray::kLengthOffset));
  __ movp(rbx, FieldOperand(rbx, JSArray::kElementsOffset));
  __ jmp(&push_args);

  __ bind(&runtime_call);
  {
    // Call the builtin for the result of the spread. Save and restore the
    // caller's registers around the runtime call.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);  // target
    __ Push(rdx);  // new target
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);  // nargs
    __ Push(rbx);  // the spread iterable (runtime argument)
    __ CallRuntime(Runtime::kSpreadIterableFixed);
    __ movp(rbx, rax);  // result: a FixedArray of the spread values
    __ Pop(rax);  // nargs
    __ SmiToInteger32(rax, rax);
    __ Pop(rdx);  // new target
    __ Pop(rdi);  // target
  }

  {
    // Calculate the new nargs including the result of the spread. On the fast
    // path, r9 was already set from JSArray::kLengthOffset and control jumps
    // straight to push_args, skipping this load.
    __ SmiToInteger32(r9, FieldOperand(rbx, FixedArray::kLengthOffset));

    __ bind(&push_args);
    // rax += r9 - 1. Subtract 1 for the spread itself.
    __ leap(rax, Operand(rax, r9, times_1, -1));

    // Pop the return address and spread argument.
    __ PopReturnAddressTo(r8);
    __ Pop(rcx);  // discard the (now expanded) spread argument
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movp(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here which will cause rcx to become negative.
    __ subp(rcx, kScratchRegister);
    __ sarp(rcx, Immediate(kPointerSizeLog2));
    // Check if the arguments will overflow the stack.
    __ cmpp(rcx, r9);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Put the evaluated spread onto the stack as additional arguments.
  {
    __ Set(rcx, 0);
    Label done, loop;
    __ bind(&loop);
    __ cmpl(rcx, r9);
    __ j(equal, &done, Label::kNear);
    __ movp(kScratchRegister, FieldOperand(rbx, rcx, times_pointer_size,
                                           FixedArray::kHeaderSize));
    __ Push(kScratchRegister);
    __ incl(rcx);
    __ jmp(&loop);
    __ bind(&done);
    // Re-push the saved return address on top of the expanded arguments.
    __ PushReturnAddressFrom(r8);
  }
}
2886
// static
// Builtin entry for a call whose final argument is a spread (f(...args)).
// Expands the spread on the stack, then tail-calls the generic Call builtin.
void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object)
  // -----------------------------------

  // CheckSpreadAndPushToStack will push rdx to save it. There is no new
  // target for a plain call, so load undefined as a harmless placeholder.
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  CheckSpreadAndPushToStack(masm);
  // Tail-call the generic Call builtin with the expanded argument list
  // (rax holds the updated argument count).
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            TailCallMode::kDisallow),
          RelocInfo::CODE_TARGET);
}
2901
2764 // static 2902 // static
2765 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { 2903 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2766 // ----------- S t a t e ------------- 2904 // ----------- S t a t e -------------
2767 // -- rax : the number of arguments (not including the receiver) 2905 // -- rax : the number of arguments (not including the receiver)
2768 // -- rdx : the new target (checked to be a constructor) 2906 // -- rdx : the new target (checked to be a constructor)
2769 // -- rdi : the constructor to call (checked to be a JSFunction) 2907 // -- rdi : the constructor to call (checked to be a JSFunction)
2770 // ----------------------------------- 2908 // -----------------------------------
2771 __ AssertFunction(rdi); 2909 __ AssertFunction(rdi);
2772 2910
2773 // Calling convention for function specific ConstructStubs require 2911 // Calling convention for function specific ConstructStubs require
(...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after
2885 3023
2886 // static 3024 // static
2887 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { 3025 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
2888 // ----------- S t a t e ------------- 3026 // ----------- S t a t e -------------
2889 // -- rax : the number of arguments (not including the receiver) 3027 // -- rax : the number of arguments (not including the receiver)
2890 // -- rdx : the new target (either the same as the constructor or 3028 // -- rdx : the new target (either the same as the constructor or
2891 // the JSFunction on which new was invoked initially) 3029 // the JSFunction on which new was invoked initially)
2892 // -- rdi : the constructor to call (can be any Object) 3030 // -- rdi : the constructor to call (can be any Object)
2893 // ----------------------------------- 3031 // -----------------------------------
2894 3032
2895 // Load the spread argument into rbx. 3033 CheckSpreadAndPushToStack(masm);
2896 __ movp(rbx, Operand(rsp, kPointerSize));
2897 // Load the map of the spread into r15.
2898 __ movp(r15, FieldOperand(rbx, HeapObject::kMapOffset));
2899 // Load native context into r14.
2900 __ movp(r14, NativeContextOperand());
2901
2902 Label runtime_call, push_args;
2903 // Check that the spread is an array.
2904 __ CmpInstanceType(r15, JS_ARRAY_TYPE);
2905 __ j(not_equal, &runtime_call);
2906
2907 // Check that we have the original ArrayPrototype.
2908 __ movp(rcx, FieldOperand(r15, Map::kPrototypeOffset));
2909 __ cmpp(rcx, ContextOperand(r14, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2910 __ j(not_equal, &runtime_call);
2911
2912 // Check that the ArrayPrototype hasn't been modified in a way that would
2913 // affect iteration.
2914 __ LoadRoot(rcx, Heap::kArrayIteratorProtectorRootIndex);
2915 __ Cmp(FieldOperand(rcx, Cell::kValueOffset),
2916 Smi::FromInt(Isolate::kProtectorValid));
2917 __ j(not_equal, &runtime_call);
2918
2919 // Check that the map of the initial array iterator hasn't changed.
2920 __ movp(rcx,
2921 ContextOperand(r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
2922 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2923 __ cmpp(rcx, ContextOperand(
2924 r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
2925 __ j(not_equal, &runtime_call);
2926
2927 // For FastPacked kinds, iteration will have the same effect as simply
2928 // accessing each property in order.
2929 Label no_protector_check;
2930 __ movzxbp(rcx, FieldOperand(r15, Map::kBitField2Offset));
2931 __ DecodeField<Map::ElementsKindBits>(rcx);
2932 __ cmpp(rcx, Immediate(FAST_HOLEY_ELEMENTS));
2933 __ j(above, &runtime_call);
2934 // For non-FastHoley kinds, we can skip the protector check.
2935 __ cmpp(rcx, Immediate(FAST_SMI_ELEMENTS));
2936 __ j(equal, &no_protector_check);
2937 __ cmpp(rcx, Immediate(FAST_ELEMENTS));
2938 __ j(equal, &no_protector_check);
2939 // Check the ArrayProtector cell.
2940 __ LoadRoot(rcx, Heap::kArrayProtectorRootIndex);
2941 __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset),
2942 Smi::FromInt(Isolate::kProtectorValid));
2943 __ j(not_equal, &runtime_call);
2944
2945 __ bind(&no_protector_check);
2946 // Load the FixedArray backing store.
2947 __ movp(rbx, FieldOperand(rbx, JSArray::kElementsOffset));
2948 __ jmp(&push_args);
2949
2950 __ bind(&runtime_call);
2951 {
2952 // Call the builtin for the result of the spread.
2953 FrameScope scope(masm, StackFrame::INTERNAL);
2954 __ Push(rdi); // target
2955 __ Push(rdx); // new target
2956 __ Integer32ToSmi(rax, rax);
2957 __ Push(rax); // nargs
2958 __ Push(rbx);
2959 __ CallRuntime(Runtime::kSpreadIterableFixed);
2960 __ movp(rbx, rax);
2961 __ Pop(rax); // nargs
2962 __ SmiToInteger32(rax, rax);
2963 __ Pop(rdx); // new target
2964 __ Pop(rdi); // target
2965 }
2966
2967 __ bind(&push_args);
2968 {
2969 // Pop the return address and spread argument.
2970 __ PopReturnAddressTo(r8);
2971 __ Pop(rcx);
2972
2973 // Calculate the new nargs including the result of the spread.
2974 __ SmiToInteger32(r9, FieldOperand(rbx, FixedArray::kLengthOffset));
2975 // rax += r9 - 1. Subtract 1 for the spread itself.
2976 __ leap(rax, Operand(rax, r9, times_1, -1));
2977 }
2978
2979 // Check for stack overflow.
2980 {
2981 // Check the stack for overflow. We are not trying to catch interruptions
2982 // (i.e. debug break and preemption) here, so check the "real stack limit".
2983 Label done;
2984 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
2985 __ movp(rcx, rsp);
2986 // Make rcx the space we have left. The stack might already be overflowed
2987 // here which will cause rcx to become negative.
2988 __ subp(rcx, kScratchRegister);
2989 __ sarp(rcx, Immediate(kPointerSizeLog2));
2990 // Check if the arguments will overflow the stack.
2991 __ cmpp(rcx, r9);
2992 __ j(greater, &done, Label::kNear); // Signed comparison.
2993 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2994 __ bind(&done);
2995 }
2996
2997 // Put the evaluated spread onto the stack as additional arguments.
2998 {
2999 __ Set(rcx, 0);
3000 Label done, loop;
3001 __ bind(&loop);
3002 __ cmpl(rcx, r9);
3003 __ j(equal, &done, Label::kNear);
3004 __ movp(kScratchRegister, FieldOperand(rbx, rcx, times_pointer_size,
3005 FixedArray::kHeaderSize));
3006 __ Push(kScratchRegister);
3007 __ incl(rcx);
3008 __ jmp(&loop);
3009 __ bind(&done);
3010 __ PushReturnAddressFrom(r8);
3011 }
3012 // Dispatch.
3013 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 3034 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
3014 } 3035 }
3015 3036
3016 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, 3037 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
3017 Register function_template_info, 3038 Register function_template_info,
3018 Register scratch0, Register scratch1, 3039 Register scratch0, Register scratch1,
3019 Register scratch2, 3040 Register scratch2,
3020 Label* receiver_check_failed) { 3041 Label* receiver_check_failed) {
3021 Register signature = scratch0; 3042 Register signature = scratch0;
3022 Register map = scratch1; 3043 Register map = scratch1;
(...skipping 155 matching lines...) Expand 10 before | Expand all | Expand 10 after
3178 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { 3199 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
3179 Generate_OnStackReplacementHelper(masm, true); 3200 Generate_OnStackReplacementHelper(masm, true);
3180 } 3201 }
3181 3202
3182 #undef __ 3203 #undef __
3183 3204
3184 } // namespace internal 3205 } // namespace internal
3185 } // namespace v8 3206 } // namespace v8
3186 3207
3187 #endif // V8_TARGET_ARCH_X64 3208 #endif // V8_TARGET_ARCH_X64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698