Chromium Code Reviews

Diff: src/builtins/ppc/builtins-ppc.cc

Issue 2655043004: PPC/s390: [Turbofan] Implement call with spread bytecode in assembly code. (Closed)
Patch Set: Created 3 years, 10 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_PPC

 #include "src/codegen.h"
 #include "src/debug/debug.h"
 #include "src/deoptimizer.h"
 #include "src/full-codegen/full-codegen.h"
(...skipping 1166 matching lines...)
   __ addi(r6, r3, Operand(1));

   // Push the arguments. r5, r6, r7 will be modified.
   Generate_InterpreterPushArgs(masm, r6, r5, r6, r7, &stack_overflow);

   // Call the target.
   if (mode == InterpreterPushArgsMode::kJSFunction) {
     __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                       tail_call_mode),
             RelocInfo::CODE_TARGET);
+  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
+    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
+            RelocInfo::CODE_TARGET);
   } else {
-    DCHECK_EQ(mode, InterpreterPushArgsMode::kOther);
     __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }

   __ bind(&stack_overflow);
   {
     __ TailCallRuntime(Runtime::kThrowStackOverflow);
     // Unreachable Code.
     __ bkpt(0);
(...skipping 1482 matching lines...)
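A note on the hunk above: the interpreter's push-args stub now distinguishes three push modes and tail-jumps to a different builtin for each, with the new kWithFinalSpread case routed to the CallWithSpread builtin added further down in this file. The following plain C++ restatement is illustrative only (SelectPushArgsCallTarget is a hypothetical name, and the real code emits a tail-jump rather than returning a code object):

// Hypothetical summary of the dispatch emitted above; not part of this CL.
Handle<Code> SelectPushArgsCallTarget(Isolate* isolate,
                                      InterpreterPushArgsMode mode,
                                      TailCallMode tail_call_mode) {
  switch (mode) {
    case InterpreterPushArgsMode::kJSFunction:
      // Target is already known to be a JSFunction.
      return isolate->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                               tail_call_mode);
    case InterpreterPushArgsMode::kWithFinalSpread:
      // New in this CL: the last pushed argument is a spread to be expanded.
      return isolate->builtins()->CallWithSpread();
    default:
      // kOther: generic Call path.
      return isolate->builtins()->Call(ConvertReceiverMode::kAny,
                                       tail_call_mode);
  }
}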

   // 3. Call to something that is not callable.
   __ bind(&non_callable);
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
     __ Push(r4);
     __ CallRuntime(Runtime::kThrowCalledNonCallable);
   }
 }

+static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
+  Register argc = r3;
+  Register constructor = r4;
+  Register new_target = r6;
+
+  Register scratch = r5;
+  Register scratch2 = r9;
+
+  Register spread = r7;
+  Register spread_map = r8;
+  Register spread_len = r8;
+  Label runtime_call, push_args;
+  __ LoadP(spread, MemOperand(sp, 0));
+  __ JumpIfSmi(spread, &runtime_call);
+  __ LoadP(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
+
+  // Check that the spread is an array.
+  __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
+  __ bne(&runtime_call);
+
+  // Check that we have the original ArrayPrototype.
+  __ LoadP(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
+  __ LoadP(scratch2, NativeContextMemOperand());
+  __ LoadP(scratch2,
+           ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
+  __ cmp(scratch, scratch2);
+  __ bne(&runtime_call);
+
+  // Check that the ArrayPrototype hasn't been modified in a way that would
+  // affect iteration.
+  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
+  __ LoadP(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
+  __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0);
+  __ bne(&runtime_call);
+
+  // Check that the map of the initial array iterator hasn't changed.
+  __ LoadP(scratch2, NativeContextMemOperand());
+  __ LoadP(scratch,
+           ContextMemOperand(scratch2,
+                             Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
+  __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
+  __ LoadP(scratch2,
+           ContextMemOperand(
+               scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
+  __ cmp(scratch, scratch2);
+  __ bne(&runtime_call);
+
+  // For FastPacked kinds, iteration will have the same effect as simply
+  // accessing each property in order.
+  Label no_protector_check;
+  __ LoadP(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
+  __ DecodeField<Map::ElementsKindBits>(scratch);
+  __ cmpi(scratch, Operand(FAST_HOLEY_ELEMENTS));
+  __ bgt(&runtime_call);
+  // For non-FastHoley kinds, we can skip the protector check.
+  __ cmpi(scratch, Operand(FAST_SMI_ELEMENTS));
+  __ beq(&no_protector_check);
+  __ cmpi(scratch, Operand(FAST_ELEMENTS));
+  __ beq(&no_protector_check);
+  // Check the ArrayProtector cell.
+  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
+  __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
+  __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0);
+  __ bne(&runtime_call);
+
+  __ bind(&no_protector_check);
+  // Load the FixedArray backing store, but use the length from the array.
+  __ LoadP(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset));
+  __ SmiUntag(spread_len);
+  __ LoadP(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
+  __ b(&push_args);
+
+  __ bind(&runtime_call);
+  {
+    // Call the builtin for the result of the spread.
+    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+    __ SmiTag(argc);
+    __ Push(constructor, new_target, argc, spread);
+    __ CallRuntime(Runtime::kSpreadIterableFixed);
+    __ mr(spread, r3);
+    __ Pop(constructor, new_target, argc);
+    __ SmiUntag(argc);
+  }
+
+  {
+    // Calculate the new nargs including the result of the spread.
+    __ LoadP(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset));
+    __ SmiUntag(spread_len);
+
+    __ bind(&push_args);
+    // argc += spread_len - 1. Subtract 1 for the spread itself.
+    __ add(argc, argc, spread_len);
+    __ subi(argc, argc, Operand(1));
+
+    // Pop the spread argument off the stack.
+    __ Pop(scratch);
+  }
+
+  // Check for stack overflow.
+  {
+    // Check the stack for overflow. We are not trying to catch interruptions
+    // (i.e. debug break and preemption) here, so check the "real stack limit".
+    Label done;
+    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+    // Make scratch the space we have left. The stack might already be
+    // overflowed here which will cause scratch to become negative.
+    __ sub(scratch, sp, scratch);
+    // Check if the arguments will overflow the stack.
+    __ ShiftLeftImm(r0, spread_len, Operand(kPointerSizeLog2));
+    __ cmp(scratch, r0);
+    __ bgt(&done);  // Signed comparison.
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    __ bind(&done);
+  }
+
+  // Put the evaluated spread onto the stack as additional arguments.
+  {
+    __ li(scratch, Operand::Zero());
+    Label done, loop;
+    __ bind(&loop);
+    __ cmp(scratch, spread_len);
+    __ beq(&done);
+    __ ShiftLeftImm(r0, scratch, Operand(kPointerSizeLog2));
+    __ add(scratch2, spread, r0);
+    __ LoadP(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
+    __ Push(scratch2);
+    __ addi(scratch, scratch, Operand(1));
+    __ b(&loop);
+    __ bind(&done);
+  }
+}
+
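As a reading aid for the helper above: CheckSpreadAndPushToStack emits a fast path that uses a plain JSArray's backing store directly when the array, its prototype, the array-iterator protector and (for holey kinds) the array protector are all untouched, and a slow path that calls Runtime::kSpreadIterableFixed; either way it then replaces the spread argument on the stack with its elements after a stack-overflow check against the real stack limit. Below is a minimal standalone C++ model of that behaviour, using ordinary containers in place of V8's tagged values and the machine stack; every name in it is illustrative, not a V8 API.

#include <cstddef>
#include <functional>
#include <stdexcept>
#include <vector>

using Value = int;  // placeholder for a tagged JS value

// Models the effect of CheckSpreadAndPushToStack on the argument stack.
// 'fast_path_ok' stands in for the JSArray / prototype / protector /
// elements-kind checks; 'iterate_spread' stands in for Runtime::kSpreadIterableFixed.
void ExpandSpreadOnStack(std::vector<Value>& stack,  // pushed args, spread on top
                         std::size_t& argc,          // argument count, spread included
                         bool fast_path_ok,
                         const std::vector<Value>& array_elements,
                         const std::function<std::vector<Value>(Value)>& iterate_spread,
                         std::size_t remaining_stack_slots) {
  Value spread = stack.back();
  std::vector<Value> elements =
      fast_path_ok ? array_elements           // fast path: JSArray backing store
                   : iterate_spread(spread);  // slow path: runtime iterates the spread
  std::size_t spread_len = elements.size();

  // argc += spread_len - 1: the spread argument is replaced by its elements.
  argc = argc + spread_len - 1;
  stack.pop_back();  // pop the spread argument itself

  // Stack-overflow check, analogous to the kRealStackLimitRootIndex comparison.
  if (spread_len > remaining_stack_slots)
    throw std::runtime_error("Runtime::kThrowStackOverflow");

  // Push the evaluated spread as additional arguments, element 0 first.
  for (std::size_t i = 0; i < spread_len; ++i) stack.push_back(elements[i]);
}

In the generated code the fast path takes spread_len from JSArray::kLengthOffset before loading the elements, while the slow path re-reads FixedArray::kLengthOffset from the runtime result; the model above folds both into elements.size().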
+// static
+void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- r3 : the number of arguments (not including the receiver)
+  //  -- r4 : the constructor to call (can be any Object)
+  // -----------------------------------
+
+  // CheckSpreadAndPushToStack will push r6 to save it.
+  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
+  CheckSpreadAndPushToStack(masm);
+  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
+                                            TailCallMode::kDisallow),
+          RelocInfo::CODE_TARGET);
+}
+
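A note on the call entry point above: the shared helper saves r4, r6 and the argument count across its possible runtime call (__ Push(constructor, new_target, argc, spread)), and the comment in Generate_CallWithSpread notes that r6 is pushed only to be saved. Loading the undefined value into r6 first, presumably so the saved slot always holds a valid heap value, lets the same helper serve both the call path, which has no new.target, and the construct path, where r6 holds the real new.target.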
 // static
 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r3 : the number of arguments (not including the receiver)
   //  -- r4 : the constructor to call (checked to be a JSFunction)
   //  -- r6 : the new target (checked to be a constructor)
   // -----------------------------------
   __ AssertFunction(r4);

   // Calling convention for function specific ConstructStubs require
(...skipping 108 matching lines...)
 }

 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r3 : the number of arguments (not including the receiver)
   //  -- r4 : the constructor to call (can be any Object)
   //  -- r6 : the new target (either the same as the constructor or
   //          the JSFunction on which new was invoked initially)
   // -----------------------------------

-  Register argc = r3;
-  Register constructor = r4;
-  Register new_target = r6;
-
-  Register scratch = r5;
-  Register scratch2 = r9;
-
-  Register spread = r7;
-  Register spread_map = r8;
-  __ LoadP(spread, MemOperand(sp, 0));
-  __ LoadP(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
-
-  Label runtime_call, push_args;
-  // Check that the spread is an array.
-  __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
-  __ bne(&runtime_call);
-
-  // Check that we have the original ArrayPrototype.
-  __ LoadP(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
-  __ LoadP(scratch2, NativeContextMemOperand());
-  __ LoadP(scratch2,
-           ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
-  __ cmp(scratch, scratch2);
-  __ bne(&runtime_call);
-
-  // Check that the ArrayPrototype hasn't been modified in a way that would
-  // affect iteration.
-  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
-  __ LoadP(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
-  __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0);
-  __ bne(&runtime_call);
-
-  // Check that the map of the initial array iterator hasn't changed.
-  __ LoadP(scratch2, NativeContextMemOperand());
-  __ LoadP(scratch,
-           ContextMemOperand(scratch2,
-                             Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
-  __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
-  __ LoadP(scratch2,
-           ContextMemOperand(
-               scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
-  __ cmp(scratch, scratch2);
-  __ bne(&runtime_call);
-
-  // For FastPacked kinds, iteration will have the same effect as simply
-  // accessing each property in order.
-  Label no_protector_check;
-  __ LoadP(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
-  __ DecodeField<Map::ElementsKindBits>(scratch);
-  __ cmpi(scratch, Operand(FAST_HOLEY_ELEMENTS));
-  __ bgt(&runtime_call);
-  // For non-FastHoley kinds, we can skip the protector check.
-  __ cmpi(scratch, Operand(FAST_SMI_ELEMENTS));
-  __ beq(&no_protector_check);
-  __ cmpi(scratch, Operand(FAST_ELEMENTS));
-  __ beq(&no_protector_check);
-  // Check the ArrayProtector cell.
-  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
-  __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
-  __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0);
-  __ bne(&runtime_call);
-
-  __ bind(&no_protector_check);
-  // Load the FixedArray backing store.
-  __ LoadP(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
-  __ b(&push_args);
-
-  __ bind(&runtime_call);
-  {
-    // Call the builtin for the result of the spread.
-    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-    __ SmiTag(argc);
-    __ Push(constructor, new_target, argc, spread);
-    __ CallRuntime(Runtime::kSpreadIterableFixed);
-    __ mr(spread, r3);
-    __ Pop(constructor, new_target, argc);
-    __ SmiUntag(argc);
-  }
-
-  Register spread_len = r8;
-  __ bind(&push_args);
-  {
-    // Pop the spread argument off the stack.
-    __ Pop(scratch);
-    // Calculate the new nargs including the result of the spread.
-    __ LoadP(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset));
-    __ SmiUntag(spread_len);
-    // argc += spread_len - 1. Subtract 1 for the spread itself.
-    __ add(argc, argc, spread_len);
-    __ subi(argc, argc, Operand(1));
-  }
-
-  // Check for stack overflow.
-  {
-    // Check the stack for overflow. We are not trying to catch interruptions
-    // (i.e. debug break and preemption) here, so check the "real stack limit".
-    Label done;
-    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
-    // Make scratch the space we have left. The stack might already be
-    // overflowed here which will cause scratch to become negative.
-    __ sub(scratch, sp, scratch);
-    // Check if the arguments will overflow the stack.
-    __ ShiftLeftImm(r0, spread_len, Operand(kPointerSizeLog2));
-    __ cmp(scratch, r0);
-    __ bgt(&done);  // Signed comparison.
-    __ TailCallRuntime(Runtime::kThrowStackOverflow);
-    __ bind(&done);
-  }
-
-  // Put the evaluated spread onto the stack as additional arguments.
-  {
-    __ li(scratch, Operand::Zero());
-    Label done, loop;
-    __ bind(&loop);
-    __ cmp(scratch, spread_len);
-    __ beq(&done);
-    __ ShiftLeftImm(r0, scratch, Operand(kPointerSizeLog2));
-    __ add(scratch2, spread, r0);
-    __ LoadP(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
-    __ Push(scratch2);
-    __ addi(scratch, scratch, Operand(1));
-    __ b(&loop);
-    __ bind(&done);
-  }
-
-  // Dispatch.
+  CheckSpreadAndPushToStack(masm);
   __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }

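With the helper factored out, Generate_ConstructWithSpread reduces to the same shape as the call path: expand the spread in place, then tail-jump to the ordinary Construct builtin with the adjusted argument count. A small worked example of that adjustment (argc += spread_len - 1), using hypothetical values rather than anything from this CL:

#include <cassert>
#include <cstddef>

int main() {
  // new C(a, b, ...xs) with xs holding three elements: the interpreter pushes
  // a, b and the spread array itself, so argc starts at 3.
  std::size_t argc = 3;
  std::size_t spread_len = 3;
  // The spread argument is replaced by its elements.
  argc = argc + spread_len - 1;
  assert(argc == 5);  // a, b, xs[0], xs[1], xs[2]
  return 0;
}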
 // static
 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r4 : requested object size (untagged)
   //  -- lr : return address
   // -----------------------------------
   __ SmiTag(r4);
(...skipping 156 matching lines...)
     __ CallRuntime(Runtime::kThrowStackOverflow);
     __ bkpt(0);
   }
 }

 #undef __
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_PPC