OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_S390 | 5 #if V8_TARGET_ARCH_S390 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 1173 matching lines...)
1184 __ AddP(r5, r2, Operand(1)); | 1184 __ AddP(r5, r2, Operand(1)); |
1185 | 1185 |
1186 // Push the arguments. | 1186 // Push the arguments. |
1187 Generate_InterpreterPushArgs(masm, r5, r4, r5, r6, &stack_overflow); | 1187 Generate_InterpreterPushArgs(masm, r5, r4, r5, r6, &stack_overflow); |
1188 | 1188 |
1189 // Call the target. | 1189 // Call the target. |
1190 if (mode == InterpreterPushArgsMode::kJSFunction) { | 1190 if (mode == InterpreterPushArgsMode::kJSFunction) { |
1191 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, | 1191 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, |
1192 tail_call_mode), | 1192 tail_call_mode), |
1193 RelocInfo::CODE_TARGET); | 1193 RelocInfo::CODE_TARGET); |
1194 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { | |
1195 __ Jump(masm->isolate()->builtins()->CallWithSpread(), | |
1196 RelocInfo::CODE_TARGET); | |
1194 } else { | 1197 } else { |
1195 DCHECK_EQ(mode, InterpreterPushArgsMode::kOther); | |
1196 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, | 1198 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
1197 tail_call_mode), | 1199 tail_call_mode), |
1198 RelocInfo::CODE_TARGET); | 1200 RelocInfo::CODE_TARGET); |
1199 } | 1201 } |
1200 | 1202 |
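The new middle branch is the point of this hunk: interpreter-pushed calls whose final argument is a spread now tail-jump to the CallWithSpread builtin introduced later in this file, instead of the generic Call. A toy C++ sketch of the resulting three-way dispatch (mode names come from the diff; the hypothetical CallTargetFor helper and its string return are illustrative, the real code jumps to a builtin code object):

    #include <string>

    enum class InterpreterPushArgsMode { kJSFunction, kWithFinalSpread, kOther };

    // Toy model of the dispatch above; the real builtin tail-jumps rather
    // than returning a name.
    std::string CallTargetFor(InterpreterPushArgsMode mode) {
      switch (mode) {
        case InterpreterPushArgsMode::kJSFunction:
          return "CallFunction";    // ConvertReceiverMode::kAny + tail_call_mode
        case InterpreterPushArgsMode::kWithFinalSpread:
          return "CallWithSpread";  // branch added in this change
        case InterpreterPushArgsMode::kOther:
          return "Call";            // ConvertReceiverMode::kAny + tail_call_mode
      }
      return "";  // unreachable: all enum values handled above
    }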
1201 __ bind(&stack_overflow); | 1203 __ bind(&stack_overflow); |
1202 { | 1204 { |
1203 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1205 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1204 // Unreachable Code. | 1206 // Unreachable Code. |
1205 __ bkpt(0); | 1207 __ bkpt(0); |
(...skipping 1488 matching lines...)
2694 | 2696 |
2695 // 3. Call to something that is not callable. | 2697 // 3. Call to something that is not callable. |
2696 __ bind(&non_callable); | 2698 __ bind(&non_callable); |
2697 { | 2699 { |
2698 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2700 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
2699 __ Push(r3); | 2701 __ Push(r3); |
2700 __ CallRuntime(Runtime::kThrowCalledNonCallable); | 2702 __ CallRuntime(Runtime::kThrowCalledNonCallable); |
2701 } | 2703 } |
2702 } | 2704 } |
2703 | 2705 |
2706 static void CheckSpreadAndPushToStack(MacroAssembler* masm) { | |
2707 Register argc = r2; | |
2708 Register constructor = r3; | |
2709 Register new_target = r5; | |
2710 | |
2711 Register scratch = r4; | |
2712 Register scratch2 = r8; | |
2713 | |
2714 Register spread = r6; | |
2715 Register spread_map = r7; | |
2716 Register spread_len = r7; | |
2717 Label runtime_call, push_args; | |
2718 __ LoadP(spread, MemOperand(sp, 0)); | |
2719 __ JumpIfSmi(spread, &runtime_call); | |
2720 __ LoadP(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); | |
2721 | |
2722 // Check that the spread is an array. | |
2723 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE); | |
2724 __ bne(&runtime_call); | |
2725 | |
2726 // Check that we have the original ArrayPrototype. | |
2727 __ LoadP(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); | |
2728 __ LoadP(scratch2, NativeContextMemOperand()); | |
2729 __ LoadP(scratch2, | |
2730 ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); | |
2731 __ CmpP(scratch, scratch2); | |
2732 __ bne(&runtime_call); | |
2733 | |
2734 // Check that the ArrayPrototype hasn't been modified in a way that would | |
2735 // affect iteration. | |
2736 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); | |
2737 __ LoadP(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); | |
2738 __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0); | |
2739 __ bne(&runtime_call); | |
2740 | |
2741 // Check that the map of the initial array iterator hasn't changed. | |
2742 __ LoadP(scratch2, NativeContextMemOperand()); | |
2743 __ LoadP(scratch, | |
2744 ContextMemOperand(scratch2, | |
2745 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); | |
2746 __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
2747 __ LoadP(scratch2, | |
2748 ContextMemOperand( | |
2749 scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); | |
2750 __ CmpP(scratch, scratch2); | |
2751 __ bne(&runtime_call); | |
2752 | |
2753 // For FastPacked kinds, iteration will have the same effect as simply | |
2754 // accessing each property in order. | |
2755 Label no_protector_check; | |
2756 __ LoadP(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); | |
john.yan 2017/02/01 03:26:42
Using LoadB here will fix the endian issue.
2757 __ DecodeField<Map::ElementsKindBits>(scratch); | |
2758 __ CmpP(scratch, Operand(FAST_HOLEY_ELEMENTS)); | |
2759 __ bgt(&runtime_call); | |
2760 // For non-FastHoley kinds, we can skip the protector check. | |
2761 __ CmpP(scratch, Operand(FAST_SMI_ELEMENTS)); | |
2762 __ beq(&no_protector_check); | |
2763 __ CmpP(scratch, Operand(FAST_ELEMENTS)); | |
2764 __ beq(&no_protector_check); | |
2765 // Check the ArrayProtector cell. | |
2766 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); | |
2767 __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); | |
2768 __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0); | |
2769 __ bne(&runtime_call); | |
2770 | |
2771 __ bind(&no_protector_check); | |
2772 // Load the FixedArray backing store, but use the length from the array. | |
2773 __ LoadP(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset)); | |
2774 __ SmiUntag(spread_len); | |
2775 __ LoadP(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); | |
2776 __ b(&push_args); | |
2777 | |
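Taken together, the guards above form a single decision: the spread may be read straight out of the JSArray's FixedArray backing store only when every check passes, and anything else falls through to the runtime. A condensed C++ sketch of that predicate, with a hypothetical CanSpreadFromBackingStore helper and an illustrative enum mirroring V8's fast-kind ordering (neither is the V8 API):

    enum ElementsKind {
      FAST_SMI_ELEMENTS,        // packed Smis
      FAST_HOLEY_SMI_ELEMENTS,  // Smis with holes
      FAST_ELEMENTS,            // packed tagged values
      FAST_HOLEY_ELEMENTS,      // tagged values with holes
      OTHER_KIND                // doubles, dictionary mode, ...
    };

    // Condenses the branch structure of the assembly above into one predicate.
    bool CanSpreadFromBackingStore(bool is_js_array, bool has_initial_prototype,
                                   bool iterator_protector_valid,
                                   bool iterator_maps_unchanged,
                                   ElementsKind kind,
                                   bool array_protector_valid) {
      if (!is_js_array || !has_initial_prototype) return false;   // &runtime_call
      if (!iterator_protector_valid || !iterator_maps_unchanged)
        return false;                                             // &runtime_call
      if (kind > FAST_HOLEY_ELEMENTS) return false;               // bgt &runtime_call
      // Packed kinds iterate like plain indexed access: skip the protector check.
      if (kind == FAST_SMI_ELEMENTS || kind == FAST_ELEMENTS) return true;
      // Holey kinds must not observe elements supplied via the prototype chain.
      return array_protector_valid;                               // ArrayProtector cell
    }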
2778 __ bind(&runtime_call); | |
2779 { | |
2780 // Call the builtin for the result of the spread. | |
2781 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | |
2782 __ SmiTag(argc); | |
2783 __ Push(constructor, new_target, argc, spread); | |
2784 __ CallRuntime(Runtime::kSpreadIterableFixed); | |
2785 __ LoadRR(spread, r2); | |
2786 __ Pop(constructor, new_target, argc); | |
2787 __ SmiUntag(argc); | |
2788 } | |
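In the slow path, argc is Smi-tagged so it survives the call as a valid tagged value, the three live registers plus the spread are saved across the call, and Runtime::kSpreadIterableFixed returns the fully iterated elements as a FixedArray in r2 (the usual runtime return register), which then takes the place of spread. This reading follows directly from the code above; the runtime function's exact contract is not restated in the diff.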
2789 | |
2790 { | |
2791 // Calculate the new nargs including the result of the spread. | |
2792 __ LoadP(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset)); | |
2793 __ SmiUntag(spread_len); | |
2794 | |
2795 __ bind(&push_args); | |
2796 // argc += spread_len - 1. Subtract 1 for the spread itself. | |
2797 __ AddP(argc, argc, spread_len); | |
2798 __ SubP(argc, argc, Operand(1)); | |
2799 | |
2800 // Pop the spread argument off the stack. | |
2801 __ Pop(scratch); | |
2802 } | |
2803 | |
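A quick worked example of the adjustment above, for a hypothetical call f(a, b, ...xs) with xs of length 3: argc arrives as 3 (a, b, and the spread itself counted once), and after argc += spread_len - 1 it becomes 3 + 3 - 1 = 5, matching the five values that end up on the stack.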
2804 // Check for stack overflow. | |
2805 { | |
2806 // Check the stack for overflow. We are not trying to catch interruptions | |
2807 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2808 Label done; | |
2809 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); | |
2810 // Make scratch the space we have left. The stack might already be | |
2811 // overflowed here which will cause scratch to become negative. | |
2812 __ SubP(scratch, sp, scratch); | |
2813 // Check if the arguments will overflow the stack. | |
2814 __ ShiftLeftP(r0, spread_len, Operand(kPointerSizeLog2)); | |
2815 __ CmpP(scratch, r0); | |
2816 __ bgt(&done); // Signed comparison. | |
2817 __ TailCallRuntime(Runtime::kThrowStackOverflow); | |
2818 __ bind(&done); | |
2819 } | |
2820 | |
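The block above computes the remaining stack space and compares it, signed, against the bytes the spread will push. A sketch of the same predicate in plain C++ (SpreadFitsOnStack is a hypothetical helper, not a V8 function):

    #include <cstddef>
    #include <cstdint>

    // space_left goes negative when sp is already below the real stack
    // limit; the signed comparison then correctly reports an overflow.
    bool SpreadFitsOnStack(uintptr_t sp, uintptr_t real_stack_limit,
                           size_t spread_len, size_t pointer_size) {
      intptr_t space_left =
          static_cast<intptr_t>(sp) - static_cast<intptr_t>(real_stack_limit);
      intptr_t bytes_needed = static_cast<intptr_t>(spread_len * pointer_size);
      return space_left > bytes_needed;  // mirrors the bgt (signed) above
    }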
2821 // Put the evaluated spread onto the stack as additional arguments. | |
2822 { | |
2823 __ LoadImmP(scratch, Operand::Zero()); | |
2824 Label done, loop; | |
2825 __ bind(&loop); | |
2826 __ CmpP(scratch, spread_len); | |
2827 __ beq(&done); | |
2828 __ ShiftLeftP(r0, scratch, Operand(kPointerSizeLog2)); | |
2829 __ AddP(scratch2, spread, r0); | |
2830 __ LoadP(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); | |
2831 __ Push(scratch2); | |
2832 __ AddP(scratch, scratch, Operand(1)); | |
2833 __ b(&loop); | |
2834 __ bind(&done); | |
2835 } | |
2836 } | |
2837 | |
2838 // static | |
2839 void Builtins::Generate_CallWithSpread(MacroAssembler* masm) { | |
2840 // ----------- S t a t e ------------- | |
2841 // -- r2 : the number of arguments (not including the receiver) | |
2842 // -- r3 : the constructor to call (can be any Object) | |
2843 // ----------------------------------- | |
2844 | |
2845 // CheckSpreadAndPushToStack will push r5 to save it. | |
2846 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | |
2847 CheckSpreadAndPushToStack(masm); | |
2848 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, | |
2849 TailCallMode::kDisallow), | |
2850 RelocInfo::CODE_TARGET); | |
2851 } | |
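A note on the undefined preload: CheckSpreadAndPushToStack pushes and pops new_target (r5) around its runtime call, as the comment above says, so r5 must hold a valid tagged value on entry; undefined is the natural stand-in because a plain call has no new target. (The GC-safety rationale is an inference from the code, not stated in the CL.)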
2852 | |
2704 // static | 2853 // static |
2705 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | 2854 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { |
2706 // ----------- S t a t e ------------- | 2855 // ----------- S t a t e ------------- |
2707 // -- r2 : the number of arguments (not including the receiver) | 2856 // -- r2 : the number of arguments (not including the receiver) |
2708 // -- r3 : the constructor to call (checked to be a JSFunction) | 2857 // -- r3 : the constructor to call (checked to be a JSFunction) |
2709 // -- r5 : the new target (checked to be a constructor) | 2858 // -- r5 : the new target (checked to be a constructor) |
2710 // ----------------------------------- | 2859 // ----------------------------------- |
2711 __ AssertFunction(r3); | 2860 __ AssertFunction(r3); |
2712 | 2861 |
2713 // Calling convention for function specific ConstructStubs require | 2862 // Calling convention for function specific ConstructStubs require |
(...skipping 108 matching lines...)
2822 } | 2971 } |
2823 | 2972 |
2824 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { | 2973 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { |
2825 // ----------- S t a t e ------------- | 2974 // ----------- S t a t e ------------- |
2826 // -- r2 : the number of arguments (not including the receiver) | 2975 // -- r2 : the number of arguments (not including the receiver) |
2827 // -- r3 : the constructor to call (can be any Object) | 2976 // -- r3 : the constructor to call (can be any Object) |
2828 // -- r5 : the new target (either the same as the constructor or | 2977 // -- r5 : the new target (either the same as the constructor or |
2829 // the JSFunction on which new was invoked initially) | 2978 // the JSFunction on which new was invoked initially) |
2830 // ----------------------------------- | 2979 // ----------------------------------- |
2831 | 2980 |
2832 Register argc = r2; | 2981 CheckSpreadAndPushToStack(masm); |
2833 Register constructor = r3; | |
2834 Register new_target = r5; | |
2835 | |
2836 Register scratch = r4; | |
2837 Register scratch2 = r8; | |
2838 | |
2839 Register spread = r6; | |
2840 Register spread_map = r7; | |
2841 __ LoadP(spread, MemOperand(sp, 0)); | |
2842 __ LoadP(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); | |
2843 | |
2844 Label runtime_call, push_args; | |
2845 // Check that the spread is an array. | |
2846 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE); | |
2847 __ bne(&runtime_call); | |
2848 | |
2849 // Check that we have the original ArrayPrototype. | |
2850 __ LoadP(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); | |
2851 __ LoadP(scratch2, NativeContextMemOperand()); | |
2852 __ LoadP(scratch2, | |
2853 ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); | |
2854 __ CmpP(scratch, scratch2); | |
2855 __ bne(&runtime_call); | |
2856 | |
2857 // Check that the ArrayPrototype hasn't been modified in a way that would | |
2858 // affect iteration. | |
2859 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); | |
2860 __ LoadP(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); | |
2861 __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0); | |
2862 __ bne(&runtime_call); | |
2863 | |
2864 // Check that the map of the initial array iterator hasn't changed. | |
2865 __ LoadP(scratch2, NativeContextMemOperand()); | |
2866 __ LoadP(scratch, | |
2867 ContextMemOperand(scratch2, | |
2868 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); | |
2869 __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
2870 __ LoadP(scratch2, | |
2871 ContextMemOperand( | |
2872 scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); | |
2873 __ CmpP(scratch, scratch2); | |
2874 __ bne(&runtime_call); | |
2875 | |
2876 // For FastPacked kinds, iteration will have the same effect as simply | |
2877 // accessing each property in order. | |
2878 Label no_protector_check; | |
2879 __ LoadP(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); | |
2880 __ DecodeField<Map::ElementsKindBits>(scratch); | |
2881 __ CmpP(scratch, Operand(FAST_HOLEY_ELEMENTS)); | |
2882 __ bgt(&runtime_call); | |
2883 // For non-FastHoley kinds, we can skip the protector check. | |
2884 __ CmpP(scratch, Operand(FAST_SMI_ELEMENTS)); | |
2885 __ beq(&no_protector_check); | |
2886 __ CmpP(scratch, Operand(FAST_ELEMENTS)); | |
2887 __ beq(&no_protector_check); | |
2888 // Check the ArrayProtector cell. | |
2889 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); | |
2890 __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); | |
2891 __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0); | |
2892 __ bne(&runtime_call); | |
2893 | |
2894 __ bind(&no_protector_check); | |
2895 // Load the FixedArray backing store. | |
2896 __ LoadP(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); | |
2897 __ b(&push_args); | |
2898 | |
2899 __ bind(&runtime_call); | |
2900 { | |
2901 // Call the builtin for the result of the spread. | |
2902 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | |
2903 __ SmiTag(argc); | |
2904 __ Push(constructor, new_target, argc, spread); | |
2905 __ CallRuntime(Runtime::kSpreadIterableFixed); | |
2906 __ LoadRR(spread, r2); | |
2907 __ Pop(constructor, new_target, argc); | |
2908 __ SmiUntag(argc); | |
2909 } | |
2910 | |
2911 Register spread_len = r7; | |
2912 __ bind(&push_args); | |
2913 { | |
2914 // Pop the spread argument off the stack. | |
2915 __ Pop(scratch); | |
2916 // Calculate the new nargs including the result of the spread. | |
2917 __ LoadP(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset)); | |
2918 __ SmiUntag(spread_len); | |
2919 // argc += spread_len - 1. Subtract 1 for the spread itself. | |
2920 __ AddP(argc, argc, spread_len); | |
2921 __ SubP(argc, argc, Operand(1)); | |
2922 } | |
2923 | |
2924 // Check for stack overflow. | |
2925 { | |
2926 // Check the stack for overflow. We are not trying to catch interruptions | |
2927 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2928 Label done; | |
2929 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); | |
2930 // Make scratch the space we have left. The stack might already be | |
2931 // overflowed here which will cause scratch to become negative. | |
2932 __ SubP(scratch, sp, scratch); | |
2933 // Check if the arguments will overflow the stack. | |
2934 __ ShiftLeftP(r0, spread_len, Operand(kPointerSizeLog2)); | |
2935 __ CmpP(scratch, r0); | |
2936 __ bgt(&done); // Signed comparison. | |
2937 __ TailCallRuntime(Runtime::kThrowStackOverflow); | |
2938 __ bind(&done); | |
2939 } | |
2940 | |
2941 // Put the evaluated spread onto the stack as additional arguments. | |
2942 { | |
2943 __ LoadImmP(scratch, Operand::Zero()); | |
2944 Label done, loop; | |
2945 __ bind(&loop); | |
2946 __ CmpP(scratch, spread_len); | |
2947 __ beq(&done); | |
2948 __ ShiftLeftP(r0, scratch, Operand(kPointerSizeLog2)); | |
2949 __ AddP(scratch2, spread, r0); | |
2950 __ LoadP(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); | |
2951 __ Push(scratch2); | |
2952 __ AddP(scratch, scratch, Operand(1)); | |
2953 __ b(&loop); | |
2954 __ bind(&done); | |
2955 } | |
2956 | |
2957 // Dispatch. | |
2958 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 2982 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
2959 } | 2983 } |
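With the shared CheckSpreadAndPushToStack helper in place, Generate_ConstructWithSpread reduces to expanding the spread onto the stack and tail-jumping to the generic Construct builtin; unlike the call case, r5 already holds the real new target, so no undefined preload is needed (an observation from the code, not a comment in the CL).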
2960 | 2984 |
2961 // static | 2985 // static |
2962 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { | 2986 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { |
2963 // ----------- S t a t e ------------- | 2987 // ----------- S t a t e ------------- |
2964 // -- r3 : requested object size (untagged) | 2988 // -- r3 : requested object size (untagged) |
2965 // -- lr : return address | 2989 // -- lr : return address |
2966 // ----------------------------------- | 2990 // ----------------------------------- |
2967 __ SmiTag(r3); | 2991 __ SmiTag(r3); |
(...skipping 156 matching lines...)
3124 __ bkpt(0); | 3148 __ bkpt(0); |
3125 } | 3149 } |
3126 } | 3150 } |
3127 | 3151 |
3128 #undef __ | 3152 #undef __ |
3129 | 3153 |
3130 } // namespace internal | 3154 } // namespace internal |
3131 } // namespace v8 | 3155 } // namespace v8 |
3132 | 3156 |
3133 #endif // V8_TARGET_ARCH_S390 | 3157 #endif // V8_TARGET_ARCH_S390 |