Chromium Code Reviews

Unified Diff: src/builtins/mips64/builtins-mips64.cc

Issue 2649143002: [Turbofan] Implement call with spread bytecode in assembly code. (Closed)
Patch Set: Mips ports (created 3 years, 11 months ago)
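For orientation before the diff: this port folds the interpreter push-args builtins' separate CallableType / PushArgsConstructMode parameters into a single PushArgsMode and adds a spread-aware jump target. A minimal sketch of the resulting dispatch, reconstructed from the diff below (the enum itself is defined elsewhere in the CL, so treat the exact declaration as an assumption):

    enum class PushArgsMode { kJSFunction, kWithFinalSpread, kOther };

    // Call path:      kJSFunction -> CallFunction, kWithFinalSpread -> CallWithSpread,
    //                 kOther -> Call
    // Construct path: kJSFunction -> function-specific construct stub,
    //                 kWithFinalSpread -> ConstructWithSpread, kOther -> Construct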
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_MIPS64

 #include "src/codegen.h"
 #include "src/debug/debug.h"
 #include "src/deoptimizer.h"
 #include "src/full-codegen/full-codegen.h"
(...skipping 1130 matching lines...)
   __ bind(&loop_header);
   __ ld(scratch, MemOperand(index));
   __ Daddu(index, index, Operand(-kPointerSize));
   __ push(scratch);
   __ bind(&loop_check);
   __ Branch(&loop_header, gt, index, Operand(scratch2));
 }

 // static
 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
-    MacroAssembler* masm, TailCallMode tail_call_mode,
-    CallableType function_type) {
+    MacroAssembler* masm, TailCallMode tail_call_mode, PushArgsMode mode) {
   // ----------- S t a t e -------------
   //  -- a0 : the number of arguments (not including the receiver)
   //  -- a2 : the address of the first argument to be pushed. Subsequent
   //          arguments should be consecutive above this, in the same order as
   //          they are to be pushed onto the stack.
   //  -- a1 : the target to call (can be any Object).
   // -----------------------------------
   Label stack_overflow;

   __ Daddu(a3, a0, Operand(1));  // Add one for receiver.

   // This function modifies a2, t0 and a4.
   Generate_InterpreterPushArgs(masm, a3, a2, a4, t0, &stack_overflow);

   // Call the target.
-  if (function_type == CallableType::kJSFunction) {
+  if (mode == PushArgsMode::kJSFunction) {
     __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                       tail_call_mode),
             RelocInfo::CODE_TARGET);
+  } else if (mode == PushArgsMode::kWithFinalSpread) {
+    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
+            RelocInfo::CODE_TARGET);
   } else {
-    DCHECK_EQ(function_type, CallableType::kAny);
     __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }

   __ bind(&stack_overflow);
   {
     __ TailCallRuntime(Runtime::kThrowStackOverflow);
     // Unreachable code.
     __ break_(0xCC);
   }
 }

 // static
 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
-    MacroAssembler* masm, PushArgsConstructMode mode) {
+    MacroAssembler* masm, PushArgsMode mode) {
   // ----------- S t a t e -------------
   //  -- a0 : argument count (not including receiver)
   //  -- a3 : new target
   //  -- a1 : constructor to call
   //  -- a2 : allocation site feedback if available, undefined otherwise.
   //  -- a4 : address of the first argument
   // -----------------------------------
   Label stack_overflow;

   // Push a slot for the receiver.
   __ push(zero_reg);

   // This function modifies t0, a4 and a5.
   Generate_InterpreterPushArgs(masm, a0, a4, a5, t0, &stack_overflow);

   __ AssertUndefinedOrAllocationSite(a2, t0);
-  if (mode == PushArgsConstructMode::kJSFunction) {
+  if (mode == PushArgsMode::kJSFunction) {
     __ AssertFunction(a1);

     // Tail call to the function-specific construct stub (still in the caller
     // context at this point).
     __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
     __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
     __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
     __ Jump(at);
-  } else if (mode == PushArgsConstructMode::kWithFinalSpread) {
+  } else if (mode == PushArgsMode::kWithFinalSpread) {
     // Call the constructor with a0, a1, and a3 unmodified.
     __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
             RelocInfo::CODE_TARGET);
   } else {
-    DCHECK_EQ(PushArgsConstructMode::kOther, mode);
+    DCHECK_EQ(PushArgsMode::kOther, mode);
     // Call the constructor with a0, a1, and a3 unmodified.
     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   }

   __ bind(&stack_overflow);
   {
     __ TailCallRuntime(Runtime::kThrowStackOverflow);
     // Unreachable code.
     __ break_(0xCC);
   }
(...skipping 1414 matching lines...)
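A concrete walk-through of the construct builtin above (values assumed for illustration): for new C(...[1, 2, 3]) the builtin is entered with a0 == 1, since the spread is the only bytecode-level argument; it pushes a zero slot for the receiver plus that one argument, then jumps to ConstructWithSpread, which later rewrites a0 to 1 + 3 - 1 == 3 once the spread is expanded (see CheckSpreadAndPushToStack below).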

   // 3. Call to something that is not callable.
   __ bind(&non_callable);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     __ Push(a1);
     __ CallRuntime(Runtime::kThrowCalledNonCallable);
   }
 }

+static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
+  Register argc = a0;
+  Register constructor = a1;
+  Register new_target = a3;
+
+  Register scratch = t0;
+  Register scratch2 = t1;
+
+  Register spread = a2;
+  Register spread_map = a4;
+
+  Register spread_len = a4;
+
+  Register native_context = a5;
+
+  __ ld(spread, MemOperand(sp, 0));
+  __ ld(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
+  __ ld(native_context, NativeContextMemOperand());
+
+  Label runtime_call, push_args;
+  // Check that the spread is an array.
+  __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset));
+  __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE));
+
+  // Check that we have the original ArrayPrototype.
+  __ ld(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
+  __ ld(scratch2, ContextMemOperand(native_context,
+                                    Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
+  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
+
+  // Check that the ArrayPrototype hasn't been modified in a way that would
+  // affect iteration.
+  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
+  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
+  __ Branch(&runtime_call, ne, scratch,
+            Operand(Smi::FromInt(Isolate::kProtectorValid)));
+
+  // Check that the map of the initial array iterator hasn't changed.
+  __ ld(scratch,
+        ContextMemOperand(native_context,
+                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
+  __ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
+  __ ld(scratch2,
+        ContextMemOperand(native_context,
+                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
+  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
+
+  // For FastPacked kinds, iteration will have the same effect as simply
+  // accessing each property in order.
+  Label no_protector_check;
+  __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
+  __ DecodeField<Map::ElementsKindBits>(scratch);
+  __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS));
+  // For non-FastHoley kinds, we can skip the protector check.
+  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS));
+  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS));
+  // Check the ArrayProtector cell.
+  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
+  __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
+  __ Branch(&runtime_call, ne, scratch,
+            Operand(Smi::FromInt(Isolate::kProtectorValid)));
+
+  __ bind(&no_protector_check);
+  // Load the FixedArray backing store, but use the length from the array.
+  __ lw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset));
+  __ ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
+  __ Branch(&push_args);
+
+  __ bind(&runtime_call);
+  {
+    // Call the builtin for the result of the spread.
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ SmiTag(argc);
+    __ Push(constructor, new_target, argc, spread);
+    __ CallRuntime(Runtime::kSpreadIterableFixed);
+    __ mov(spread, v0);
+    __ Pop(constructor, new_target, argc);
+    __ SmiUntag(argc);
+  }
+
+  {
+    // Calculate the new nargs including the result of the spread.
+    __ lw(spread_len,
+          UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset));
+
+    __ bind(&push_args);
+    // argc += spread_len - 1. Subtract 1 for the spread itself.
+    __ Daddu(argc, argc, spread_len);
+    __ Dsubu(argc, argc, Operand(1));
+
+    // Pop the spread argument off the stack.
+    __ Pop(scratch);
+  }
+
+  // Check for stack overflow.
+  {
+    // Check the stack for overflow. We are not trying to catch interruptions
+    // (i.e. debug break and preemption) here, so check the "real stack limit".
+    Label done;
+    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+    // Make scratch the space we have left. The stack might already be
+    // overflowed here which will cause scratch to become negative.
+    __ Dsubu(scratch, sp, scratch);
+    // Check if the arguments will overflow the stack.
+    __ dsll(at, spread_len, kPointerSizeLog2);
+    __ Branch(&done, gt, scratch, Operand(at));  // Signed comparison.
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    __ bind(&done);
+  }
+
+  // Put the evaluated spread onto the stack as additional arguments.
+  {
+    __ mov(scratch, zero_reg);
+    Label done, loop;
+    __ bind(&loop);
+    __ Branch(&done, eq, scratch, Operand(spread_len));
+    __ Dlsa(scratch2, spread, scratch, kPointerSizeLog2);
+    __ ld(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
+    __ Push(scratch2);
+    __ Daddu(scratch, scratch, Operand(1));
+    __ Branch(&loop);
+    __ bind(&done);
+  }
+}
+
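In prose, the helper above takes the fast path only while every cheap invariant holds, and in either path replaces the spread slot with its elements. A compact C++-flavoured sketch of the decision (helper names invented for illustration; this is not a real V8 API):

    // Fast path iff the spread is a plain JSArray whose iteration is
    // provably equivalent to indexed element access.
    bool use_fast_path =
        instance_type == JS_ARRAY_TYPE &&
        spread_prototype == initial_array_prototype &&
        array_iterator_protector_valid() &&      // protector cell still valid
        initial_array_iterator_map_unchanged() &&
        kind <= FAST_HOLEY_ELEMENTS &&           // one of the fast kinds
        (kind == FAST_SMI_ELEMENTS || kind == FAST_ELEMENTS ||  // packed, or...
         array_protector_valid());               // ...holey with protector intact
    // Fast path: read the FixedArray backing store directly.
    // Slow path: Runtime::kSpreadIterableFixed materializes a FixedArray.
    // Either way: argc += spread_len - 1 (the spread slot becomes its elements).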
+// static
+void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- a0 : the number of arguments (not including the receiver)
+  //  -- a1 : the target to call (can be any Object).
+  // -----------------------------------
+
+  // CheckSpreadAndPushToStack will push a3 to save it.
+  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
+  CheckSpreadAndPushToStack(masm);
+  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
+                                            TailCallMode::kDisallow),
+          RelocInfo::CODE_TARGET);
+}
+
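A quick sanity check on the call builtin above (numbers assumed for illustration): for f(1, 2, ...[3, 4]) it is entered with a0 == 3, i.e. two literal arguments plus the spread, receiver excluded, and it tail-calls Call with a0 == 3 + 2 - 1 == 4. Preloading a3 with undefined matters because the helper's runtime fallback pushes and pops a3 as if it were a new target, and calls have none.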
 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : the number of arguments (not including the receiver)
   //  -- a1 : the constructor to call (checked to be a JSFunction)
   //  -- a3 : the new target (checked to be a constructor)
   // -----------------------------------
   __ AssertFunction(a1);

   // Calling convention for function-specific ConstructStubs requires
   // a2 to contain either an AllocationSite or undefined.
(...skipping 167 matching lines...)

 // static
 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : the number of arguments (not including the receiver)
   //  -- a1 : the constructor to call (can be any Object)
   //  -- a3 : the new target (either the same as the constructor or
   //          the JSFunction on which new was invoked initially)
   // -----------------------------------

-  Register argc = a0;
-  Register constructor = a1;
-  Register new_target = a3;
-
-  Register scratch = t0;
-  Register scratch2 = t1;
-
-  Register spread = a2;
-  Register spread_map = a4;
-
-  Register native_context = a5;
-
-  __ ld(spread, MemOperand(sp, 0));
-  __ ld(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
-  __ ld(native_context, NativeContextMemOperand());
-
-  Label runtime_call, push_args;
-  // Check that the spread is an array.
-  __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset));
-  __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE));
-
-  // Check that we have the original ArrayPrototype.
-  __ ld(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
-  __ ld(scratch2, ContextMemOperand(native_context,
-                                    Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
-  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
-
-  // Check that the ArrayPrototype hasn't been modified in a way that would
-  // affect iteration.
-  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
-  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
-  __ Branch(&runtime_call, ne, scratch,
-            Operand(Smi::FromInt(Isolate::kProtectorValid)));
-
-  // Check that the map of the initial array iterator hasn't changed.
-  __ ld(scratch,
-        ContextMemOperand(native_context,
-                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
-  __ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
-  __ ld(scratch2,
-        ContextMemOperand(native_context,
-                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
-  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
-
-  // For FastPacked kinds, iteration will have the same effect as simply
-  // accessing each property in order.
-  Label no_protector_check;
-  __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
-  __ DecodeField<Map::ElementsKindBits>(scratch);
-  __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS));
-  // For non-FastHoley kinds, we can skip the protector check.
-  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS));
-  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS));
-  // Check the ArrayProtector cell.
-  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
-  __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
-  __ Branch(&runtime_call, ne, scratch,
-            Operand(Smi::FromInt(Isolate::kProtectorValid)));
-
-  __ bind(&no_protector_check);
-  // Load the FixedArray backing store.
-  __ ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
-  __ Branch(&push_args);
-
-  __ bind(&runtime_call);
-  {
-    // Call the builtin for the result of the spread.
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ SmiTag(argc);
-    __ Push(constructor, new_target, argc, spread);
-    __ CallRuntime(Runtime::kSpreadIterableFixed);
-    __ mov(spread, v0);
-    __ Pop(constructor, new_target, argc);
-    __ SmiUntag(argc);
-  }
-
-  Register spread_len = a4;
-  __ bind(&push_args);
-  {
-    // Pop the spread argument off the stack.
-    __ Pop(scratch);
-    // Calculate the new nargs including the result of the spread.
-    __ lw(spread_len,
-          UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset));
-    // argc += spread_len - 1. Subtract 1 for the spread itself.
-    __ Daddu(argc, argc, spread_len);
-    __ Dsubu(argc, argc, Operand(1));
-  }
-
-  // Check for stack overflow.
-  {
-    // Check the stack for overflow. We are not trying to catch interruptions
-    // (i.e. debug break and preemption) here, so check the "real stack limit".
-    Label done;
-    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
-    // Make scratch the space we have left. The stack might already be
-    // overflowed here which will cause ip to become negative.
-    __ Dsubu(scratch, sp, scratch);
-    // Check if the arguments will overflow the stack.
-    __ dsll(at, spread_len, kPointerSizeLog2);
-    __ Branch(&done, gt, scratch, Operand(at));  // Signed comparison.
-    __ TailCallRuntime(Runtime::kThrowStackOverflow);
-    __ bind(&done);
-  }
-
-  // Put the evaluated spread onto the stack as additional arguments.
-  {
-    __ mov(scratch, zero_reg);
-    Label done, loop;
-    __ bind(&loop);
-    __ Branch(&done, eq, scratch, Operand(spread_len));
-    __ Dlsa(scratch2, spread, scratch, kPointerSizeLog2);
-    __ ld(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
-    __ Push(scratch2);
-    __ Daddu(scratch, scratch, Operand(1));
-    __ Branch(&loop);
-    __ bind(&done);
-  }
-
-  // Dispatch.
+  CheckSpreadAndPushToStack(masm);
   __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }

 // static
 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : requested object size (untagged)
   //  -- ra : return address
   // -----------------------------------
   __ SmiTag(a0);
(...skipping 157 matching lines...)
     __ break_(0xCC);
   }
 }

 #undef __

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_MIPS64