Chromium Code Reviews

Unified Diff: src/builtins/mips/builtins-mips.cc

Issue 2649143002: [Turbofan] Implement call with spread bytecode in assembly code. (Closed)
Patch Set: Mips ports (created 3 years, 11 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #if V8_TARGET_ARCH_MIPS
 
 #include "src/codegen.h"
 #include "src/debug/debug.h"
 #include "src/deoptimizer.h"
 #include "src/full-codegen/full-codegen.h"
(...skipping 1139 matching lines...)
   __ bind(&loop_header);
   __ lw(scratch, MemOperand(index));
   __ Addu(index, index, Operand(-kPointerSize));
   __ push(scratch);
   __ bind(&loop_check);
   __ Branch(&loop_header, gt, index, Operand(scratch2));
 }
 
 // static
 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
-    MacroAssembler* masm, TailCallMode tail_call_mode,
-    CallableType function_type) {
+    MacroAssembler* masm, TailCallMode tail_call_mode, PushArgsMode mode) {
   // ----------- S t a t e -------------
   //  -- a0 : the number of arguments (not including the receiver)
   //  -- a2 : the address of the first argument to be pushed. Subsequent
   //          arguments should be consecutive above this, in the same order as
   //          they are to be pushed onto the stack.
   //  -- a1 : the target to call (can be any Object).
   // -----------------------------------
   Label stack_overflow;
 
   __ Addu(t0, a0, Operand(1));  // Add one for receiver.
 
   // This function modifies a2, t4 and t1.
   Generate_InterpreterPushArgs(masm, t0, a2, t4, t1, &stack_overflow);
 
   // Call the target.
-  if (function_type == CallableType::kJSFunction) {
+  if (mode == PushArgsMode::kJSFunction) {
     __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                       tail_call_mode),
             RelocInfo::CODE_TARGET);
+  } else if (mode == PushArgsMode::kWithFinalSpread) {
+    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
+            RelocInfo::CODE_TARGET);
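+    // (The spread was pushed last; the CallWithSpread builtin expands it
+    // into individual stack arguments before performing the actual call.)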
   } else {
-    DCHECK_EQ(function_type, CallableType::kAny);
     __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
 
   __ bind(&stack_overflow);
   {
     __ TailCallRuntime(Runtime::kThrowStackOverflow);
     // Unreachable code.
     __ break_(0xCC);
   }
 }
 
 // static
 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
-    MacroAssembler* masm, PushArgsConstructMode mode) {
+    MacroAssembler* masm, PushArgsMode mode) {
   // ----------- S t a t e -------------
   //  -- a0 : argument count (not including receiver)
   //  -- a3 : new target
   //  -- a1 : constructor to call
   //  -- a2 : allocation site feedback if available, undefined otherwise.
   //  -- t4 : address of the first argument
   // -----------------------------------
   Label stack_overflow;
 
   // Push a slot for the receiver.
   __ push(zero_reg);
 
   // This function modifies t4, t1 and t0.
   Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow);
 
   __ AssertUndefinedOrAllocationSite(a2, t0);
-  if (mode == PushArgsConstructMode::kJSFunction) {
+  if (mode == PushArgsMode::kJSFunction) {
     __ AssertFunction(a1);
 
     // Tail call to the function-specific construct stub (still in the caller
     // context at this point).
     __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
     __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
     __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
     __ Jump(at);
-  } else if (mode == PushArgsConstructMode::kWithFinalSpread) {
+  } else if (mode == PushArgsMode::kWithFinalSpread) {
     // Call the constructor with a0, a1, and a3 unmodified.
     __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
             RelocInfo::CODE_TARGET);
   } else {
-    DCHECK_EQ(PushArgsConstructMode::kOther, mode);
+    DCHECK_EQ(PushArgsMode::kOther, mode);
     // Call the constructor with a0, a1, and a3 unmodified.
     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   }
 
   __ bind(&stack_overflow);
   {
     __ TailCallRuntime(Runtime::kThrowStackOverflow);
     // Unreachable code.
     __ break_(0xCC);
   }
(...skipping 1379 matching lines...)
 
   // 3. Call to something that is not callable.
   __ bind(&non_callable);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     __ Push(a1);
     __ CallRuntime(Runtime::kThrowCalledNonCallable);
   }
 }
 
+static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
+  Register argc = a0;
+  Register constructor = a1;
+  Register new_target = a3;
+
+  Register scratch = t0;
+  Register scratch2 = t1;
+
+  Register spread = a2;
+  Register spread_map = t3;
+
+  Register spread_len = t3;
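+  // Note: spread_map and spread_len deliberately alias t3. The map is only
+  // needed for the fast-path checks below, all of which complete before the
+  // length is loaded.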
+
+  Register native_context = t4;
+
+  __ lw(spread, MemOperand(sp, 0));
+  __ lw(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
+  __ lw(native_context, NativeContextMemOperand());
+
+  Label runtime_call, push_args;
+  // Check that the spread is an array.
+  __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset));
+  __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE));
+
+  // Check that we have the original ArrayPrototype.
+  __ lw(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
+  __ lw(scratch2, ContextMemOperand(native_context,
+                                    Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
+  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
+
+  // Check that the ArrayPrototype hasn't been modified in a way that would
+  // affect iteration.
+  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
+  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
+  __ Branch(&runtime_call, ne, scratch,
+            Operand(Smi::FromInt(Isolate::kProtectorValid)));
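+  // (The runtime invalidates this protector cell whenever iteration-relevant
+  // state, e.g. Array.prototype[Symbol.iterator], is mutated.)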
+
+  // Check that the map of the initial array iterator hasn't changed.
+  __ lw(scratch,
+        ContextMemOperand(native_context,
+                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
+  __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
+  __ lw(scratch2,
+        ContextMemOperand(native_context,
+                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
+  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
+
+  // For FastPacked kinds, iteration will have the same effect as simply
+  // accessing each property in order.
+  Label no_protector_check;
+  __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
+  __ DecodeField<Map::ElementsKindBits>(scratch);
+  __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS));
+  // For non-FastHoley kinds, we can skip the protector check.
+  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS));
+  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS));
+  // Check the ArrayProtector cell.
+  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
+  __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
+  __ Branch(&runtime_call, ne, scratch,
+            Operand(Smi::FromInt(Isolate::kProtectorValid)));
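+  // (Holey kinds read holes from the prototype chain during iteration, so
+  // they additionally require the array protector to be intact.)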
+
+  __ bind(&no_protector_check);
+  // Load the FixedArray backing store, but use the length from the array.
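+  // (The elements backing store may be longer than the JSArray's actual
+  // length.)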
+  __ lw(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset));
+  __ SmiUntag(spread_len);
+  __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
+  __ Branch(&push_args);
+
+  __ bind(&runtime_call);
+  {
+    // Call the runtime for the result of the spread.
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ SmiTag(argc);
+    __ Push(constructor, new_target, argc, spread);
+    __ CallRuntime(Runtime::kSpreadIterableFixed);
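+    // The result (returned in v0) is a FixedArray holding the spread
+    // elements; its length is read below.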
+    __ mov(spread, v0);
+    __ Pop(constructor, new_target, argc);
+    __ SmiUntag(argc);
+  }
+
+  {
+    // Calculate the new nargs including the result of the spread.
+    __ lw(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset));
+    __ SmiUntag(spread_len);
+
+    __ bind(&push_args);
+    // argc += spread_len - 1. Subtract 1 for the spread itself.
+    __ Addu(argc, argc, spread_len);
+    __ Subu(argc, argc, Operand(1));
+
+    // Pop the spread argument off the stack.
+    __ Pop(scratch);
+  }
+
+  // Check for stack overflow.
+  {
+    // Check the stack for overflow. We are not trying to catch interruptions
+    // (i.e. debug break and preemption) here, so check the "real stack limit".
+    Label done;
+    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+    // Make scratch the space we have left. The stack might already be
+    // overflowed here which will cause scratch to become negative.
+    __ Subu(scratch, sp, scratch);
+    // Check if the arguments will overflow the stack.
+    __ sll(at, spread_len, kPointerSizeLog2);
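+    // ('at' now holds the number of bytes the spread elements will occupy.)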
+    __ Branch(&done, gt, scratch, Operand(at));  // Signed comparison.
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    __ bind(&done);
+  }
+
2740
2741 // Put the evaluated spread onto the stack as additional arguments.
2742 {
2743 __ mov(scratch, zero_reg);
2744 Label done, loop;
2745 __ bind(&loop);
2746 __ Branch(&done, eq, scratch, Operand(spread_len));
2747 __ Lsa(scratch2, spread, scratch, kPointerSizeLog2);
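+    // (Lsa computes spread + (scratch << kPointerSizeLog2); the FixedArray
+    // header offset is applied by FieldMemOperand below.)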
+    __ lw(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
+    __ Push(scratch2);
+    __ Addu(scratch, scratch, Operand(1));
+    __ Branch(&loop);
+    __ bind(&done);
+  }
+}
+
+// static
+void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- a0 : the number of arguments (not including the receiver)
+  //  -- a1 : the target to call (can be any Object).
+  // -----------------------------------
+
+  // CheckSpreadAndPushToStack will push a3 to save it.
+  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
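+  // (Undefined keeps a3 holding a valid tagged value while the helper pushes
+  // it across a possible runtime call, where the GC may visit it.)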
+  CheckSpreadAndPushToStack(masm);
+  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
+                                            TailCallMode::kDisallow),
+          RelocInfo::CODE_TARGET);
+}
+
 // static
 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : the number of arguments (not including the receiver)
   //  -- a1 : the constructor to call (checked to be a JSFunction)
   //  -- a3 : the new target (checked to be a constructor)
   // -----------------------------------
   __ AssertFunction(a1);
 
   // Calling convention for function specific ConstructStubs require
(...skipping 170 matching lines...)
 
 // static
 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : the number of arguments (not including the receiver)
   //  -- a1 : the constructor to call (can be any Object)
   //  -- a3 : the new target (either the same as the constructor or
   //          the JSFunction on which new was invoked initially)
   // -----------------------------------
 
-  Register argc = a0;
-  Register constructor = a1;
-  Register new_target = a3;
-
-  Register scratch = t0;
-  Register scratch2 = t1;
-
-  Register spread = a2;
-  Register spread_map = t3;
-
-  Register native_context = t4;
-
-  __ lw(spread, MemOperand(sp, 0));
-  __ lw(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
-  __ lw(native_context, NativeContextMemOperand());
-
-  Label runtime_call, push_args;
-  // Check that the spread is an array.
-  __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset));
-  __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE));
-
-  // Check that we have the original ArrayPrototype.
-  __ lw(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
-  __ lw(scratch2, ContextMemOperand(native_context,
-                                    Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
-  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
-
-  // Check that the ArrayPrototype hasn't been modified in a way that would
-  // affect iteration.
-  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
-  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
-  __ Branch(&runtime_call, ne, scratch,
-            Operand(Smi::FromInt(Isolate::kProtectorValid)));
-
-  // Check that the map of the initial array iterator hasn't changed.
-  __ lw(scratch,
-        ContextMemOperand(native_context,
-                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
-  __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
-  __ lw(scratch2,
-        ContextMemOperand(native_context,
-                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
-  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
-
-  // For FastPacked kinds, iteration will have the same effect as simply
-  // accessing each property in order.
-  Label no_protector_check;
-  __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
-  __ DecodeField<Map::ElementsKindBits>(scratch);
-  __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS));
-  // For non-FastHoley kinds, we can skip the protector check.
-  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS));
-  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS));
-  // Check the ArrayProtector cell.
-  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
-  __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
-  __ Branch(&runtime_call, ne, scratch,
-            Operand(Smi::FromInt(Isolate::kProtectorValid)));
-
-  __ bind(&no_protector_check);
-  // Load the FixedArray backing store.
-  __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
-  __ Branch(&push_args);
-
-  __ bind(&runtime_call);
-  {
-    // Call the builtin for the result of the spread.
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ SmiTag(argc);
-    __ Push(constructor, new_target, argc, spread);
-    __ CallRuntime(Runtime::kSpreadIterableFixed);
-    __ mov(spread, v0);
-    __ Pop(constructor, new_target, argc);
-    __ SmiUntag(argc);
-  }
-
-  Register spread_len = t3;
-  __ bind(&push_args);
-  {
-    // Pop the spread argument off the stack.
-    __ Pop(scratch);
-    // Calculate the new nargs including the result of the spread.
-    __ lw(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset));
-    __ SmiUntag(spread_len);
-    // argc += spread_len - 1. Subtract 1 for the spread itself.
-    __ Addu(argc, argc, spread_len);
-    __ Subu(argc, argc, Operand(1));
-  }
-
-  // Check for stack overflow.
-  {
-    // Check the stack for overflow. We are not trying to catch interruptions
-    // (i.e. debug break and preemption) here, so check the "real stack limit".
-    Label done;
-    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
-    // Make scratch the space we have left. The stack might already be
-    // overflowed here which will cause ip to become negative.
-    __ Subu(scratch, sp, scratch);
-    // Check if the arguments will overflow the stack.
-    __ sll(at, spread_len, kPointerSizeLog2);
-    __ Branch(&done, gt, scratch, Operand(at));  // Signed comparison.
-    __ TailCallRuntime(Runtime::kThrowStackOverflow);
-    __ bind(&done);
-  }
-
-  // Put the evaluated spread onto the stack as additional arguments.
-  {
-    __ mov(scratch, zero_reg);
-    Label done, loop;
-    __ bind(&loop);
-    __ Branch(&done, eq, scratch, Operand(spread_len));
-    __ Lsa(scratch2, spread, scratch, kPointerSizeLog2);
-    __ lw(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
-    __ Push(scratch2);
-    __ Addu(scratch, scratch, Operand(1));
-    __ Branch(&loop);
-    __ bind(&done);
-  }
-
-  // Dispatch.
+  CheckSpreadAndPushToStack(masm);
   __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
 // static
 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : requested object size (untagged)
   //  -- ra : return address
   // -----------------------------------
   __ SmiTag(a0);
(...skipping 155 matching lines...)
     __ break_(0xCC);
   }
 }
 
 #undef __
 
 }  // namespace internal
 }  // namespace v8
 
 #endif  // V8_TARGET_ARCH_MIPS
