OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 1241 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1252 __ TailCallStub(&stub); | 1252 __ TailCallStub(&stub); |
1253 | 1253 |
1254 __ bind(&stack_overflow); | 1254 __ bind(&stack_overflow); |
1255 { | 1255 { |
1256 __ TailCallRuntime(Runtime::kThrowStackOverflow); | 1256 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
1257 // Unreachable code. | 1257 // Unreachable code. |
1258 __ bkpt(0); | 1258 __ bkpt(0); |
1259 } | 1259 } |
1260 } | 1260 } |
1261 | 1261 |
| 1262 // static |
| 1263 void Builtins::Generate_InterpreterPushArgsAndConstructWithSpread( |
| 1264 MacroAssembler* masm) { |
| // Interpreter entry for a construct call whose final argument is a spread |
| // ('new f(...args)'). This only lays out the stack (receiver slot plus the |
| // arguments copied from the interpreter's register file); the spread |
| // itself is expanded by the ConstructWithSpread builtin tail-called below. |
| 1265 // ----------- S t a t e ------------- |
| 1266 // -- r0 : argument count (not including receiver) |
| 1267 // -- r1 : constructor to call |
| 1268 // -- r2 : allocation site feedback if available, undefined otherwise. |
| 1269 // -- r3 : new target |
| 1270 // -- r4 : address of the first argument |
| 1271 // ----------------------------------- |
| 1272 Label stack_overflow; |
| 1273 |
| 1274 // Push a slot for the receiver to be constructed. |
| // NOTE(review): zero is pushed as the placeholder; presumably the callee |
| // stores the allocated receiver into this slot — confirm against the |
| // ConstructWithSpread frame layout. |
| 1275 __ mov(ip, Operand::Zero()); |
| 1276 __ push(ip); |
| 1277 |
| 1278 // Push the arguments. r5, r4, r6 will be modified. |
| 1279 Generate_InterpreterPushArgs(masm, r0, r4, r5, r6, &stack_overflow); |
| 1280 |
| // Sanity check (per the state comment, r2 holds either an AllocationSite |
| // or undefined); emitted only when debug code is enabled. |
| 1281 __ AssertUndefinedOrAllocationSite(r2, r5); |
| 1282 // Call the constructor with r0, r1, and r3 unmodified. |
| 1283 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), |
| 1284 RelocInfo::CODE_TARGET); |
| 1285 |
| 1286 __ bind(&stack_overflow); |
| 1287 { |
| 1288 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
| 1289 // Unreachable code. |
| 1290 __ bkpt(0); |
| 1291 } |
| 1292 } |
| 1293 |
1262 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { | 1294 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { |
1263 // Set the return address to the correct point in the interpreter entry | 1295 // Set the return address to the correct point in the interpreter entry |
1264 // trampoline. | 1296 // trampoline. |
1265 Smi* interpreter_entry_return_pc_offset( | 1297 Smi* interpreter_entry_return_pc_offset( |
1266 masm->isolate()->heap()->interpreter_entry_return_pc_offset()); | 1298 masm->isolate()->heap()->interpreter_entry_return_pc_offset()); |
1267 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero); | 1299 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero); |
1268 __ Move(r2, masm->isolate()->builtins()->InterpreterEntryTrampoline()); | 1300 __ Move(r2, masm->isolate()->builtins()->InterpreterEntryTrampoline()); |
1269 __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value() + | 1301 __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value() + |
1270 Code::kHeaderSize - kHeapObjectTag)); | 1302 Code::kHeaderSize - kHeapObjectTag)); |
1271 | 1303 |
(...skipping 1451 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2723 } | 2755 } |
2724 | 2756 |
2725 // Called Construct on an Object that doesn't have a [[Construct]] internal | 2757 // Called Construct on an Object that doesn't have a [[Construct]] internal |
2726 // method. | 2758 // method. |
2727 __ bind(&non_constructor); | 2759 __ bind(&non_constructor); |
2728 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), | 2760 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), |
2729 RelocInfo::CODE_TARGET); | 2761 RelocInfo::CODE_TARGET); |
2730 } | 2762 } |
2731 | 2763 |
2732 // static | 2764 // static |
| 2765 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { |
| // Implements 'new constructor(...spread)'. On entry the spread object sits |
| // on top of the stack. It is expanded into individual stack arguments, |
| // argc is adjusted, and control tail-calls the generic Construct builtin. |
| // A fast path reads a plain JSArray's elements directly when iterating the |
| // array is provably equivalent to indexed access (original map/prototype, |
| // protector cells intact, fast non-double elements kind); every other |
| // spread is expanded via Runtime::kSpreadIterableFixed. |
| 2766 // ----------- S t a t e ------------- |
| 2767 // -- r0 : the number of arguments (not including the receiver) |
| 2768 // -- r1 : the constructor to call (can be any Object) |
| 2769 // -- r3 : the new target (either the same as the constructor or |
| 2770 // the JSFunction on which new was invoked initially) |
| 2771 // ----------------------------------- |
| 2772 |
| 2773 Register argc = r0; |
| 2774 Register constructor = r1; |
| 2775 Register new_target = r3; |
| 2776 |
| 2777 Register scratch = r2; |
| 2778 Register scratch2 = r6; |
| 2779 |
| 2780 Register spread = r4; |
| 2781 Register spread_map = r5; |
| // NOTE(review): the spread argument is left on the stack (popped only at |
| // &push_args below), presumably to keep it alive across the potential |
| // runtime call — confirm. |
| 2782 __ ldr(spread, MemOperand(sp, 0)); |
| 2783 __ ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); |
| 2784 |
| 2785 Label runtime_call, push_args; |
| 2786 // Check that the spread is an array. |
| 2787 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE); |
| 2788 __ b(ne, &runtime_call); |
| 2789 |
| 2790 // Check that we have the original ArrayPrototype. |
| 2791 __ ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); |
| 2792 __ ldr(scratch2, NativeContextMemOperand()); |
| 2793 __ ldr(scratch2, |
| 2794 ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); |
| 2795 __ cmp(scratch, scratch2); |
| 2796 __ b(ne, &runtime_call); |
| 2797 |
| 2798 // Check that the ArrayPrototype hasn't been modified in a way that would |
| 2799 // affect iteration. |
| 2800 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); |
| 2801 __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); |
| 2802 __ cmp(scratch, Operand(Smi::FromInt(Isolate::kProtectorValid))); |
| 2803 __ b(ne, &runtime_call); |
| 2804 |
| 2805 // Check that the map of the initial array iterator hasn't changed. |
| 2806 __ ldr(scratch2, NativeContextMemOperand()); |
| 2807 __ ldr(scratch, |
| 2808 ContextMemOperand(scratch2, |
| 2809 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); |
| 2810 __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 2811 __ ldr(scratch2, |
| 2812 ContextMemOperand( |
| 2813 scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); |
| 2814 __ cmp(scratch, scratch2); |
| 2815 __ b(ne, &runtime_call); |
| 2816 |
| 2817 // For FastPacked kinds, iteration will have the same effect as simply |
| 2818 // accessing each property in order. |
| 2819 Label no_protector_check; |
| 2820 __ ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); |
| 2821 __ DecodeField<Map::ElementsKindBits>(scratch); |
| // FIX: double kinds (FAST_DOUBLE_ELEMENTS / FAST_HOLEY_DOUBLE_ELEMENTS) |
| // are backed by a FixedDoubleArray of raw 64-bit doubles, but the push |
| // loop below reads tagged kPointerSize-wide FixedArray slots — taking the |
| // fast path for them would push raw double bits as tagged values. Bound |
| // the check at FAST_HOLEY_ELEMENTS (instead of LAST_FAST_ELEMENTS_KIND) |
| // so double kinds fall through to Runtime::kSpreadIterableFixed, which |
| // boxes the elements into a regular FixedArray. |
| __ cmp(scratch, Operand(FAST_HOLEY_ELEMENTS)); |
| 2823 __ b(hi, &runtime_call); |
| 2824 // For non-FastHoley kinds, we can skip the protector check. |
| 2825 __ cmp(scratch, Operand(FAST_SMI_ELEMENTS)); |
| 2826 __ b(eq, &no_protector_check); |
| 2827 __ cmp(scratch, Operand(FAST_ELEMENTS)); |
| 2828 __ b(eq, &no_protector_check); |
| 2831 // Check the ArrayProtector cell. |
| // NOTE(review): holey kinds additionally require this protector — |
| // presumably because a hole forces a prototype-chain lookup that is only |
| // side-effect free while the protector is valid. |
| 2832 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); |
| 2833 __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); |
| 2834 __ cmp(scratch, Operand(Smi::FromInt(Isolate::kProtectorValid))); |
| 2835 __ b(ne, &runtime_call); |
| 2836 |
| 2837 __ bind(&no_protector_check); |
| 2838 // Load the FixedArray backing store. |
| 2839 __ ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); |
| 2840 __ b(&push_args); |
| 2841 |
| 2842 __ bind(&runtime_call); |
| 2843 { |
| 2844 // Call the builtin for the result of the spread. |
| // argc is smi-tagged before being pushed so every slot in this internal |
| // frame holds a valid tagged value. |
| 2845 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 2846 __ SmiTag(argc); |
| 2847 __ Push(constructor); |
| 2848 __ Push(new_target); |
| 2849 __ Push(argc); |
| 2850 __ Push(spread); |
| 2851 __ CallRuntime(Runtime::kSpreadIterableFixed); |
| // The runtime returns the spread expanded into a FixedArray (in r0); its |
| // length is read below via FixedArray::kLengthOffset. |
| 2852 __ mov(spread, r0); |
| 2853 __ Pop(argc); |
| 2854 __ Pop(new_target); |
| 2855 __ Pop(constructor); |
| 2856 __ SmiUntag(argc); |
| 2857 } |
| 2858 |
| 2859 Register spread_len = r5; |
| 2860 __ bind(&push_args); |
| 2861 { |
| 2862 // Pop the spread argument off the stack. |
| 2863 __ Pop(scratch); |
| 2864 // Calculate the new nargs including the result of the spread. |
| 2865 __ ldr(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset)); |
| 2866 __ SmiUntag(spread_len); |
| 2867 // argc += spread_len - 1. Subtract 1 for the spread itself. |
| 2868 __ add(argc, argc, spread_len); |
| 2869 __ sub(argc, argc, Operand(1)); |
| 2870 } |
| 2871 |
| 2872 // Check for stack overflow. |
| 2873 { |
| 2874 // Check the stack for overflow. We are not trying to catch interruptions |
| 2875 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 2876 Label done; |
| 2877 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); |
| 2878 // Make scratch the space we have left. The stack might already be |
| 2879 // overflowed here which will cause scratch to become negative. |
| 2880 __ sub(scratch, sp, scratch); |
| 2881 // Check if the arguments will overflow the stack. |
| 2882 __ cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2)); |
| 2883 __ b(gt, &done); // Signed comparison. |
| 2884 __ TailCallRuntime(Runtime::kThrowStackOverflow); |
| 2885 __ bind(&done); |
| 2886 } |
| 2887 |
| 2888 // Put the evaluated spread onto the stack as additional arguments. |
| 2889 { |
| 2890 __ mov(scratch, Operand(0)); |
| 2891 Label done, loop; |
| 2892 __ bind(&loop); |
| 2893 __ cmp(scratch, spread_len); |
| 2894 __ b(eq, &done); |
| // FieldMemOperand accounts for the heap-object tag, so this reads tagged |
| // element 'scratch' of the FixedArray starting at kHeaderSize. |
| 2895 __ add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2)); |
| 2896 __ ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); |
| 2897 __ Push(scratch2); |
| 2898 __ add(scratch, scratch, Operand(1)); |
| 2899 __ b(&loop); |
| 2900 __ bind(&done); |
| 2901 } |
| 2902 |
| 2903 // Dispatch. |
| 2904 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 2905 } |
| 2906 |
| 2907 // static |
2733 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { | 2908 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { |
2734 // ----------- S t a t e ------------- | 2909 // ----------- S t a t e ------------- |
2735 // -- r1 : requested object size (untagged) | 2910 // -- r1 : requested object size (untagged) |
2736 // -- lr : return address | 2911 // -- lr : return address |
2737 // ----------------------------------- | 2912 // ----------------------------------- |
// Smi-tag the raw size so it is passed as a tagged runtime argument. | // Smi-tag the raw size so it is passed as a tagged runtime argument. |
2738 __ SmiTag(r1); | 2913 __ SmiTag(r1); |
2739 __ Push(r1); | 2914 __ Push(r1); |
// NOTE(review): cp (the context register) is set to Smi::kZero — | // NOTE(review): cp (the context register) is set to Smi::kZero — |
// presumably signalling "no context" for the runtime call; confirm. | // presumably signalling "no context" for the runtime call; confirm. |
2740 __ Move(cp, Smi::kZero); | 2915 __ Move(cp, Smi::kZero); |
2741 __ TailCallRuntime(Runtime::kAllocateInNewSpace); | 2916 __ TailCallRuntime(Runtime::kAllocateInNewSpace); |
2742 } | 2917 } |
(...skipping 144 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2887 __ bkpt(0); | 3062 __ bkpt(0); |
2888 } | 3063 } |
2889 } | 3064 } |
2890 | 3065 |
2891 #undef __ | 3066 #undef __ |
2892 | 3067 |
2893 } // namespace internal | 3068 } // namespace internal |
2894 } // namespace v8 | 3069 } // namespace v8 |
2895 | 3070 |
2896 #endif // V8_TARGET_ARCH_ARM | 3071 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |